From b72dc54e605cb6257d1e7fce55bc33745eba60fb Mon Sep 17 00:00:00 2001
From: AztecBot
Date: Tue, 23 Jul 2024 21:21:54 +0000
Subject: [PATCH 1/4] chore: apply sync fixes

---
 .aztec-sync-commit | 2 +-
 .github/workflows/test-js-packages.yml | 53 +
 .release-please-manifest.json | 4 +-
 CHANGELOG.md | 132 +
 Cargo.lock | 60 +-
 Cargo.toml | 16 +-
 acvm-repo/CHANGELOG.md | 133 +
 acvm-repo/acir/Cargo.toml | 2 +-
 acvm-repo/acir/codegen/acir.cpp | 1874 ++++++++------
 .../opcodes/black_box_function_call.rs | 3 +-
 acvm-repo/acir/src/lib.rs | 6 +-
 .../acir/tests/test_program_serialization.rs | 33 +-
 acvm-repo/acir_field/Cargo.toml | 2 +-
 acvm-repo/acir_field/src/field_element.rs | 32 +-
 acvm-repo/acvm/Cargo.toml | 2 +-
 acvm-repo/acvm/src/pwg/brillig.rs | 3 +-
 acvm-repo/acvm_js/Cargo.toml | 2 +-
 acvm-repo/acvm_js/package.json | 2 +-
 .../test/shared/complex_foreign_call.ts | 14 +-
 acvm-repo/acvm_js/test/shared/foreign_call.ts | 8 +-
 acvm-repo/blackbox_solver/Cargo.toml | 2 +-
 acvm-repo/blackbox_solver/src/bigint.rs | 4 +-
 acvm-repo/bn254_blackbox_solver/Cargo.toml | 2 +-
 acvm-repo/brillig/Cargo.toml | 2 +-
 acvm-repo/brillig/src/lib.rs | 2 +-
 acvm-repo/brillig/src/opcodes.rs | 91 +-
 acvm-repo/brillig_vm/Cargo.toml | 2 +-
 acvm-repo/brillig_vm/src/arithmetic.rs | 245 +-
 acvm-repo/brillig_vm/src/black_box.rs | 22 +-
 acvm-repo/brillig_vm/src/lib.rs | 145 +-
 acvm-repo/brillig_vm/src/memory.rs | 215 +-
 .../src/transforms/contract_interface.rs | 69 +-
 aztec_macros/src/transforms/note_interface.rs | 5 +-
 aztec_macros/src/transforms/storage.rs | 1 +
 aztec_macros/src/utils/constants.rs | 1 -
 aztec_macros/src/utils/hir_utils.rs | 46 +-
 compiler/integration-tests/package.json | 2 +-
 compiler/noirc_driver/src/lib.rs | 18 +-
 .../noirc_driver/tests/stdlib_warnings.rs | 2 +-
 compiler/noirc_errors/src/position.rs | 4 +
 .../brillig/brillig_gen/brillig_black_box.rs | 46 +-
 .../brillig/brillig_gen/brillig_directive.rs | 146 +-
 .../noirc_evaluator/src/brillig/brillig_ir.rs | 9 +-
 .../brillig/brillig_ir/brillig_variable.rs | 8 +-
 .../brillig/brillig_ir/codegen_intrinsic.rs | 47 +-
 .../src/brillig/brillig_ir/instructions.rs | 16 +-
 compiler/noirc_evaluator/src/ssa.rs | 19 +-
 .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 4 +-
 .../ssa/acir_gen/acir_ir/generated_acir.rs | 26 +-
 .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 103 +-
 .../src/ssa/ir/function_inserter.rs | 37 +-
 .../src/ssa/ir/instruction/call.rs | 45 +-
 .../src/ssa/opt/flatten_cfg.rs | 75 +-
 .../src/ssa/ssa_gen/context.rs | 11 +-
 .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 2 +-
 compiler/noirc_frontend/Cargo.toml | 1 +
 compiler/noirc_frontend/src/ast/expression.rs | 23 +-
 compiler/noirc_frontend/src/ast/mod.rs | 18 +-
 compiler/noirc_frontend/src/ast/statement.rs | 4 +-
 compiler/noirc_frontend/src/ast/structure.rs | 13 +-
 compiler/noirc_frontend/src/ast/traits.rs | 7 +-
 .../noirc_frontend/src/elaborator/comptime.rs | 71 +
 .../src/elaborator/expressions.rs | 88 +-
 compiler/noirc_frontend/src/elaborator/mod.rs | 547 ++--
 .../noirc_frontend/src/elaborator/patterns.rs | 43 +-
 .../noirc_frontend/src/elaborator/scope.rs | 22 +-
 .../src/elaborator/statements.rs | 16 +-
 .../src/elaborator/trait_impls.rs | 228 ++
 .../noirc_frontend/src/elaborator/traits.rs | 220 +-
 .../noirc_frontend/src/elaborator/types.rs | 120 +-
 .../noirc_frontend/src/hir/comptime/errors.rs | 19 +-
 .../src/hir/comptime/interpreter.rs | 327 ++-
 .../src/hir/comptime/interpreter/builtin.rs | 317 ++-
 .../src/hir/comptime/interpreter/foreign.rs | 48 +
 .../src/hir/comptime/interpreter/unquote.rs | 5 +-
.../noirc_frontend/src/hir/comptime/mod.rs | 1 - .../noirc_frontend/src/hir/comptime/scan.rs | 271 -- .../noirc_frontend/src/hir/comptime/tests.rs | 117 +- .../noirc_frontend/src/hir/comptime/value.rs | 59 +- .../src/hir/def_collector/dc_crate.rs | 312 +-- .../src/hir/def_collector/dc_mod.rs | 42 +- .../src/hir/def_collector/errors.rs | 27 + .../noirc_frontend/src/hir/def_map/mod.rs | 4 +- .../src/hir/resolution/functions.rs | 84 - .../src/hir/resolution/globals.rs | 46 - .../src/hir/resolution/impls.rs | 145 -- .../src/hir/resolution/import.rs | 45 +- .../noirc_frontend/src/hir/resolution/mod.rs | 48 - .../src/hir/resolution/path_resolver.rs | 9 +- .../src/hir/resolution/resolver.rs | 2204 ----------------- .../src/hir/resolution/structs.rs | 83 - .../src/hir/resolution/traits.rs | 506 ---- .../src/hir/resolution/type_aliases.rs | 33 - .../noirc_frontend/src/hir/type_check/expr.rs | 1395 ----------- .../noirc_frontend/src/hir/type_check/mod.rs | 798 +----- .../noirc_frontend/src/hir/type_check/stmt.rs | 395 --- compiler/noirc_frontend/src/hir_def/expr.rs | 12 +- .../noirc_frontend/src/hir_def/function.rs | 39 +- compiler/noirc_frontend/src/hir_def/traits.rs | 9 +- compiler/noirc_frontend/src/hir_def/types.rs | 514 ++-- compiler/noirc_frontend/src/lexer/token.rs | 16 +- compiler/noirc_frontend/src/locations.rs | 132 +- .../src/monomorphization/mod.rs | 23 +- compiler/noirc_frontend/src/node_interner.rs | 68 +- compiler/noirc_frontend/src/parser/errors.rs | 6 +- compiler/noirc_frontend/src/parser/mod.rs | 20 +- compiler/noirc_frontend/src/parser/parser.rs | 32 +- .../src/parser/parser/function.rs | 38 +- .../noirc_frontend/src/parser/parser/path.rs | 2 + .../src/parser/parser/structs.rs | 15 +- .../src/parser/parser/traits.rs | 29 +- .../noirc_frontend/src/parser/parser/types.rs | 67 +- compiler/noirc_frontend/src/tests.rs | 496 +++- compiler/wasm/package.json | 2 +- compiler/wasm/src/compile.rs | 20 +- compiler/wasm/src/compile_new.rs | 15 +- cspell.json | 2 + docs/docs/explainers/explainer-oracle.md | 4 +- .../hello_noir/project_breakdown.md | 1 + docs/docs/how_to/how-to-oracles.md | 4 +- docs/docs/how_to/how-to-recursion.md | 2 +- docs/docs/how_to/how-to-solidity-verifier.md | 6 +- .../concepts/{data_bus.md => data_bus.mdx} | 4 +- docs/docs/noir/concepts/generics.md | 57 + .../noir/concepts/{oracles.md => oracles.mdx} | 6 +- .../noir/modules_packages_crates/modules.md | 34 + docs/src/components/Notes/_experimental.mdx | 7 +- .../explainers/explainer-oracle.md | 57 + .../explainers/explainer-recursion.md | 176 ++ .../getting_started/_category_.json | 5 + .../barretenberg/_category_.json | 6 + .../getting_started/barretenberg/index.md | 47 + .../hello_noir/_category_.json | 5 + .../getting_started/hello_noir/index.md | 145 ++ .../hello_noir/project_breakdown.md | 160 ++ .../installation/_category_.json | 6 + .../getting_started/installation/index.md | 48 + .../installation/other_install_methods.md | 102 + .../getting_started/tooling/noir_codegen.md | 114 + .../version-v0.32.0/how_to/_category_.json | 5 + .../how_to/debugger/_category_.json | 6 + .../debugger/debugging_with_the_repl.md | 164 ++ .../how_to/debugger/debugging_with_vs_code.md | 68 + .../version-v0.32.0/how_to/how-to-oracles.md | 273 ++ .../how_to/how-to-recursion.md | 180 ++ .../how_to/how-to-solidity-verifier.md | 251 ++ .../version-v0.32.0/how_to/merkle-proof.mdx | 48 + .../how_to/using-devcontainers.mdx | 110 + docs/versioned_docs/version-v0.32.0/index.mdx | 67 + .../version-v0.32.0/migration_notes.md | 105 + 
.../noir/concepts/_category_.json | 6 + .../version-v0.32.0/noir/concepts/assert.md | 78 + .../version-v0.32.0/noir/concepts/comments.md | 33 + .../noir/concepts/control_flow.md | 77 + .../noir/concepts/data_bus.mdx | 23 + .../noir/concepts/data_types/_category_.json | 5 + .../noir/concepts/data_types/arrays.md | 253 ++ .../noir/concepts/data_types/booleans.md | 28 + .../noir/concepts/data_types/fields.md | 192 ++ .../concepts/data_types/function_types.md | 26 + .../noir/concepts/data_types/index.md | 118 + .../noir/concepts/data_types/integers.md | 156 ++ .../noir/concepts/data_types/references.md | 23 + .../noir/concepts/data_types/slices.mdx | 358 +++ .../noir/concepts/data_types/strings.md | 79 + .../noir/concepts/data_types/structs.md | 70 + .../noir/concepts/data_types/tuples.md | 48 + .../noir/concepts/functions.md | 226 ++ .../version-v0.32.0/noir/concepts/generics.md | 106 + .../version-v0.32.0/noir/concepts/globals.md | 72 + .../version-v0.32.0/noir/concepts/lambdas.md | 81 + .../noir/concepts/mutability.md | 121 + .../version-v0.32.0/noir/concepts/ops.md | 98 + .../version-v0.32.0/noir/concepts/oracles.mdx | 29 + .../noir/concepts/shadowing.md | 44 + .../version-v0.32.0/noir/concepts/traits.md | 405 +++ .../noir/concepts/unconstrained.md | 99 + .../modules_packages_crates/_category_.json | 6 + .../crates_and_packages.md | 43 + .../modules_packages_crates/dependencies.md | 124 + .../noir/modules_packages_crates/modules.md | 185 ++ .../modules_packages_crates/workspaces.md | 42 + .../noir/standard_library/_category_.json | 6 + .../noir/standard_library/bigint.md | 122 + .../noir/standard_library/black_box_fns.md | 32 + .../noir/standard_library/bn254.md | 46 + .../standard_library/containers/boundedvec.md | 419 ++++ .../standard_library/containers/hashmap.md | 570 +++++ .../noir/standard_library/containers/index.md | 5 + .../noir/standard_library/containers/vec.mdx | 170 ++ .../cryptographic_primitives/_category_.json | 5 + .../cryptographic_primitives/ciphers.mdx | 32 + .../cryptographic_primitives/ec_primitives.md | 102 + .../ecdsa_sig_verification.mdx | 98 + .../cryptographic_primitives/eddsa.mdx | 37 + .../embedded_curve_ops.mdx | 98 + .../cryptographic_primitives/hashes.mdx | 253 ++ .../cryptographic_primitives/index.md | 14 + .../cryptographic_primitives/schnorr.mdx | 64 + .../noir/standard_library/is_unconstrained.md | 69 + .../noir/standard_library/logging.md | 78 + .../noir/standard_library/merkle_trees.md | 58 + .../noir/standard_library/options.md | 101 + .../noir/standard_library/recursion.md | 85 + .../noir/standard_library/traits.md | 501 ++++ .../noir/standard_library/zeroed.md | 26 + .../NoirJS/backend_barretenberg/.nojekyll | 1 + .../classes/BarretenbergBackend.md | 141 ++ .../classes/BarretenbergVerifier.md | 58 + .../NoirJS/backend_barretenberg/index.md | 40 + .../type-aliases/BackendOptions.md | 21 + .../backend_barretenberg/typedoc-sidebar.cjs | 4 + .../reference/NoirJS/noir_js/.nojekyll | 1 + .../reference/NoirJS/noir_js/classes/Noir.md | 52 + .../reference/NoirJS/noir_js/functions/and.md | 22 + .../NoirJS/noir_js/functions/blake2s256.md | 21 + .../functions/ecdsa_secp256k1_verify.md | 28 + .../functions/ecdsa_secp256r1_verify.md | 28 + .../NoirJS/noir_js/functions/keccak256.md | 21 + .../NoirJS/noir_js/functions/sha256.md | 21 + .../reference/NoirJS/noir_js/functions/xor.md | 22 + .../reference/NoirJS/noir_js/index.md | 49 + .../noir_js/type-aliases/ErrorWithPayload.md | 15 + .../type-aliases/ForeignCallHandler.md | 24 + 
.../noir_js/type-aliases/ForeignCallInput.md | 9 + .../noir_js/type-aliases/ForeignCallOutput.md | 9 + .../NoirJS/noir_js/type-aliases/WitnessMap.md | 9 + .../NoirJS/noir_js/typedoc-sidebar.cjs | 4 + .../reference/NoirJS/noir_wasm/.nojekyll | 1 + .../NoirJS/noir_wasm/functions/compile.md | 51 + .../noir_wasm/functions/compile_contract.md | 51 + .../noir_wasm/functions/createFileManager.md | 21 + .../functions/inflateDebugSymbols.md | 21 + .../reference/NoirJS/noir_wasm/index.md | 49 + .../NoirJS/noir_wasm/typedoc-sidebar.cjs | 4 + .../version-v0.32.0/reference/_category_.json | 5 + .../reference/debugger/_category_.json | 6 + .../debugger/debugger_known_limitations.md | 59 + .../reference/debugger/debugger_repl.md | 360 +++ .../reference/debugger/debugger_vscode.md | 82 + .../reference/nargo_commands.md | 271 ++ .../version-v0.32.0/tooling/debugger.md | 26 + .../tooling/language_server.md | 43 + .../version-v0.32.0/tooling/testing.md | 79 + .../version-v0.32.0/tutorials/noirjs_app.md | 327 +++ .../version-v0.32.0-sidebars.json | 93 + noir_stdlib/src/collections/mod.nr | 1 + noir_stdlib/src/collections/umap.nr | 469 ++++ noir_stdlib/src/compat.nr | 18 +- noir_stdlib/src/embedded_curve_ops.nr | 20 +- noir_stdlib/src/field/bn254.nr | 2 +- noir_stdlib/src/field/mod.nr | 11 +- noir_stdlib/src/hash/keccak.nr | 142 ++ noir_stdlib/src/hash/mod.nr | 197 +- noir_stdlib/src/meta/mod.nr | 10 + noir_stdlib/src/meta/quoted.nr | 4 + noir_stdlib/src/meta/trait_constraint.nr | 20 + noir_stdlib/src/meta/trait_def.nr | 4 + scripts/redo-typo-pr.sh | 13 +- .../attribute_args/Nargo.toml | 7 + .../attribute_args/src/main.nr | 20 + .../comptime_trait_constraint/Nargo.toml | 7 + .../comptime_trait_constraint/src/main.nr | 39 + .../comptime_traits/src/main.nr | 21 + .../function_attribute/Nargo.toml | 7 + .../function_attribute/src/main.nr | 18 + .../macros_in_comptime/Nargo.toml | 7 + .../macros_in_comptime/src/main.nr | 49 + .../regression_4635/src/main.nr | 4 +- .../regression_5428/Nargo.toml | 6 + .../regression_5428/src/main.nr | 9 + .../trait_as_constraint/Nargo.toml | 7 + .../trait_as_constraint/src/main.nr | 9 + .../trait_attribute/Nargo.toml | 7 + .../trait_attribute/src/main.nr | 19 + .../trait_call_full_path/Nargo.toml | 6 + .../trait_call_full_path/src/main.nr | 20 + .../trait_method_mut_self/Nargo.toml | 0 .../trait_method_mut_self/Prover.toml | 0 .../trait_method_mut_self/src/main.nr | 0 .../turbofish_call_func_diff_types/Nargo.toml | 0 .../Prover.toml | 0 .../src/main.nr | 0 .../compile_success_empty/unquote/Nargo.toml | 7 + .../compile_success_empty/unquote/src/main.nr | 4 + .../check_uncostrained_regression/Nargo.toml | 7 + .../check_uncostrained_regression/src/main.nr | 27 + .../bench_ecdsa_secp256k1/Nargo.toml | 7 + .../bench_ecdsa_secp256k1/Prover.toml | 169 ++ .../bench_ecdsa_secp256k1/src/main.nr | 6 + .../comptime_slice_equality/Nargo.toml | 7 + .../comptime_slice_equality/src/main.nr | 6 + .../embedded_curve_ops/src/main.nr | 6 +- .../poseidon_bn254_hash_width_3/Nargo.toml | 9 + .../poseidon_bn254_hash_width_3/Prover.toml | 4 + .../poseidon_bn254_hash_width_3/src/main.nr | 9 + .../regression_5045/src/main.nr | 2 +- .../execution_success/schnorr/src/main.nr | 8 +- .../simple_shield/src/main.nr | 7 +- .../execution_success/uhashmap/Nargo.toml | 6 + .../execution_success/uhashmap/Prover.toml | 26 + .../execution_success/uhashmap/src/main.nr | 352 +++ .../embedded_curve_ops/src/main.nr | 10 +- test_programs/rebuild.sh | 2 +- tooling/debugger/Cargo.toml | 2 +- tooling/debugger/ignored-tests.txt | 
14 +- tooling/debugger/src/context.rs | 691 ++++-- tooling/debugger/src/dap.rs | 48 +- tooling/debugger/src/lib.rs | 17 +- tooling/debugger/src/repl.rs | 152 +- tooling/debugger/tests/debug.rs | 2 +- tooling/fuzzer/src/dictionary/mod.rs | 8 +- tooling/lsp/src/lib.rs | 148 +- tooling/lsp/src/notifications/mod.rs | 284 ++- tooling/lsp/src/requests/code_lens_request.rs | 57 +- tooling/lsp/src/requests/document_symbol.rs | 746 ++++++ tooling/lsp/src/requests/goto_declaration.rs | 6 +- tooling/lsp/src/requests/goto_definition.rs | 46 +- tooling/lsp/src/requests/hover.rs | 641 +++++ tooling/lsp/src/requests/inlay_hint.rs | 929 +++++++ tooling/lsp/src/requests/mod.rs | 205 +- tooling/lsp/src/requests/profile_run.rs | 9 +- tooling/lsp/src/requests/references.rs | 103 +- tooling/lsp/src/requests/rename.rs | 74 +- tooling/lsp/src/requests/test_run.rs | 14 +- tooling/lsp/src/requests/tests.rs | 10 +- tooling/lsp/src/types.rs | 16 +- .../test_programs/document_symbol/Nargo.toml | 6 + .../test_programs/document_symbol/src/main.nr | 26 + .../go_to_definition/src/main.nr | 9 + .../lsp/test_programs/inlay_hints/Nargo.toml | 6 + .../lsp/test_programs/inlay_hints/src/main.nr | 94 + .../test_programs/rename_function/src/main.nr | 13 + .../lsp/test_programs/workspace/Nargo.toml | 2 + .../test_programs/workspace/one/Nargo.toml | 4 + .../test_programs/workspace/one/src/lib.nr | 25 + .../test_programs/workspace/two/Nargo.toml | 7 + .../test_programs/workspace/two/src/lib.nr | 53 + .../test_programs/workspace/two/src/other.nr | 2 + tooling/nargo/src/package.rs | 2 + tooling/nargo_cli/build.rs | 107 +- tooling/nargo_cli/src/cli/check_cmd.rs | 12 +- tooling/nargo_cli/src/cli/compile_cmd.rs | 30 +- tooling/nargo_cli/src/cli/debug_cmd.rs | 59 +- tooling/nargo_cli/src/cli/export_cmd.rs | 1 - tooling/nargo_cli/src/cli/info_cmd.rs | 12 +- tooling/nargo_cli/src/cli/mod.rs | 1 - tooling/nargo_cli/src/cli/test_cmd.rs | 2 - tooling/nargo_cli/tests/stdlib-tests.rs | 3 +- tooling/nargo_fmt/src/rewrite/imports.rs | 3 + tooling/nargo_fmt/tests/expected/use_super.nr | 5 + tooling/nargo_fmt/tests/input/use_super.nr | 7 + tooling/nargo_toml/Cargo.toml | 1 + tooling/nargo_toml/src/errors.rs | 3 + tooling/nargo_toml/src/lib.rs | 28 + tooling/nargo_toml/src/semver.rs | 6 + tooling/noir_codegen/package.json | 2 +- tooling/noir_js/package.json | 2 +- .../noir_js_backend_barretenberg/package.json | 2 +- tooling/noir_js_types/package.json | 2 +- tooling/noirc_abi_wasm/package.json | 2 +- .../profiler/src/cli/gates_flamegraph_cmd.rs | 8 +- tooling/profiler/src/gates_provider.rs | 12 +- yarn.lock | 818 +----- 364 files changed, 21937 insertions(+), 10358 deletions(-) create mode 100644 compiler/noirc_frontend/src/elaborator/comptime.rs create mode 100644 compiler/noirc_frontend/src/elaborator/trait_impls.rs create mode 100644 compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs delete mode 100644 compiler/noirc_frontend/src/hir/comptime/scan.rs delete mode 100644 compiler/noirc_frontend/src/hir/resolution/functions.rs delete mode 100644 compiler/noirc_frontend/src/hir/resolution/globals.rs delete mode 100644 compiler/noirc_frontend/src/hir/resolution/impls.rs delete mode 100644 compiler/noirc_frontend/src/hir/resolution/resolver.rs delete mode 100644 compiler/noirc_frontend/src/hir/resolution/structs.rs delete mode 100644 compiler/noirc_frontend/src/hir/resolution/traits.rs delete mode 100644 compiler/noirc_frontend/src/hir/resolution/type_aliases.rs delete mode 100644 compiler/noirc_frontend/src/hir/type_check/expr.rs delete 
mode 100644 compiler/noirc_frontend/src/hir/type_check/stmt.rs rename docs/docs/noir/concepts/{data_bus.md => data_bus.mdx} (88%) rename docs/docs/noir/concepts/{oracles.md => oracles.mdx} (78%) create mode 100644 docs/versioned_docs/version-v0.32.0/explainers/explainer-oracle.md create mode 100644 docs/versioned_docs/version-v0.32.0/explainers/explainer-recursion.md create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/index.md create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/index.md create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/project_breakdown.md create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/installation/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/installation/index.md create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/installation/other_install_methods.md create mode 100644 docs/versioned_docs/version-v0.32.0/getting_started/tooling/noir_codegen.md create mode 100644 docs/versioned_docs/version-v0.32.0/how_to/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/how_to/debugger/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/how_to/debugger/debugging_with_the_repl.md create mode 100644 docs/versioned_docs/version-v0.32.0/how_to/debugger/debugging_with_vs_code.md create mode 100644 docs/versioned_docs/version-v0.32.0/how_to/how-to-oracles.md create mode 100644 docs/versioned_docs/version-v0.32.0/how_to/how-to-recursion.md create mode 100644 docs/versioned_docs/version-v0.32.0/how_to/how-to-solidity-verifier.md create mode 100644 docs/versioned_docs/version-v0.32.0/how_to/merkle-proof.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/how_to/using-devcontainers.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/index.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/migration_notes.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/assert.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/comments.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/control_flow.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_bus.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/arrays.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/booleans.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/fields.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/function_types.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/index.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/integers.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/references.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/slices.mdx create mode 100644 
docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/strings.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/structs.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/tuples.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/functions.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/generics.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/globals.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/lambdas.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/mutability.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/ops.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/oracles.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/shadowing.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/traits.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/concepts/unconstrained.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/crates_and_packages.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/dependencies.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/modules.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/workspaces.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/bigint.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/black_box_fns.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/bn254.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/boundedvec.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/hashmap.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/index.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/vec.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ciphers.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ec_primitives.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/eddsa.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/embedded_curve_ops.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/hashes.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/index.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/schnorr.mdx create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/is_unconstrained.md create mode 100644 
docs/versioned_docs/version-v0.32.0/noir/standard_library/logging.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/merkle_trees.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/options.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/recursion.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/traits.md create mode 100644 docs/versioned_docs/version-v0.32.0/noir/standard_library/zeroed.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/.nojekyll create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/index.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/.nojekyll create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/classes/Noir.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/and.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/blake2s256.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/keccak256.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/sha256.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/xor.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/index.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ErrorWithPayload.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/.nojekyll create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/compile.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/compile_contract.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/createFileManager.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/index.md create mode 100644 
docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs create mode 100644 docs/versioned_docs/version-v0.32.0/reference/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/reference/debugger/_category_.json create mode 100644 docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_known_limitations.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_repl.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_vscode.md create mode 100644 docs/versioned_docs/version-v0.32.0/reference/nargo_commands.md create mode 100644 docs/versioned_docs/version-v0.32.0/tooling/debugger.md create mode 100644 docs/versioned_docs/version-v0.32.0/tooling/language_server.md create mode 100644 docs/versioned_docs/version-v0.32.0/tooling/testing.md create mode 100644 docs/versioned_docs/version-v0.32.0/tutorials/noirjs_app.md create mode 100644 docs/versioned_sidebars/version-v0.32.0-sidebars.json create mode 100644 noir_stdlib/src/collections/umap.nr create mode 100644 noir_stdlib/src/hash/keccak.nr create mode 100644 noir_stdlib/src/meta/quoted.nr create mode 100644 noir_stdlib/src/meta/trait_constraint.nr create mode 100644 noir_stdlib/src/meta/trait_def.nr create mode 100644 test_programs/compile_success_empty/attribute_args/Nargo.toml create mode 100644 test_programs/compile_success_empty/attribute_args/src/main.nr create mode 100644 test_programs/compile_success_empty/comptime_trait_constraint/Nargo.toml create mode 100644 test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr create mode 100644 test_programs/compile_success_empty/function_attribute/Nargo.toml create mode 100644 test_programs/compile_success_empty/function_attribute/src/main.nr create mode 100644 test_programs/compile_success_empty/macros_in_comptime/Nargo.toml create mode 100644 test_programs/compile_success_empty/macros_in_comptime/src/main.nr create mode 100644 test_programs/compile_success_empty/regression_5428/Nargo.toml create mode 100644 test_programs/compile_success_empty/regression_5428/src/main.nr create mode 100644 test_programs/compile_success_empty/trait_as_constraint/Nargo.toml create mode 100644 test_programs/compile_success_empty/trait_as_constraint/src/main.nr create mode 100644 test_programs/compile_success_empty/trait_attribute/Nargo.toml create mode 100644 test_programs/compile_success_empty/trait_attribute/src/main.nr create mode 100644 test_programs/compile_success_empty/trait_call_full_path/Nargo.toml create mode 100644 test_programs/compile_success_empty/trait_call_full_path/src/main.nr rename test_programs/{execution_success => compile_success_empty}/trait_method_mut_self/Nargo.toml (100%) rename test_programs/{execution_success => compile_success_empty}/trait_method_mut_self/Prover.toml (100%) rename test_programs/{execution_success => compile_success_empty}/trait_method_mut_self/src/main.nr (100%) rename test_programs/{execution_success => compile_success_empty}/turbofish_call_func_diff_types/Nargo.toml (100%) rename test_programs/{execution_success => compile_success_empty}/turbofish_call_func_diff_types/Prover.toml (100%) rename test_programs/{execution_success => compile_success_empty}/turbofish_call_func_diff_types/src/main.nr (100%) create mode 100644 test_programs/compile_success_empty/unquote/Nargo.toml create mode 100644 test_programs/compile_success_empty/unquote/src/main.nr create mode 100644 
test_programs/compile_success_no_bug/check_uncostrained_regression/Nargo.toml create mode 100644 test_programs/compile_success_no_bug/check_uncostrained_regression/src/main.nr create mode 100644 test_programs/execution_success/bench_ecdsa_secp256k1/Nargo.toml create mode 100644 test_programs/execution_success/bench_ecdsa_secp256k1/Prover.toml create mode 100644 test_programs/execution_success/bench_ecdsa_secp256k1/src/main.nr create mode 100644 test_programs/execution_success/comptime_slice_equality/Nargo.toml create mode 100644 test_programs/execution_success/comptime_slice_equality/src/main.nr create mode 100644 test_programs/execution_success/poseidon_bn254_hash_width_3/Nargo.toml create mode 100644 test_programs/execution_success/poseidon_bn254_hash_width_3/Prover.toml create mode 100644 test_programs/execution_success/poseidon_bn254_hash_width_3/src/main.nr create mode 100644 test_programs/execution_success/uhashmap/Nargo.toml create mode 100644 test_programs/execution_success/uhashmap/Prover.toml create mode 100644 test_programs/execution_success/uhashmap/src/main.nr create mode 100644 tooling/lsp/src/requests/document_symbol.rs create mode 100644 tooling/lsp/src/requests/hover.rs create mode 100644 tooling/lsp/src/requests/inlay_hint.rs create mode 100644 tooling/lsp/test_programs/document_symbol/Nargo.toml create mode 100644 tooling/lsp/test_programs/document_symbol/src/main.nr create mode 100644 tooling/lsp/test_programs/inlay_hints/Nargo.toml create mode 100644 tooling/lsp/test_programs/inlay_hints/src/main.nr create mode 100644 tooling/lsp/test_programs/workspace/Nargo.toml create mode 100644 tooling/lsp/test_programs/workspace/one/Nargo.toml create mode 100644 tooling/lsp/test_programs/workspace/one/src/lib.nr create mode 100644 tooling/lsp/test_programs/workspace/two/Nargo.toml create mode 100644 tooling/lsp/test_programs/workspace/two/src/lib.nr create mode 100644 tooling/lsp/test_programs/workspace/two/src/other.nr create mode 100644 tooling/nargo_fmt/tests/expected/use_super.nr create mode 100644 tooling/nargo_fmt/tests/input/use_super.nr diff --git a/.aztec-sync-commit b/.aztec-sync-commit index 18e53f2bd3b..379f006b5c0 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -e44ef7042c87d3c78a14413ad7d54e4ed642ad89 +a26419f00f5f082a9ed856346addf6276fbdb4d7 diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index 9f46e6f98e8..5a09a0cf444 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -509,6 +509,59 @@ jobs: working-directory: ./examples/codegen_verifier run: ./test.sh + external-repo-checks: + needs: [build-nargo] + runs-on: ubuntu-22.04 + timeout-minutes: 30 + strategy: + fail-fast: false + matrix: + project: + # - { repo: AztecProtocol/aztec-nr, path: ./ } + # - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts } + # Disabled as aztec-packages requires a setup-step in order to generate a `Nargo.toml` + #- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits } + - { repo: zac-williamson/noir-edwards, path: ./, ref: 0016ce82cd58b6ebb0c43c271725590bcff4e755 } + # TODO: Enable these once they're passing against master again. 
+ # - { repo: zac-williamson/noir-bignum, path: ./, ref: 030c2acce1e6b97c44a3bbbf3429ed96f20d72d3 } + # - { repo: vlayer-xyz/monorepo, path: ./, ref: ee46af88c025863872234eb05d890e1e447907cb } + # - { repo: hashcloak/noir-bigint, path: ./, ref: 940ddba3a5201b508e7b37a2ef643551afcf5ed8 } + + name: Check external repo - ${{ matrix.project.repo }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + repository: ${{ matrix.project.repo }} + path: test-repo + ref: ${{ matrix.project.ref }} + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Remove requirements on compiler version + working-directory: ./test-repo + run: | + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + sed -i '/^compiler_version/d' ./**/Nargo.toml + + - name: Run nargo check + working-directory: ./test-repo/${{ matrix.project.path }} + run: nargo check + # This is a job which depends on all test jobs and reports the overall status. # This allows us to add/remove test jobs without having to update the required workflows. tests-end: diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a30d3e16ba7..8f269310c5d 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "0.31.0", - "acvm-repo": "0.47.0" + ".": "0.32.0", + "acvm-repo": "0.48.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 30678c05fb1..1e32f8364a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,137 @@ # Changelog +## [0.32.0](https://github.com/noir-lang/noir/compare/v0.31.0...v0.32.0) (2024-07-18) + + +### ⚠ BREAKING CHANGES + +* constant inputs for blackbox (https://github.com/AztecProtocol/aztec-packages/pull/7222) +* error on too large integer value ([#5371](https://github.com/noir-lang/noir/issues/5371)) +* rename struct-specific TypeDefinition -> StructDefinition ([#5356](https://github.com/noir-lang/noir/issues/5356)) +* **frontend:** Explicit numeric generics and type kinds ([#5155](https://github.com/noir-lang/noir/issues/5155)) + +### Features + +* `mod.nr` entrypoint ([#5039](https://github.com/noir-lang/noir/issues/5039)) ([076fe0a](https://github.com/noir-lang/noir/commit/076fe0a11869f6975d214c5b9a5ed1e8f7cdbded)) +* `static_assert` builtin ([#5342](https://github.com/noir-lang/noir/issues/5342)) ([ef44270](https://github.com/noir-lang/noir/commit/ef4427051eebf323462cbb1fed205e8b555712a1)) +* Add `map`, `fold`, `reduce`, `any`, and `all` for slices ([#5331](https://github.com/noir-lang/noir/issues/5331)) ([03e25b4](https://github.com/noir-lang/noir/commit/03e25b4577349859c85203fadafc3c63aa4e4dd0)) +* Add CLI argument for debugging comptime blocks ([#5192](https://github.com/noir-lang/noir/issues/5192)) ([0b74a18](https://github.com/noir-lang/noir/commit/0b74a18537b84a0f774d54518fd938f8c11e1baf)) +* Add comptime support for `modulus_*` compiler builtins ([#5530](https://github.com/noir-lang/noir/issues/5530)) ([5bbce79](https://github.com/noir-lang/noir/commit/5bbce7977f72b07336bc8ef09f6acff687f1644a)) +* Add debug codelens action ([#5474](https://github.com/noir-lang/noir/issues/5474)) ([6bcdac4](https://github.com/noir-lang/noir/commit/6bcdac428a48083c9b0d85d42b4d8635a182fda1)) +* Add fuzzer for Noir programs 
([#5251](https://github.com/noir-lang/noir/issues/5251)) ([e100017](https://github.com/noir-lang/noir/commit/e1000176a31140b2abd79c47653cbc4bb1a6808a)) +* Add gate profiler for noir circuits (https://github.com/AztecProtocol/aztec-packages/pull/7004) ([083070e](https://github.com/noir-lang/noir/commit/083070e83e916b68799358b119a9f843223f2686)) +* Add more slice methods to the stdlib ([#5424](https://github.com/noir-lang/noir/issues/5424)) ([4020e77](https://github.com/noir-lang/noir/commit/4020e77145b99861b8bd6027a6823ccf2c39271f)) +* Add opcodes flamegraph and refactor gates flamegraph (https://github.com/AztecProtocol/aztec-packages/pull/7282) ([32029f9](https://github.com/noir-lang/noir/commit/32029f91f6aae4d2f6b08b4ea40481f5837e50bc)) +* Add support for fieldable in events (https://github.com/AztecProtocol/aztec-packages/pull/7310) ([3f71169](https://github.com/noir-lang/noir/commit/3f71169ef4ef8ea8a3dcf355bf360195bfa6772c)) +* Add support for usage of `super` in import paths ([#5502](https://github.com/noir-lang/noir/issues/5502)) ([256509e](https://github.com/noir-lang/noir/commit/256509e5083895b6115b110aedd5a97bd9e74fc0)) +* Add support for wildcard types ([#5275](https://github.com/noir-lang/noir/issues/5275)) ([7445efb](https://github.com/noir-lang/noir/commit/7445efb05165bf7df2f9dfe325abbc42f839364c)) +* Add TraitConstraint type ([#5499](https://github.com/noir-lang/noir/issues/5499)) ([30cb65a](https://github.com/noir-lang/noir/commit/30cb65a12668d192f8da940c32961210a05a962f)) +* Add unquote function ([#5497](https://github.com/noir-lang/noir/issues/5497)) ([2947aba](https://github.com/noir-lang/noir/commit/2947ababcbcc7cbe5d99f6a8ed0dc6ad756ebeb8)) +* Allow arguments to attribute functions ([#5494](https://github.com/noir-lang/noir/issues/5494)) ([a33cafc](https://github.com/noir-lang/noir/commit/a33cafcb7e175ad8b3b80b8c9419a32e009ec702)) +* Allow comptime attributes on traits & functions ([#5496](https://github.com/noir-lang/noir/issues/5496)) ([b59a29e](https://github.com/noir-lang/noir/commit/b59a29e5b246121a4d81e4894a4b10f5df4dd5cf)) +* Apply `no_predicates` in stdlib ([#5454](https://github.com/noir-lang/noir/issues/5454)) ([24d26c0](https://github.com/noir-lang/noir/commit/24d26c05705fabca81b19d789203ebb6fc22ff32)) +* Build releases for `aarch64-unknown-linux-gnu` target ([#5289](https://github.com/noir-lang/noir/issues/5289)) ([f35614a](https://github.com/noir-lang/noir/commit/f35614a43cf8c5cfb244d9f6ffc9d63282a63e6d)) +* Build simple dictionary from inspecting ACIR program ([#5264](https://github.com/noir-lang/noir/issues/5264)) ([508e677](https://github.com/noir-lang/noir/commit/508e677cf2c66ac3427932a18f1661f5f4dc4202)) +* Constant inputs for blackbox (https://github.com/AztecProtocol/aztec-packages/pull/7222) ([fb97bb9](https://github.com/noir-lang/noir/commit/fb97bb9b795c9d7af395b82fd6f0ea8111d59c11)) +* Detect subgraphs that are completely independent from inputs or outputs ([#5402](https://github.com/noir-lang/noir/issues/5402)) ([7ea83a9](https://github.com/noir-lang/noir/commit/7ea83a9de4d3096d27e79faf5d8081b9e9108c4a)) +* Disable nargo color output if stderr is tty ([#5346](https://github.com/noir-lang/noir/issues/5346)) ([554dd6b](https://github.com/noir-lang/noir/commit/554dd6b01b1d3417448d8ecc95165fd0c3ca36e9)) +* Error on too large integer value ([#5371](https://github.com/noir-lang/noir/issues/5371)) ([0c4fffa](https://github.com/noir-lang/noir/commit/0c4fffa86f6605e8f16f973ad42c6927a03fc3cc)) +* **frontend:** Explicit numeric generics and type kinds 
([#5155](https://github.com/noir-lang/noir/issues/5155)) ([d4e03d0](https://github.com/noir-lang/noir/commit/d4e03d07bb00d1ba0f8f43bd0dd9e967a10a28b9)) +* **frontend:** Where clause on impl ([#5320](https://github.com/noir-lang/noir/issues/5320)) ([cf938bc](https://github.com/noir-lang/noir/commit/cf938bc06b7015dae94847f146dc7fd38055f064)) +* Handle ACIR calls in the debugger ([#5051](https://github.com/noir-lang/noir/issues/5051)) ([0541568](https://github.com/noir-lang/noir/commit/0541568b4c209927a70778b895e8f1e50d9b6543)) +* Implement comptime support for `array_len` builtin ([#5272](https://github.com/noir-lang/noir/issues/5272)) ([c91186a](https://github.com/noir-lang/noir/commit/c91186a5c0d9e84767f160e6acd63672b23e8f52)) +* Implement comptime support for `as_slice` builtin ([#5276](https://github.com/noir-lang/noir/issues/5276)) ([9db65d8](https://github.com/noir-lang/noir/commit/9db65d8706ac8b67921f2a73163ab8bee3dfb4e8)) +* Implement trait dispatch in the comptime interpreter ([#5376](https://github.com/noir-lang/noir/issues/5376)) ([8aa5b2e](https://github.com/noir-lang/noir/commit/8aa5b2e4cc69ca6ac1077e8e08c28e9cb30ffb51)) +* Insert trait impls into the program from type annotations ([#5327](https://github.com/noir-lang/noir/issues/5327)) ([efdd818](https://github.com/noir-lang/noir/commit/efdd818a1fc52f31bda4e4519a4ba42887cec87a)) +* Let `should_fail_with` check that the failure reason contains the expected message ([#5319](https://github.com/noir-lang/noir/issues/5319)) ([cb9db55](https://github.com/noir-lang/noir/commit/cb9db55dcf87a45356af362f6f90681dd0e00212)) +* Let LSP always work in a Noir workspace if there's any ([#5461](https://github.com/noir-lang/noir/issues/5461)) ([e0d7833](https://github.com/noir-lang/noir/commit/e0d78334e4b7c7cdd2e4778c3f13dd12ddbef59c)) +* Lsp "find all references" ([#5395](https://github.com/noir-lang/noir/issues/5395)) ([ce1994c](https://github.com/noir-lang/noir/commit/ce1994ca87cb47ec22aa95e566a4e18f0c931ea1)) +* Lsp "go to definition" for modules ([#5406](https://github.com/noir-lang/noir/issues/5406)) ([3e7f1f2](https://github.com/noir-lang/noir/commit/3e7f1f28e5836b164bebdc3bad20d8d91dccd211)) +* LSP document symbol ([#5532](https://github.com/noir-lang/noir/issues/5532)) ([1fabcde](https://github.com/noir-lang/noir/commit/1fabcde195f3965c6b8701eb4e1fed49ec1bde4b)) +* LSP hover ([#5491](https://github.com/noir-lang/noir/issues/5491)) ([010c835](https://github.com/noir-lang/noir/commit/010c835e4ebfdf49ea4e9326abafcdeb587153b6)) +* LSP inlay hints for let and global ([#5510](https://github.com/noir-lang/noir/issues/5510)) ([43f5b8d](https://github.com/noir-lang/noir/commit/43f5b8d8eba5011b163e30a09ad743f893aa841a)) +* Lsp rename struct ([#5380](https://github.com/noir-lang/noir/issues/5380)) ([ee8b0cd](https://github.com/noir-lang/noir/commit/ee8b0cdbc919fbf924c5d42067c0f18db8def2bf)) +* Lsp rename/find-all-references for globals ([#5415](https://github.com/noir-lang/noir/issues/5415)) ([fa9b444](https://github.com/noir-lang/noir/commit/fa9b4446f96155fc08d8087444fc856e86e7ab62)) +* Lsp rename/find-all-references for local variables ([#5439](https://github.com/noir-lang/noir/issues/5439)) ([bb6913a](https://github.com/noir-lang/noir/commit/bb6913ac53620fabd73e24ca1a2b1369225903ec)) +* Lsp rename/find-all-references for struct members ([#5443](https://github.com/noir-lang/noir/issues/5443)) ([a6d213d](https://github.com/noir-lang/noir/commit/a6d213d41aa5a8e31a1d6210f2ea98a501b8f67d)) +* Lsp rename/find-all-references for traits 
([#5409](https://github.com/noir-lang/noir/issues/5409)) ([bf3a75a](https://github.com/noir-lang/noir/commit/bf3a75a3f9c6926baaa1408767dd929de2f8a8f9)) +* Lsp rename/find-all-references for type aliases ([#5414](https://github.com/noir-lang/noir/issues/5414)) ([24c621f](https://github.com/noir-lang/noir/commit/24c621fa96783373ab81da66cb6076e130c4a3a5)) +* **lsp:** Allow function rename ([#4294](https://github.com/noir-lang/noir/issues/4294)) ([3d86dc6](https://github.com/noir-lang/noir/commit/3d86dc6118d083c686b1061a52eb4f113e9a9f7c)) +* Make macros operate on token streams instead of AST nodes ([#5301](https://github.com/noir-lang/noir/issues/5301)) ([7689d59](https://github.com/noir-lang/noir/commit/7689d59aa12003994cea6a3ff4bf87484e41aa6b)) +* **nargo:** Default expression width field in `Nargo.toml` ([#5505](https://github.com/noir-lang/noir/issues/5505)) ([dea6b32](https://github.com/noir-lang/noir/commit/dea6b323fe8db636f5991cfc206ea9222addca30)) +* **optimization:** Deduplicate more instructions ([#5457](https://github.com/noir-lang/noir/issues/5457)) ([c47242a](https://github.com/noir-lang/noir/commit/c47242ab624f4a1d564b3b62bc84a1b4bb5bd549)) +* Prefix operator overload trait dispatch ([#5423](https://github.com/noir-lang/noir/issues/5423)) ([a3bb09e](https://github.com/noir-lang/noir/commit/a3bb09ebe2df473d4a34a34fbfc3966ffbc630cb)) +* Remove duplicated array reads at constant indices ([#5445](https://github.com/noir-lang/noir/issues/5445)) ([82a67a0](https://github.com/noir-lang/noir/commit/82a67a0e9554afeadb1839e6511794b41960f241)) +* Remove redundant `EnableSideEffects` instructions ([#5440](https://github.com/noir-lang/noir/issues/5440)) ([e153ecb](https://github.com/noir-lang/noir/commit/e153ecbe068f5974d5836aedebb8a41c5620d5f7)) +* Rename struct-specific TypeDefinition -> StructDefinition ([#5356](https://github.com/noir-lang/noir/issues/5356)) ([7ffccf7](https://github.com/noir-lang/noir/commit/7ffccf7f060aee30b08ef7fda75d8695f047abd8)) +* Run `comptime` code from annotations on a type definition ([#5256](https://github.com/noir-lang/noir/issues/5256)) ([6cbe6a0](https://github.com/noir-lang/noir/commit/6cbe6a0c830b2992666e0f9bdbc8f66ec41eed84)) +* Skip reading values immediately after it being written into an array ([#5449](https://github.com/noir-lang/noir/issues/5449)) ([141ecdd](https://github.com/noir-lang/noir/commit/141ecddf79b27244a52097577395c7b41cd4d331)) +* **stdlib:** Update stdlib to use explicit numeric generics ([#5306](https://github.com/noir-lang/noir/issues/5306)) ([8456185](https://github.com/noir-lang/noir/commit/8456185078c90cfcb8e63caf147ea6cdbbd786af)) +* Sync from aztec-packages ([#5347](https://github.com/noir-lang/noir/issues/5347)) ([47b621f](https://github.com/noir-lang/noir/commit/47b621fcb8a971b353ce5bda3a506da5504ae9a3)) +* Sync from aztec-packages ([#5377](https://github.com/noir-lang/noir/issues/5377)) ([7b77bbf](https://github.com/noir-lang/noir/commit/7b77bbfc19c51829814149e623257a3424d8e8c2)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/7257) ([32029f9](https://github.com/noir-lang/noir/commit/32029f91f6aae4d2f6b08b4ea40481f5837e50bc)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/7288) ([322f863](https://github.com/noir-lang/noir/commit/322f86392a899fa6e1765cb30b72768211605a9f)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/7308) ([322f863](https://github.com/noir-lang/noir/commit/322f86392a899fa6e1765cb30b72768211605a9f)) +* Sync from noir 
(https://github.com/AztecProtocol/aztec-packages/pull/7332) ([3f71169](https://github.com/noir-lang/noir/commit/3f71169ef4ef8ea8a3dcf355bf360195bfa6772c)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/7352) ([98e72ac](https://github.com/noir-lang/noir/commit/98e72acd72e9a01376cf69d20c539ba9dbe0942b)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/7392) ([fb97bb9](https://github.com/noir-lang/noir/commit/fb97bb9b795c9d7af395b82fd6f0ea8111d59c11)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/7400) ([fb97bb9](https://github.com/noir-lang/noir/commit/fb97bb9b795c9d7af395b82fd6f0ea8111d59c11)) +* Unquote multiple items from annotations ([#5441](https://github.com/noir-lang/noir/issues/5441)) ([be8eac6](https://github.com/noir-lang/noir/commit/be8eac6ff44dac442df9b09de1fd2269a6371d56)) +* Use runtime loops for brillig array initialization ([#5243](https://github.com/noir-lang/noir/issues/5243)) ([0bd22bb](https://github.com/noir-lang/noir/commit/0bd22bb460ff0bf134ce3acf79e018c1e360d31c)) + + +### Bug Fixes + +* Account for the expected kind when resolving turbofish generics ([#5448](https://github.com/noir-lang/noir/issues/5448)) ([82c335d](https://github.com/noir-lang/noir/commit/82c335d3e36365695eccc1c4af63e58dd0633328)) +* Add more thorough check for whether a type is valid when passing it from constrained code to unconstrained code ([#5009](https://github.com/noir-lang/noir/issues/5009)) ([318314d](https://github.com/noir-lang/noir/commit/318314d6dd35674328f534ebd882d4b0e66eab24)) +* Address compiler warnings coming from stdlib ([#5351](https://github.com/noir-lang/noir/issues/5351)) ([758a905](https://github.com/noir-lang/noir/commit/758a905fc740971c995151bfb5f997bcc080397c)) +* Allow importing notes from other contracts and inject them in the macros (https://github.com/AztecProtocol/aztec-packages/pull/7349) ([98e72ac](https://github.com/noir-lang/noir/commit/98e72acd72e9a01376cf69d20c539ba9dbe0942b)) +* Avoid duplicating constant arrays ([#5287](https://github.com/noir-lang/noir/issues/5287)) ([3ef3645](https://github.com/noir-lang/noir/commit/3ef36458fef36b2a2f6cf99b35a43339f3721b27)) +* Avoid panic in type system ([#5332](https://github.com/noir-lang/noir/issues/5332)) ([52d48ff](https://github.com/noir-lang/noir/commit/52d48ff1cf1415fa87fbaf76249b2e0d042de8bd)) +* Avoid unnecessarily splitting expressions with multiplication terms with a shared term ([#5291](https://github.com/noir-lang/noir/issues/5291)) ([19884f1](https://github.com/noir-lang/noir/commit/19884f161dfc7d7ce75dd2c404b8ef39cdad2240)) +* Change panic to error in interpreter ([#5446](https://github.com/noir-lang/noir/issues/5446)) ([d44f882](https://github.com/noir-lang/noir/commit/d44f882be094bf492b1742370fd3896b0c371f59)) +* Complete call stacks with no_predicates ([#5418](https://github.com/noir-lang/noir/issues/5418)) ([df73fe2](https://github.com/noir-lang/noir/commit/df73fe2f345422516bfa01462c0c76d3b924b772)) +* Correct range for overlfowing/underflowing integer assignment ([#5416](https://github.com/noir-lang/noir/issues/5416)) ([30c50f5](https://github.com/noir-lang/noir/commit/30c50f52a6d58163e39006b73f4eb5003afc239b)) +* Correctly detect signed/unsigned integer overflows/underflows ([#5375](https://github.com/noir-lang/noir/issues/5375)) ([0603bd3](https://github.com/noir-lang/noir/commit/0603bd39bff1183725e9aeeaba678c421c7b1daf)) +* **docs:** Fix broken docs link to gihtub ([#5398](https://github.com/noir-lang/noir/issues/5398)) 
([70ebf60](https://github.com/noir-lang/noir/commit/70ebf607e566a95ff7eb2c7a0eee7c36465ba5b4)) +* Don't benchmark the "prove" command as it doesn't exist anymore ([#5323](https://github.com/noir-lang/noir/issues/5323)) ([3bb3b03](https://github.com/noir-lang/noir/commit/3bb3b03aedab9c7abfeb3d3141e04b07b7aeeffb)) +* Don't lazily elaborate functions ([#5282](https://github.com/noir-lang/noir/issues/5282)) ([0ea608f](https://github.com/noir-lang/noir/commit/0ea608f10bdeb26df7dfc17b1a0bad5db1967be8)) +* Don't panic when using undefined variables in the interpreter ([#5381](https://github.com/noir-lang/noir/issues/5381)) ([94d209a](https://github.com/noir-lang/noir/commit/94d209acb70064d5f8a5d427bade18d3cd975be0)) +* Don't type error when calling certain trait impls in the interpreter ([#5471](https://github.com/noir-lang/noir/issues/5471)) ([299703c](https://github.com/noir-lang/noir/commit/299703cf4b87a84257f48f059eb58135ad36265d)) +* Error on empty function bodies ([#5519](https://github.com/noir-lang/noir/issues/5519)) ([6a7f593](https://github.com/noir-lang/noir/commit/6a7f593a04ee1caefd6a19a5cba1c0dbeee22ee1)) +* Error when a local function is called in a comptime context ([#5334](https://github.com/noir-lang/noir/issues/5334)) ([7cd4a4d](https://github.com/noir-lang/noir/commit/7cd4a4d1cde4446c8ace7439ce9f8d42ded70869)) +* Fix incorrect return type being applied to stdlib functions `modulus_be_bytes()`, `modulus_be_bits()`, etc. ([#5278](https://github.com/noir-lang/noir/issues/5278)) ([91a9b72](https://github.com/noir-lang/noir/commit/91a9b725cdb75c08cde888f49e7b8d11257e5de6)) +* Fix issue with unresolved results ([#5453](https://github.com/noir-lang/noir/issues/5453)) ([c4154cb](https://github.com/noir-lang/noir/commit/c4154cbb0e8e56d351d012eb284c34424821e25a)) +* Fix tokenization of unquoted types in macros ([#5326](https://github.com/noir-lang/noir/issues/5326)) ([6673c8b](https://github.com/noir-lang/noir/commit/6673c8b7068a3cd5d5914e1b0ecb9457a7e26bab)) +* Fix usage of `#[abi(tag)]` attribute with elaborator ([#5298](https://github.com/noir-lang/noir/issues/5298)) ([64dd48a](https://github.com/noir-lang/noir/commit/64dd48a19060ccce8758851ea7bcec1f287f1156)) +* Go to definition from `use` statement ([#5390](https://github.com/noir-lang/noir/issues/5390)) ([53bae3b](https://github.com/noir-lang/noir/commit/53bae3b99b2aec0b7d5c65d4f9f60e2eafdd2b1f)) +* Go to definition from aliased use ([#5396](https://github.com/noir-lang/noir/issues/5396)) ([90b135c](https://github.com/noir-lang/noir/commit/90b135c44bdf91603f2e2cdf0ab6f168087bab36)) +* Handle struct with nested arrays in oracle return values ([#5244](https://github.com/noir-lang/noir/issues/5244)) ([a30814f](https://github.com/noir-lang/noir/commit/a30814f1f767bf874cd7e2969f5061c68f16b9a7)) +* ICE when using a comptime let variable in runtime code ([#5391](https://github.com/noir-lang/noir/issues/5391)) ([9fb7e4d](https://github.com/noir-lang/noir/commit/9fb7e4d306041edc5158e2dffd71a19ccc578ac2)) +* Ignore calls to `Intrinsic::AsWitness` during brillig codegen ([#5350](https://github.com/noir-lang/noir/issues/5350)) ([9c11fd2](https://github.com/noir-lang/noir/commit/9c11fd264451a3d2b8617ee5e47e6db3fcb148d8)) +* Implement generic functions in the interpreter ([#5330](https://github.com/noir-lang/noir/issues/5330)) ([d8b9870](https://github.com/noir-lang/noir/commit/d8b9870a991b724ec337b58380b50464ba274d8a)) +* Included argshash computation in public call_interfaces and cleanup 
(https://github.com/AztecProtocol/aztec-packages/pull/7354) ([98e72ac](https://github.com/noir-lang/noir/commit/98e72acd72e9a01376cf69d20c539ba9dbe0942b)) +* Lsp find struct reference in return locations and paths ([#5404](https://github.com/noir-lang/noir/issues/5404)) ([e1bcb73](https://github.com/noir-lang/noir/commit/e1bcb73f8c2e2c6786faeb18b8ce070a2400635d)) +* Lsp hover wasn't always working ([#5515](https://github.com/noir-lang/noir/issues/5515)) ([951e821](https://github.com/noir-lang/noir/commit/951e821a585fe7e0697291cadd4d3c3aa49fd8e4)) +* Lsp struct rename/reference difference ([#5411](https://github.com/noir-lang/noir/issues/5411)) ([580c16d](https://github.com/noir-lang/noir/commit/580c16dd61b044c7ebfb31958822c23ea9b20ed2)) +* Move BigInt modulus checks to runtime in brillig ([#5374](https://github.com/noir-lang/noir/issues/5374)) ([741d339](https://github.com/noir-lang/noir/commit/741d33991f8e2918bf092c354ca56047e0274533)) +* Mutability in the comptime interpreter ([#5517](https://github.com/noir-lang/noir/issues/5517)) ([8cab4ac](https://github.com/noir-lang/noir/commit/8cab4ac0c0275fae691731b6d774e51b633f9478)) +* **nargo_fmt:** Account for spaces before the generic list of a function ([#5303](https://github.com/noir-lang/noir/issues/5303)) ([ec728dd](https://github.com/noir-lang/noir/commit/ec728dd909fce33ab712116f61d672b1ee552fc4)) +* Never panic in LSP inlay hints ([#5534](https://github.com/noir-lang/noir/issues/5534)) ([6b11445](https://github.com/noir-lang/noir/commit/6b11445d9913e2953a96d09f86826aa652a233c4)) +* Prevent `no_predicates` from removing predicates in calling function ([#5452](https://github.com/noir-lang/noir/issues/5452)) ([66244b6](https://github.com/noir-lang/noir/commit/66244b6e5b505f692c7e9a41bdc061c77fd1284d)) +* Remove compile-time error for invalid indices ([#5466](https://github.com/noir-lang/noir/issues/5466)) ([323e0c9](https://github.com/noir-lang/noir/commit/323e0c9d31cdec7d6bef76a418d1b663d9640143)) +* Remove panics in the interpreter when a builtin fails to type check ([#5382](https://github.com/noir-lang/noir/issues/5382)) ([c8161c8](https://github.com/noir-lang/noir/commit/c8161c81a3c6599a3b0380f4c80c730a41a75f22)) +* Replace expects in interpreter with errors ([#5383](https://github.com/noir-lang/noir/issues/5383)) ([ac738b2](https://github.com/noir-lang/noir/commit/ac738b21bc19181b021f909a8e60752dff5ac713)) +* Replace panic in monomorphization with an error ([#5305](https://github.com/noir-lang/noir/issues/5305)) ([49e1b0c](https://github.com/noir-lang/noir/commit/49e1b0c0d45565f3e87469b77f2fef0c283f6ea1)) +* Replace std::HashMap with FxHashMap to fix frontend indeterminism ([#5385](https://github.com/noir-lang/noir/issues/5385)) ([9501495](https://github.com/noir-lang/noir/commit/95014950a9685ee8fdae69457cfe45d6c509172a)) +* Revert PR [#5449](https://github.com/noir-lang/noir/issues/5449) ([#5548](https://github.com/noir-lang/noir/issues/5548)) ([a213c15](https://github.com/noir-lang/noir/commit/a213c15275892581e5d8f7235baf08a6cb137da4)) +* Run macro processors in the elaborator ([#5472](https://github.com/noir-lang/noir/issues/5472)) ([89642c2](https://github.com/noir-lang/noir/commit/89642c220791b2b91bd350960ed6a822103ccca7)) +* Runtime brillig bigint id assignment ([#5369](https://github.com/noir-lang/noir/issues/5369)) ([a8928dd](https://github.com/noir-lang/noir/commit/a8928ddcffcae15babf7aa5aff0e462e4549552e)) +* Skip emission of brillig calls which will never be executed ([#5314](https://github.com/noir-lang/noir/issues/5314)) 
([b859ef9](https://github.com/noir-lang/noir/commit/b859ef90af9944a83f197c26408a55988b143e0e)) +* Truncate flamegraph text to the right (https://github.com/AztecProtocol/aztec-packages/pull/7333) ([3f71169](https://github.com/noir-lang/noir/commit/3f71169ef4ef8ea8a3dcf355bf360195bfa6772c)) +* Update `in_contract` flag before handling function metadata in elaborator ([#5292](https://github.com/noir-lang/noir/issues/5292)) ([4c4ea2d](https://github.com/noir-lang/noir/commit/4c4ea2df0163d4989c922c6a1377e04c2cd0540c)) +* Use proper serialization in `AbiValue` ([#5270](https://github.com/noir-lang/noir/issues/5270)) ([d08b7b9](https://github.com/noir-lang/noir/commit/d08b7b93a981f2e01a3d9754b194c5565ad3a7c2)) + ## [0.31.0](https://github.com/noir-lang/noir/compare/v0.30.0...v0.31.0) (2024-06-17) diff --git a/Cargo.lock b/Cargo.lock index 1df84a80bc7..f6011b705e5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,7 +4,7 @@ version = 3 [[package]] name = "acir" -version = "0.47.0" +version = "0.48.0" dependencies = [ "acir_field", "base64 0.21.2", @@ -26,7 +26,7 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.47.0" +version = "0.48.0" dependencies = [ "ark-bls12-381", "ark-bn254", @@ -40,7 +40,7 @@ dependencies = [ [[package]] name = "acvm" -version = "0.47.0" +version = "0.48.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -55,7 +55,7 @@ dependencies = [ [[package]] name = "acvm_blackbox_solver" -version = "0.47.0" +version = "0.48.0" dependencies = [ "acir", "blake2", @@ -93,7 +93,7 @@ dependencies = [ [[package]] name = "acvm_js" -version = "0.47.0" +version = "0.48.0" dependencies = [ "acvm", "bn254_blackbox_solver", @@ -443,7 +443,7 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "aztec_macros" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "convert_case 0.6.0", @@ -571,7 +571,7 @@ dependencies = [ [[package]] name = "bn254_blackbox_solver" -version = "0.47.0" +version = "0.48.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -589,7 +589,7 @@ dependencies = [ [[package]] name = "brillig" -version = "0.47.0" +version = "0.48.0" dependencies = [ "acir_field", "serde", @@ -597,7 +597,7 @@ dependencies = [ [[package]] name = "brillig_vm" -version = "0.47.0" +version = "0.48.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -1537,7 +1537,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.31.0" +version = "0.32.0" dependencies = [ "codespan-reporting", "iter-extended", @@ -2102,7 +2102,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.31.0" +version = "0.32.0" [[package]] name = "itertools" @@ -2497,7 +2497,7 @@ dependencies = [ [[package]] name = "nargo" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "fm", @@ -2522,7 +2522,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "assert_cmd", @@ -2577,7 +2577,7 @@ dependencies = [ [[package]] name = "nargo_fmt" -version = "0.31.0" +version = "0.32.0" dependencies = [ "bytecount", "noirc_frontend", @@ -2589,11 +2589,12 @@ dependencies = [ [[package]] name = "nargo_toml" -version = "0.31.0" +version = "0.32.0" dependencies = [ "dirs", "fm", "nargo", + "noirc_driver", "noirc_frontend", "semver", "serde", @@ -2668,7 +2669,7 @@ dependencies = [ [[package]] name = "noir_debugger" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "assert_cmd", @@ -2692,7 +2693,7 @@ dependencies = [ [[package]] name = "noir_fuzzer" -version = 
"0.31.0" +version = "0.32.0" dependencies = [ "acvm", "nargo", @@ -2716,7 +2717,7 @@ dependencies = [ [[package]] name = "noir_lsp" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "async-lsp", @@ -2743,7 +2744,7 @@ dependencies = [ [[package]] name = "noir_profiler" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acir", "clap", @@ -2765,7 +2766,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "build-data", @@ -2789,7 +2790,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "iter-extended", @@ -2808,7 +2809,7 @@ dependencies = [ [[package]] name = "noirc_abi_wasm" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "build-data", @@ -2825,11 +2826,11 @@ dependencies = [ [[package]] name = "noirc_arena" -version = "0.31.0" +version = "0.32.0" [[package]] name = "noirc_artifacts" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "codespan-reporting", @@ -2844,7 +2845,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "aztec_macros", @@ -2864,7 +2865,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "base64 0.21.2", @@ -2882,7 +2883,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "bn254_blackbox_solver", @@ -2901,10 +2902,11 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "base64 0.21.2", + "bn254_blackbox_solver", "cfg-if 1.0.0", "chumsky", "fm", @@ -2933,7 +2935,7 @@ dependencies = [ [[package]] name = "noirc_printable_type" -version = "0.31.0" +version = "0.32.0" dependencies = [ "acvm", "iter-extended", diff --git a/Cargo.toml b/Cargo.toml index 8cd5defa121..2263a72d1eb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,7 +42,7 @@ resolver = "2" [workspace.package] # x-release-please-start-version -version = "0.31.0" +version = "0.32.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" @@ -53,13 +53,13 @@ repository = "https://github.com/noir-lang/noir/" [workspace.dependencies] # ACVM workspace dependencies -acir_field = { version = "0.47.0", path = "acvm-repo/acir_field", default-features = false } -acir = { version = "0.47.0", path = "acvm-repo/acir", default-features = false } -acvm = { version = "0.47.0", path = "acvm-repo/acvm" } -brillig = { version = "0.47.0", path = "acvm-repo/brillig", default-features = false } -brillig_vm = { version = "0.47.0", path = "acvm-repo/brillig_vm", default-features = false } -acvm_blackbox_solver = { version = "0.47.0", path = "acvm-repo/blackbox_solver", default-features = false } -bn254_blackbox_solver = { version = "0.47.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false } +acir_field = { version = "0.48.0", path = "acvm-repo/acir_field", default-features = false } +acir = { version = "0.48.0", path = "acvm-repo/acir", default-features = false } +acvm = { version = "0.48.0", path = "acvm-repo/acvm" } +brillig = { version = "0.48.0", path = "acvm-repo/brillig", default-features = false } +brillig_vm = { version = "0.48.0", path = "acvm-repo/brillig_vm", default-features = false } +acvm_blackbox_solver = { version = "0.48.0", path = "acvm-repo/blackbox_solver", default-features = false } +bn254_blackbox_solver = { version = "0.48.0", path = 
"acvm-repo/bn254_blackbox_solver", default-features = false } # Noir compiler workspace dependencies fm = { path = "compiler/fm" } diff --git a/acvm-repo/CHANGELOG.md b/acvm-repo/CHANGELOG.md index 4db36aadec9..971f8ae0448 100644 --- a/acvm-repo/CHANGELOG.md +++ b/acvm-repo/CHANGELOG.md @@ -5,6 +5,139 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.48.0](https://github.com/noir-lang/noir/compare/v0.47.0...v0.48.0) (2024-07-18) + + +### ⚠ BREAKING CHANGES + +* constant inputs for blackbox (https://github.com/AztecProtocol/aztec-packages/pull/7222) +* add session id to foreign call RPC requests ([#5205](https://github.com/noir-lang/noir/issues/5205)) +* restrict noir word size to u32 ([#5180](https://github.com/noir-lang/noir/issues/5180)) +* switch `bb` over to read ACIR from nargo artifacts (https://github.com/AztecProtocol/aztec-packages/pull/6283) +* specify databus arrays for BB (https://github.com/AztecProtocol/aztec-packages/pull/6239) +* remove `Opcode::Brillig` from ACIR (https://github.com/AztecProtocol/aztec-packages/pull/5995) +* AES blackbox (https://github.com/AztecProtocol/aztec-packages/pull/6016) +* Bit shift is restricted to u8 right operand ([#4907](https://github.com/noir-lang/noir/issues/4907)) +* contract interfaces and better function calls (https://github.com/AztecProtocol/aztec-packages/pull/5687) +* change backend width to 4 (https://github.com/AztecProtocol/aztec-packages/pull/5374) +* Use fixed size arrays in black box functions where sizes are known (https://github.com/AztecProtocol/aztec-packages/pull/5620) +* trap with revert data (https://github.com/AztecProtocol/aztec-packages/pull/5732) +* **acir:** BrilligCall opcode (https://github.com/AztecProtocol/aztec-packages/pull/5709) +* remove fixed-length keccak256 (https://github.com/AztecProtocol/aztec-packages/pull/5617) +* storage_layout and `#[aztec(storage)]` (https://github.com/AztecProtocol/aztec-packages/pull/5387) +* **acir:** Add predicate to call opcode (https://github.com/AztecProtocol/aztec-packages/pull/5616) +* contract_abi-exports (https://github.com/AztecProtocol/aztec-packages/pull/5386) +* Brillig typed memory (https://github.com/AztecProtocol/aztec-packages/pull/5395) +* **acir:** Program and witness stack structure (https://github.com/AztecProtocol/aztec-packages/pull/5149) +* automatic NoteInterface and NoteGetterOptions auto select (https://github.com/AztecProtocol/aztec-packages/pull/4508) +* Acir call opcode (https://github.com/AztecProtocol/aztec-packages/pull/4773) +* Support contracts with no constructor (https://github.com/AztecProtocol/aztec-packages/pull/5175) +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) + +### Features + +* `multi_scalar_mul` blackbox func (https://github.com/AztecProtocol/aztec-packages/pull/6097) ([73a635e](https://github.com/noir-lang/noir/commit/73a635e5086cf3407f9846ce39807cd15b4e485a)) +* `variable_base_scalar_mul` blackbox func (https://github.com/AztecProtocol/aztec-packages/pull/6039) ([73a635e](https://github.com/noir-lang/noir/commit/73a635e5086cf3407f9846ce39807cd15b4e485a)) +* Acir call opcode (https://github.com/AztecProtocol/aztec-packages/pull/4773) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* **acir_gen:** Brillig stdlib 
([#4848](https://github.com/noir-lang/noir/issues/4848)) ([0c8175c](https://github.com/noir-lang/noir/commit/0c8175cb539efd9427c73ae5af0d48abe688ebab)) +* **acir_gen:** Fold attribute at compile-time and initial non inlined ACIR (https://github.com/AztecProtocol/aztec-packages/pull/5341) ([a0f7474](https://github.com/noir-lang/noir/commit/a0f7474ae6bd74132efdb945d2eb2383f3913cce)) +* **acir:** Add predicate to call opcode (https://github.com/AztecProtocol/aztec-packages/pull/5616) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* **acir:** BrilligCall opcode (https://github.com/AztecProtocol/aztec-packages/pull/5709) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* **acir:** Program and witness stack structure (https://github.com/AztecProtocol/aztec-packages/pull/5149) ([13eb71b](https://github.com/noir-lang/noir/commit/13eb71b8de44eb6aad9c37943ad06fc73db589f5)) +* Activate return_data in ACIR opcodes ([#5080](https://github.com/noir-lang/noir/issues/5080)) ([c9fda3c](https://github.com/noir-lang/noir/commit/c9fda3c7fd4575bfe7d457e8d4230e071f0129a0)) +* **acvm_js:** Execute program ([#4694](https://github.com/noir-lang/noir/issues/4694)) ([386f6d0](https://github.com/noir-lang/noir/commit/386f6d0a5822912db878285cb001032a7c0ff622)) +* **acvm:** Execute multiple circuits (https://github.com/AztecProtocol/aztec-packages/pull/5380) ([a0f7474](https://github.com/noir-lang/noir/commit/a0f7474ae6bd74132efdb945d2eb2383f3913cce)) +* Add CMOV instruction to brillig and brillig gen (https://github.com/AztecProtocol/aztec-packages/pull/5308) ([13eb71b](https://github.com/noir-lang/noir/commit/13eb71b8de44eb6aad9c37943ad06fc73db589f5)) +* Add native rust implementation of schnorr signature verification ([#5053](https://github.com/noir-lang/noir/issues/5053)) ([fab1c35](https://github.com/noir-lang/noir/commit/fab1c3567d731ea7902635a7a020a8d14f94fd27)) +* Add native rust implementations of pedersen functions ([#4871](https://github.com/noir-lang/noir/issues/4871)) ([fb039f7](https://github.com/noir-lang/noir/commit/fb039f74df23aea39bc0593a5d538d82b4efadf0)) +* Add return values to aztec fns (https://github.com/AztecProtocol/aztec-packages/pull/5389) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Add session id to foreign call RPC requests ([#5205](https://github.com/noir-lang/noir/issues/5205)) ([14adafc](https://github.com/noir-lang/noir/commit/14adafc965fa9c833e096ec037e086aae67703ad)) +* AES blackbox (https://github.com/AztecProtocol/aztec-packages/pull/6016) ([73a635e](https://github.com/noir-lang/noir/commit/73a635e5086cf3407f9846ce39807cd15b4e485a)) +* Automatic NoteInterface and NoteGetterOptions auto select (https://github.com/AztecProtocol/aztec-packages/pull/4508) ([13eb71b](https://github.com/noir-lang/noir/commit/13eb71b8de44eb6aad9c37943ad06fc73db589f5)) +* **avm:** Brillig CONST of size > u128 (https://github.com/AztecProtocol/aztec-packages/pull/5217) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* **avm:** Integrate AVM with initializers (https://github.com/AztecProtocol/aztec-packages/pull/5469) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Bit shift is restricted to u8 right operand ([#4907](https://github.com/noir-lang/noir/issues/4907)) ([c4b0369](https://github.com/noir-lang/noir/commit/c4b03691feca17ef268acab523292f3051f672ea)) +* 
Brillig heterogeneous memory cells (https://github.com/AztecProtocol/aztec-packages/pull/5608) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Brillig IR refactor (https://github.com/AztecProtocol/aztec-packages/pull/5233) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Brillig pointer codegen and execution (https://github.com/AztecProtocol/aztec-packages/pull/5737) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Brillig typed memory (https://github.com/AztecProtocol/aztec-packages/pull/5395) ([0bc18c4](https://github.com/noir-lang/noir/commit/0bc18c4f78171590dd58bded959f68f53a44cc8c)) +* Change backend width to 4 (https://github.com/AztecProtocol/aztec-packages/pull/5374) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Check initializer msg.sender matches deployer from address preimage (https://github.com/AztecProtocol/aztec-packages/pull/5222) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Constant inputs for blackbox (https://github.com/AztecProtocol/aztec-packages/pull/7222) ([fb97bb9](https://github.com/noir-lang/noir/commit/fb97bb9b795c9d7af395b82fd6f0ea8111d59c11)) +* Contract interfaces and better function calls (https://github.com/AztecProtocol/aztec-packages/pull/5687) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Contract_abi-exports (https://github.com/AztecProtocol/aztec-packages/pull/5386) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Dynamic assertion payloads v2 (https://github.com/AztecProtocol/aztec-packages/pull/5949) ([73a635e](https://github.com/noir-lang/noir/commit/73a635e5086cf3407f9846ce39807cd15b4e485a)) +* Handle `BrilligCall` opcodes in the debugger ([#4897](https://github.com/noir-lang/noir/issues/4897)) ([b380dc4](https://github.com/noir-lang/noir/commit/b380dc44de5c9f8de278ece3d531ebbc2c9238ba)) +* Impl of missing functionality in new key store (https://github.com/AztecProtocol/aztec-packages/pull/5750) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Increase default expression width to 4 ([#4995](https://github.com/noir-lang/noir/issues/4995)) ([f01d309](https://github.com/noir-lang/noir/commit/f01d3090759a5ff0f1f83c5616d22890c6bd76be)) +* Initial Earthly CI (https://github.com/AztecProtocol/aztec-packages/pull/5069) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* Make ACVM generic across fields ([#5114](https://github.com/noir-lang/noir/issues/5114)) ([70f374c](https://github.com/noir-lang/noir/commit/70f374c06642962d8f2b95b80f8c938fcf7761d7)) +* Move abi demonomorphizer to noir_codegen and use noir_codegen in protocol types (https://github.com/AztecProtocol/aztec-packages/pull/6302) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Move to_radix to a blackbox (https://github.com/AztecProtocol/aztec-packages/pull/6294) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* **nargo:** Handle call stacks for multiple Acir calls 
([#4711](https://github.com/noir-lang/noir/issues/4711)) ([5b23171](https://github.com/noir-lang/noir/commit/5b231714740447d82cde7cdbe65d4a8b46a31df4)) +* **nargo:** Hidden option to show contract artifact paths written by `nargo compile` (https://github.com/AztecProtocol/aztec-packages/pull/6131) ([ff67e14](https://github.com/noir-lang/noir/commit/ff67e145d086bf6fdf58fb5e57927033e52e03d3)) +* New brillig field operations and refactor of binary operations (https://github.com/AztecProtocol/aztec-packages/pull/5208) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Parsing non-string assertion payloads in noir js (https://github.com/AztecProtocol/aztec-packages/pull/6079) ([73a635e](https://github.com/noir-lang/noir/commit/73a635e5086cf3407f9846ce39807cd15b4e485a)) +* Private Kernel Recursion (https://github.com/AztecProtocol/aztec-packages/pull/6278) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Proper padding in ts AES and constrained AES in body and header computations (https://github.com/AztecProtocol/aztec-packages/pull/6269) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Remove conditional compilation of `bn254_blackbox_solver` ([#5058](https://github.com/noir-lang/noir/issues/5058)) ([9420d7c](https://github.com/noir-lang/noir/commit/9420d7c2ba6bbbf5ecb9a066837c505310955b6c)) +* Remove external blackbox solver from acir simulator (https://github.com/AztecProtocol/aztec-packages/pull/6586) ([a40a9a5](https://github.com/noir-lang/noir/commit/a40a9a55571deed386688fb84260bdf2794d4d38)) +* Restore hashing args via slice for performance (https://github.com/AztecProtocol/aztec-packages/pull/5539) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Restrict noir word size to u32 ([#5180](https://github.com/noir-lang/noir/issues/5180)) ([bdb2bc6](https://github.com/noir-lang/noir/commit/bdb2bc608ea8fd52d46545a38b68dd2558b28110)) +* Separate runtimes of SSA functions before inlining ([#5121](https://github.com/noir-lang/noir/issues/5121)) ([69eca9b](https://github.com/noir-lang/noir/commit/69eca9b8671fa54192bef814dd584fdb5387a5f7)) +* Set aztec private functions to be recursive (https://github.com/AztecProtocol/aztec-packages/pull/6192) ([73a635e](https://github.com/noir-lang/noir/commit/73a635e5086cf3407f9846ce39807cd15b4e485a)) +* Signed integer division and modulus in brillig gen (https://github.com/AztecProtocol/aztec-packages/pull/5279) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* **simulator:** Fetch return values at circuit execution (https://github.com/AztecProtocol/aztec-packages/pull/5642) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Specify databus arrays for BB (https://github.com/AztecProtocol/aztec-packages/pull/6239) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Storage_layout and `#[aztec(storage)]` (https://github.com/AztecProtocol/aztec-packages/pull/5387) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Support contracts with no constructor (https://github.com/AztecProtocol/aztec-packages/pull/5175) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Switch `bb` over to read ACIR from nargo artifacts 
(https://github.com/AztecProtocol/aztec-packages/pull/6283) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Sync from aztec-packages ([#4483](https://github.com/noir-lang/noir/issues/4483)) ([fe8f277](https://github.com/noir-lang/noir/commit/fe8f2776ccfde29209a2c3fc162311c99e4f59be)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5234) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5286) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5572) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5619) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5697) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5794) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5814) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5935) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5955) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5999) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/6280) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/6332) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/6573) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/7392) ([fb97bb9](https://github.com/noir-lang/noir/commit/fb97bb9b795c9d7af395b82fd6f0ea8111d59c11)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/7400) ([fb97bb9](https://github.com/noir-lang/noir/commit/fb97bb9b795c9d7af395b82fd6f0ea8111d59c11)) +* ToRadix BB + avm transpiler support (https://github.com/AztecProtocol/aztec-packages/pull/6330) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Trap with revert data (https://github.com/AztecProtocol/aztec-packages/pull/5732) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Use fixed size arrays in black box functions where sizes are known (https://github.com/AztecProtocol/aztec-packages/pull/5620) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Variable length 
returns (https://github.com/AztecProtocol/aztec-packages/pull/5633) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) + + +### Bug Fixes + +* **acvm:** Mark outputs of Opcode::Call solvable ([#4708](https://github.com/noir-lang/noir/issues/4708)) ([8fea405](https://github.com/noir-lang/noir/commit/8fea40576f262bd5bb588923c0660d8967404e56)) +* Add support for nested arrays returned by oracles ([#5132](https://github.com/noir-lang/noir/issues/5132)) ([f846879](https://github.com/noir-lang/noir/commit/f846879dd038328bd0a1d39a72b448ef52a1002b)) +* Avoid huge unrolling in hash_args (https://github.com/AztecProtocol/aztec-packages/pull/5703) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Avoid unnecessarily splitting expressions with multiplication terms with a shared term ([#5291](https://github.com/noir-lang/noir/issues/5291)) ([19884f1](https://github.com/noir-lang/noir/commit/19884f161dfc7d7ce75dd2c404b8ef39cdad2240)) +* Catch panics from EC point creation (e.g. the point is at infinity) ([#4790](https://github.com/noir-lang/noir/issues/4790)) ([645dba1](https://github.com/noir-lang/noir/commit/645dba192f16ef34018828186ffb297422a8dc73)) +* Check for public args in aztec functions (https://github.com/AztecProtocol/aztec-packages/pull/6355) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) +* Don't reuse brillig with slice arguments (https://github.com/AztecProtocol/aztec-packages/pull/5800) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Handle struct with nested arrays in oracle return values ([#5244](https://github.com/noir-lang/noir/issues/5244)) ([a30814f](https://github.com/noir-lang/noir/commit/a30814f1f767bf874cd7e2969f5061c68f16b9a7)) +* Issue 4682 and add solver for unconstrained bigintegers ([#4729](https://github.com/noir-lang/noir/issues/4729)) ([e4d33c1](https://github.com/noir-lang/noir/commit/e4d33c126a2795d9aaa6048d4e91b64cb4bbe4f2)) +* Move BigInt modulus checks to runtime in brillig ([#5374](https://github.com/noir-lang/noir/issues/5374)) ([741d339](https://github.com/noir-lang/noir/commit/741d33991f8e2918bf092c354ca56047e0274533)) +* Noir test incorrect reporting (https://github.com/AztecProtocol/aztec-packages/pull/4925) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* Proper field inversion for bigints ([#4802](https://github.com/noir-lang/noir/issues/4802)) ([b46d0e3](https://github.com/noir-lang/noir/commit/b46d0e39f4252f8bbaa987f88d112e4c233b3d61)) +* Runtime brillig bigint id assignment ([#5369](https://github.com/noir-lang/noir/issues/5369)) ([a8928dd](https://github.com/noir-lang/noir/commit/a8928ddcffcae15babf7aa5aff0e462e4549552e)) +* Temporarily revert to_radix blackbox (https://github.com/AztecProtocol/aztec-packages/pull/6304) ([436bbda](https://github.com/noir-lang/noir/commit/436bbdaadb2a294b94f93e53d7d3cad3859c7e46)) + + +### Miscellaneous Chores + +* Remove `Opcode::Brillig` from ACIR (https://github.com/AztecProtocol/aztec-packages/pull/5995) ([73a635e](https://github.com/noir-lang/noir/commit/73a635e5086cf3407f9846ce39807cd15b4e485a)) +* Remove fixed-length keccak256 (https://github.com/AztecProtocol/aztec-packages/pull/5617) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) + ## [0.47.0](https://github.com/noir-lang/noir/compare/v0.46.0...v0.47.0) (2024-06-17) diff --git 
a/acvm-repo/acir/Cargo.toml b/acvm-repo/acir/Cargo.toml index 7a8f10c98ef..68c28dccc66 100644 --- a/acvm-repo/acir/Cargo.toml +++ b/acvm-repo/acir/Cargo.toml @@ -2,7 +2,7 @@ name = "acir" description = "ACIR is the IR that the VM processes, it is analogous to LLVM IR" # x-release-please-start-version -version = "0.47.0" +version = "0.48.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acir/codegen/acir.cpp b/acvm-repo/acir/codegen/acir.cpp index c1160930571..232b3ba12cf 100644 --- a/acvm-repo/acir/codegen/acir.cpp +++ b/acvm-repo/acir/codegen/acir.cpp @@ -5,704 +5,819 @@ namespace Program { - struct Witness { - uint32_t value; + struct BinaryFieldOp { - friend bool operator==(const Witness&, const Witness&); - std::vector bincodeSerialize() const; - static Witness bincodeDeserialize(std::vector); - }; + struct Add { + friend bool operator==(const Add&, const Add&); + std::vector bincodeSerialize() const; + static Add bincodeDeserialize(std::vector); + }; - struct ConstantOrWitnessEnum { + struct Sub { + friend bool operator==(const Sub&, const Sub&); + std::vector bincodeSerialize() const; + static Sub bincodeDeserialize(std::vector); + }; - struct Constant { - std::string value; + struct Mul { + friend bool operator==(const Mul&, const Mul&); + std::vector bincodeSerialize() const; + static Mul bincodeDeserialize(std::vector); + }; - friend bool operator==(const Constant&, const Constant&); + struct Div { + friend bool operator==(const Div&, const Div&); std::vector bincodeSerialize() const; - static Constant bincodeDeserialize(std::vector); + static Div bincodeDeserialize(std::vector); }; - struct Witness { - Program::Witness value; + struct IntegerDiv { + friend bool operator==(const IntegerDiv&, const IntegerDiv&); + std::vector bincodeSerialize() const; + static IntegerDiv bincodeDeserialize(std::vector); + }; - friend bool operator==(const Witness&, const Witness&); + struct Equals { + friend bool operator==(const Equals&, const Equals&); std::vector bincodeSerialize() const; - static Witness bincodeDeserialize(std::vector); + static Equals bincodeDeserialize(std::vector); }; - std::variant value; + struct LessThan { + friend bool operator==(const LessThan&, const LessThan&); + std::vector bincodeSerialize() const; + static LessThan bincodeDeserialize(std::vector); + }; - friend bool operator==(const ConstantOrWitnessEnum&, const ConstantOrWitnessEnum&); - std::vector bincodeSerialize() const; - static ConstantOrWitnessEnum bincodeDeserialize(std::vector); - }; + struct LessThanEquals { + friend bool operator==(const LessThanEquals&, const LessThanEquals&); + std::vector bincodeSerialize() const; + static LessThanEquals bincodeDeserialize(std::vector); + }; - struct FunctionInput { - Program::ConstantOrWitnessEnum input; - uint32_t num_bits; + std::variant value; - friend bool operator==(const FunctionInput&, const FunctionInput&); + friend bool operator==(const BinaryFieldOp&, const BinaryFieldOp&); std::vector bincodeSerialize() const; - static FunctionInput bincodeDeserialize(std::vector); + static BinaryFieldOp bincodeDeserialize(std::vector); }; - struct BlackBoxFuncCall { - - struct AES128Encrypt { - std::vector inputs; - std::array iv; - std::array key; - std::vector outputs; + struct BinaryIntOp { - friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); + struct Add { + friend bool operator==(const Add&, const Add&); std::vector bincodeSerialize() const; - static AES128Encrypt bincodeDeserialize(std::vector); + 
static Add bincodeDeserialize(std::vector); }; - struct AND { - Program::FunctionInput lhs; - Program::FunctionInput rhs; - Program::Witness output; - - friend bool operator==(const AND&, const AND&); + struct Sub { + friend bool operator==(const Sub&, const Sub&); std::vector bincodeSerialize() const; - static AND bincodeDeserialize(std::vector); + static Sub bincodeDeserialize(std::vector); }; - struct XOR { - Program::FunctionInput lhs; - Program::FunctionInput rhs; - Program::Witness output; - - friend bool operator==(const XOR&, const XOR&); + struct Mul { + friend bool operator==(const Mul&, const Mul&); std::vector bincodeSerialize() const; - static XOR bincodeDeserialize(std::vector); + static Mul bincodeDeserialize(std::vector); }; - struct RANGE { - Program::FunctionInput input; - - friend bool operator==(const RANGE&, const RANGE&); + struct Div { + friend bool operator==(const Div&, const Div&); std::vector bincodeSerialize() const; - static RANGE bincodeDeserialize(std::vector); + static Div bincodeDeserialize(std::vector); }; - struct SHA256 { - std::vector inputs; - std::array outputs; - - friend bool operator==(const SHA256&, const SHA256&); + struct Equals { + friend bool operator==(const Equals&, const Equals&); std::vector bincodeSerialize() const; - static SHA256 bincodeDeserialize(std::vector); + static Equals bincodeDeserialize(std::vector); }; - struct Blake2s { - std::vector inputs; - std::array outputs; - - friend bool operator==(const Blake2s&, const Blake2s&); + struct LessThan { + friend bool operator==(const LessThan&, const LessThan&); std::vector bincodeSerialize() const; - static Blake2s bincodeDeserialize(std::vector); + static LessThan bincodeDeserialize(std::vector); }; - struct Blake3 { - std::vector inputs; - std::array outputs; - - friend bool operator==(const Blake3&, const Blake3&); + struct LessThanEquals { + friend bool operator==(const LessThanEquals&, const LessThanEquals&); std::vector bincodeSerialize() const; - static Blake3 bincodeDeserialize(std::vector); + static LessThanEquals bincodeDeserialize(std::vector); }; - struct SchnorrVerify { - Program::FunctionInput public_key_x; - Program::FunctionInput public_key_y; - std::array signature; - std::vector message; - Program::Witness output; - - friend bool operator==(const SchnorrVerify&, const SchnorrVerify&); + struct And { + friend bool operator==(const And&, const And&); std::vector bincodeSerialize() const; - static SchnorrVerify bincodeDeserialize(std::vector); + static And bincodeDeserialize(std::vector); }; - struct PedersenCommitment { - std::vector inputs; - uint32_t domain_separator; - std::array outputs; - - friend bool operator==(const PedersenCommitment&, const PedersenCommitment&); + struct Or { + friend bool operator==(const Or&, const Or&); std::vector bincodeSerialize() const; - static PedersenCommitment bincodeDeserialize(std::vector); + static Or bincodeDeserialize(std::vector); }; - struct PedersenHash { - std::vector inputs; - uint32_t domain_separator; - Program::Witness output; - - friend bool operator==(const PedersenHash&, const PedersenHash&); + struct Xor { + friend bool operator==(const Xor&, const Xor&); std::vector bincodeSerialize() const; - static PedersenHash bincodeDeserialize(std::vector); + static Xor bincodeDeserialize(std::vector); }; - struct EcdsaSecp256k1 { - std::array public_key_x; - std::array public_key_y; - std::array signature; - std::array hashed_message; - Program::Witness output; + struct Shl { + friend bool operator==(const Shl&, const Shl&); 
+ std::vector bincodeSerialize() const; + static Shl bincodeDeserialize(std::vector); + }; - friend bool operator==(const EcdsaSecp256k1&, const EcdsaSecp256k1&); + struct Shr { + friend bool operator==(const Shr&, const Shr&); std::vector bincodeSerialize() const; - static EcdsaSecp256k1 bincodeDeserialize(std::vector); + static Shr bincodeDeserialize(std::vector); }; - struct EcdsaSecp256r1 { - std::array public_key_x; - std::array public_key_y; - std::array signature; - std::array hashed_message; - Program::Witness output; + std::variant value; - friend bool operator==(const EcdsaSecp256r1&, const EcdsaSecp256r1&); + friend bool operator==(const BinaryIntOp&, const BinaryIntOp&); + std::vector bincodeSerialize() const; + static BinaryIntOp bincodeDeserialize(std::vector); + }; + + struct IntegerBitSize { + + struct U0 { + friend bool operator==(const U0&, const U0&); std::vector bincodeSerialize() const; - static EcdsaSecp256r1 bincodeDeserialize(std::vector); + static U0 bincodeDeserialize(std::vector); }; - struct MultiScalarMul { - std::vector points; - std::vector scalars; - std::array outputs; + struct U1 { + friend bool operator==(const U1&, const U1&); + std::vector bincodeSerialize() const; + static U1 bincodeDeserialize(std::vector); + }; - friend bool operator==(const MultiScalarMul&, const MultiScalarMul&); + struct U8 { + friend bool operator==(const U8&, const U8&); std::vector bincodeSerialize() const; - static MultiScalarMul bincodeDeserialize(std::vector); + static U8 bincodeDeserialize(std::vector); }; - struct EmbeddedCurveAdd { - std::array input1; - std::array input2; - std::array outputs; + struct U16 { + friend bool operator==(const U16&, const U16&); + std::vector bincodeSerialize() const; + static U16 bincodeDeserialize(std::vector); + }; - friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&); + struct U32 { + friend bool operator==(const U32&, const U32&); std::vector bincodeSerialize() const; - static EmbeddedCurveAdd bincodeDeserialize(std::vector); + static U32 bincodeDeserialize(std::vector); }; - struct Keccak256 { - std::vector inputs; - Program::FunctionInput var_message_size; - std::array outputs; + struct U64 { + friend bool operator==(const U64&, const U64&); + std::vector bincodeSerialize() const; + static U64 bincodeDeserialize(std::vector); + }; - friend bool operator==(const Keccak256&, const Keccak256&); + struct U128 { + friend bool operator==(const U128&, const U128&); std::vector bincodeSerialize() const; - static Keccak256 bincodeDeserialize(std::vector); + static U128 bincodeDeserialize(std::vector); }; - struct Keccakf1600 { - std::array inputs; - std::array outputs; + std::variant value; - friend bool operator==(const Keccakf1600&, const Keccakf1600&); + friend bool operator==(const IntegerBitSize&, const IntegerBitSize&); + std::vector bincodeSerialize() const; + static IntegerBitSize bincodeDeserialize(std::vector); + }; + + struct BitSize { + + struct Field { + friend bool operator==(const Field&, const Field&); std::vector bincodeSerialize() const; - static Keccakf1600 bincodeDeserialize(std::vector); + static Field bincodeDeserialize(std::vector); }; - struct RecursiveAggregation { - std::vector verification_key; - std::vector proof; - std::vector public_inputs; - Program::FunctionInput key_hash; + struct Integer { + Program::IntegerBitSize value; - friend bool operator==(const RecursiveAggregation&, const RecursiveAggregation&); + friend bool operator==(const Integer&, const Integer&); std::vector 
bincodeSerialize() const; - static RecursiveAggregation bincodeDeserialize(std::vector); + static Integer bincodeDeserialize(std::vector); }; - struct BigIntAdd { - uint32_t lhs; - uint32_t rhs; - uint32_t output; + std::variant value; - friend bool operator==(const BigIntAdd&, const BigIntAdd&); + friend bool operator==(const BitSize&, const BitSize&); + std::vector bincodeSerialize() const; + static BitSize bincodeDeserialize(std::vector); + }; + + struct MemoryAddress { + uint64_t value; + + friend bool operator==(const MemoryAddress&, const MemoryAddress&); + std::vector bincodeSerialize() const; + static MemoryAddress bincodeDeserialize(std::vector); + }; + + struct HeapArray { + Program::MemoryAddress pointer; + uint64_t size; + + friend bool operator==(const HeapArray&, const HeapArray&); + std::vector bincodeSerialize() const; + static HeapArray bincodeDeserialize(std::vector); + }; + + struct HeapVector { + Program::MemoryAddress pointer; + Program::MemoryAddress size; + + friend bool operator==(const HeapVector&, const HeapVector&); + std::vector bincodeSerialize() const; + static HeapVector bincodeDeserialize(std::vector); + }; + + struct BlackBoxOp { + + struct AES128Encrypt { + Program::HeapVector inputs; + Program::HeapArray iv; + Program::HeapArray key; + Program::HeapVector outputs; + + friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); std::vector bincodeSerialize() const; - static BigIntAdd bincodeDeserialize(std::vector); + static AES128Encrypt bincodeDeserialize(std::vector); }; - struct BigIntSub { - uint32_t lhs; - uint32_t rhs; - uint32_t output; + struct Sha256 { + Program::HeapVector message; + Program::HeapArray output; - friend bool operator==(const BigIntSub&, const BigIntSub&); + friend bool operator==(const Sha256&, const Sha256&); std::vector bincodeSerialize() const; - static BigIntSub bincodeDeserialize(std::vector); + static Sha256 bincodeDeserialize(std::vector); }; - struct BigIntMul { - uint32_t lhs; - uint32_t rhs; - uint32_t output; + struct Blake2s { + Program::HeapVector message; + Program::HeapArray output; - friend bool operator==(const BigIntMul&, const BigIntMul&); + friend bool operator==(const Blake2s&, const Blake2s&); std::vector bincodeSerialize() const; - static BigIntMul bincodeDeserialize(std::vector); + static Blake2s bincodeDeserialize(std::vector); }; - struct BigIntDiv { - uint32_t lhs; - uint32_t rhs; - uint32_t output; + struct Blake3 { + Program::HeapVector message; + Program::HeapArray output; - friend bool operator==(const BigIntDiv&, const BigIntDiv&); + friend bool operator==(const Blake3&, const Blake3&); std::vector bincodeSerialize() const; - static BigIntDiv bincodeDeserialize(std::vector); + static Blake3 bincodeDeserialize(std::vector); }; - struct BigIntFromLeBytes { - std::vector inputs; - std::vector modulus; - uint32_t output; + struct Keccak256 { + Program::HeapVector message; + Program::HeapArray output; - friend bool operator==(const BigIntFromLeBytes&, const BigIntFromLeBytes&); + friend bool operator==(const Keccak256&, const Keccak256&); std::vector bincodeSerialize() const; - static BigIntFromLeBytes bincodeDeserialize(std::vector); + static Keccak256 bincodeDeserialize(std::vector); }; - struct BigIntToLeBytes { - uint32_t input; - std::vector outputs; + struct Keccakf1600 { + Program::HeapVector message; + Program::HeapArray output; - friend bool operator==(const BigIntToLeBytes&, const BigIntToLeBytes&); + friend bool operator==(const Keccakf1600&, const Keccakf1600&); std::vector 
bincodeSerialize() const; - static BigIntToLeBytes bincodeDeserialize(std::vector); + static Keccakf1600 bincodeDeserialize(std::vector); }; - struct Poseidon2Permutation { - std::vector inputs; - std::vector outputs; - uint32_t len; + struct EcdsaSecp256k1 { + Program::HeapVector hashed_msg; + Program::HeapArray public_key_x; + Program::HeapArray public_key_y; + Program::HeapArray signature; + Program::MemoryAddress result; - friend bool operator==(const Poseidon2Permutation&, const Poseidon2Permutation&); + friend bool operator==(const EcdsaSecp256k1&, const EcdsaSecp256k1&); std::vector bincodeSerialize() const; - static Poseidon2Permutation bincodeDeserialize(std::vector); + static EcdsaSecp256k1 bincodeDeserialize(std::vector); }; - struct Sha256Compression { - std::array inputs; - std::array hash_values; - std::array outputs; + struct EcdsaSecp256r1 { + Program::HeapVector hashed_msg; + Program::HeapArray public_key_x; + Program::HeapArray public_key_y; + Program::HeapArray signature; + Program::MemoryAddress result; - friend bool operator==(const Sha256Compression&, const Sha256Compression&); + friend bool operator==(const EcdsaSecp256r1&, const EcdsaSecp256r1&); std::vector bincodeSerialize() const; - static Sha256Compression bincodeDeserialize(std::vector); + static EcdsaSecp256r1 bincodeDeserialize(std::vector); }; - std::variant value; + struct SchnorrVerify { + Program::MemoryAddress public_key_x; + Program::MemoryAddress public_key_y; + Program::HeapVector message; + Program::HeapVector signature; + Program::MemoryAddress result; - friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); - std::vector bincodeSerialize() const; - static BlackBoxFuncCall bincodeDeserialize(std::vector); - }; + friend bool operator==(const SchnorrVerify&, const SchnorrVerify&); + std::vector bincodeSerialize() const; + static SchnorrVerify bincodeDeserialize(std::vector); + }; - struct BlockId { - uint32_t value; + struct PedersenCommitment { + Program::HeapVector inputs; + Program::MemoryAddress domain_separator; + Program::HeapArray output; - friend bool operator==(const BlockId&, const BlockId&); - std::vector bincodeSerialize() const; - static BlockId bincodeDeserialize(std::vector); - }; + friend bool operator==(const PedersenCommitment&, const PedersenCommitment&); + std::vector bincodeSerialize() const; + static PedersenCommitment bincodeDeserialize(std::vector); + }; - struct BlockType { + struct PedersenHash { + Program::HeapVector inputs; + Program::MemoryAddress domain_separator; + Program::MemoryAddress output; - struct Memory { - friend bool operator==(const Memory&, const Memory&); + friend bool operator==(const PedersenHash&, const PedersenHash&); std::vector bincodeSerialize() const; - static Memory bincodeDeserialize(std::vector); + static PedersenHash bincodeDeserialize(std::vector); }; - struct CallData { - friend bool operator==(const CallData&, const CallData&); + struct MultiScalarMul { + Program::HeapVector points; + Program::HeapVector scalars; + Program::HeapArray outputs; + + friend bool operator==(const MultiScalarMul&, const MultiScalarMul&); std::vector bincodeSerialize() const; - static CallData bincodeDeserialize(std::vector); + static MultiScalarMul bincodeDeserialize(std::vector); }; - struct ReturnData { - friend bool operator==(const ReturnData&, const ReturnData&); + struct EmbeddedCurveAdd { + Program::MemoryAddress input1_x; + Program::MemoryAddress input1_y; + Program::MemoryAddress input1_infinite; + Program::MemoryAddress input2_x; + 
Program::MemoryAddress input2_y; + Program::MemoryAddress input2_infinite; + Program::HeapArray result; + + friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&); std::vector bincodeSerialize() const; - static ReturnData bincodeDeserialize(std::vector); + static EmbeddedCurveAdd bincodeDeserialize(std::vector); }; - std::variant value; - - friend bool operator==(const BlockType&, const BlockType&); - std::vector bincodeSerialize() const; - static BlockType bincodeDeserialize(std::vector); - }; + struct BigIntAdd { + Program::MemoryAddress lhs; + Program::MemoryAddress rhs; + Program::MemoryAddress output; - struct Expression { - std::vector> mul_terms; - std::vector> linear_combinations; - std::string q_c; + friend bool operator==(const BigIntAdd&, const BigIntAdd&); + std::vector bincodeSerialize() const; + static BigIntAdd bincodeDeserialize(std::vector); + }; - friend bool operator==(const Expression&, const Expression&); - std::vector bincodeSerialize() const; - static Expression bincodeDeserialize(std::vector); - }; + struct BigIntSub { + Program::MemoryAddress lhs; + Program::MemoryAddress rhs; + Program::MemoryAddress output; - struct BrilligInputs { + friend bool operator==(const BigIntSub&, const BigIntSub&); + std::vector bincodeSerialize() const; + static BigIntSub bincodeDeserialize(std::vector); + }; - struct Single { - Program::Expression value; + struct BigIntMul { + Program::MemoryAddress lhs; + Program::MemoryAddress rhs; + Program::MemoryAddress output; - friend bool operator==(const Single&, const Single&); + friend bool operator==(const BigIntMul&, const BigIntMul&); std::vector bincodeSerialize() const; - static Single bincodeDeserialize(std::vector); + static BigIntMul bincodeDeserialize(std::vector); }; - struct Array { - std::vector value; + struct BigIntDiv { + Program::MemoryAddress lhs; + Program::MemoryAddress rhs; + Program::MemoryAddress output; - friend bool operator==(const Array&, const Array&); + friend bool operator==(const BigIntDiv&, const BigIntDiv&); std::vector bincodeSerialize() const; - static Array bincodeDeserialize(std::vector); + static BigIntDiv bincodeDeserialize(std::vector); }; - struct MemoryArray { - Program::BlockId value; + struct BigIntFromLeBytes { + Program::HeapVector inputs; + Program::HeapVector modulus; + Program::MemoryAddress output; - friend bool operator==(const MemoryArray&, const MemoryArray&); + friend bool operator==(const BigIntFromLeBytes&, const BigIntFromLeBytes&); std::vector bincodeSerialize() const; - static MemoryArray bincodeDeserialize(std::vector); + static BigIntFromLeBytes bincodeDeserialize(std::vector); }; - std::variant value; - - friend bool operator==(const BrilligInputs&, const BrilligInputs&); - std::vector bincodeSerialize() const; - static BrilligInputs bincodeDeserialize(std::vector); - }; - - struct BrilligOutputs { - - struct Simple { - Program::Witness value; + struct BigIntToLeBytes { + Program::MemoryAddress input; + Program::HeapVector output; - friend bool operator==(const Simple&, const Simple&); + friend bool operator==(const BigIntToLeBytes&, const BigIntToLeBytes&); std::vector bincodeSerialize() const; - static Simple bincodeDeserialize(std::vector); + static BigIntToLeBytes bincodeDeserialize(std::vector); }; - struct Array { - std::vector value; + struct Poseidon2Permutation { + Program::HeapVector message; + Program::HeapArray output; + Program::MemoryAddress len; - friend bool operator==(const Array&, const Array&); + friend bool operator==(const 
Poseidon2Permutation&, const Poseidon2Permutation&); std::vector bincodeSerialize() const; - static Array bincodeDeserialize(std::vector); + static Poseidon2Permutation bincodeDeserialize(std::vector); }; - std::variant value; - - friend bool operator==(const BrilligOutputs&, const BrilligOutputs&); - std::vector bincodeSerialize() const; - static BrilligOutputs bincodeDeserialize(std::vector); - }; + struct Sha256Compression { + Program::HeapVector input; + Program::HeapVector hash_values; + Program::HeapArray output; - struct Directive { + friend bool operator==(const Sha256Compression&, const Sha256Compression&); + std::vector bincodeSerialize() const; + static Sha256Compression bincodeDeserialize(std::vector); + }; - struct ToLeRadix { - Program::Expression a; - std::vector b; + struct ToRadix { + Program::MemoryAddress input; uint32_t radix; + Program::HeapArray output; - friend bool operator==(const ToLeRadix&, const ToLeRadix&); + friend bool operator==(const ToRadix&, const ToRadix&); std::vector bincodeSerialize() const; - static ToLeRadix bincodeDeserialize(std::vector); + static ToRadix bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; - friend bool operator==(const Directive&, const Directive&); + friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); std::vector bincodeSerialize() const; - static Directive bincodeDeserialize(std::vector); + static BlackBoxOp bincodeDeserialize(std::vector); }; - struct MemOp { - Program::Expression operation; - Program::Expression index; - Program::Expression value; - - friend bool operator==(const MemOp&, const MemOp&); - std::vector bincodeSerialize() const; - static MemOp bincodeDeserialize(std::vector); - }; + struct HeapValueType; - struct Opcode { + struct HeapValueType { - struct AssertZero { - Program::Expression value; + struct Simple { + Program::BitSize value; - friend bool operator==(const AssertZero&, const AssertZero&); + friend bool operator==(const Simple&, const Simple&); std::vector bincodeSerialize() const; - static AssertZero bincodeDeserialize(std::vector); + static Simple bincodeDeserialize(std::vector); }; - struct BlackBoxFuncCall { - Program::BlackBoxFuncCall value; + struct Array { + std::vector value_types; + uint64_t size; - friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); + friend bool operator==(const Array&, const Array&); std::vector bincodeSerialize() const; - static BlackBoxFuncCall bincodeDeserialize(std::vector); + static Array bincodeDeserialize(std::vector); }; - struct Directive { - Program::Directive value; + struct Vector { + std::vector value_types; - friend bool operator==(const Directive&, const Directive&); + friend bool operator==(const Vector&, const Vector&); std::vector bincodeSerialize() const; - static Directive bincodeDeserialize(std::vector); + static Vector bincodeDeserialize(std::vector); }; - struct MemoryOp { - Program::BlockId block_id; - Program::MemOp op; - std::optional predicate; + std::variant value; - friend bool operator==(const MemoryOp&, const MemoryOp&); - std::vector bincodeSerialize() const; - static MemoryOp bincodeDeserialize(std::vector); - }; + friend bool operator==(const HeapValueType&, const HeapValueType&); + std::vector bincodeSerialize() const; + static HeapValueType bincodeDeserialize(std::vector); + }; - struct MemoryInit { - Program::BlockId block_id; - std::vector init; - Program::BlockType block_type; + struct ValueOrArray { - friend bool operator==(const MemoryInit&, const MemoryInit&); + struct 
MemoryAddress { + Program::MemoryAddress value; + + friend bool operator==(const MemoryAddress&, const MemoryAddress&); std::vector bincodeSerialize() const; - static MemoryInit bincodeDeserialize(std::vector); + static MemoryAddress bincodeDeserialize(std::vector); }; - struct BrilligCall { - uint32_t id; - std::vector inputs; - std::vector outputs; - std::optional predicate; + struct HeapArray { + Program::HeapArray value; - friend bool operator==(const BrilligCall&, const BrilligCall&); + friend bool operator==(const HeapArray&, const HeapArray&); std::vector bincodeSerialize() const; - static BrilligCall bincodeDeserialize(std::vector); + static HeapArray bincodeDeserialize(std::vector); }; - struct Call { - uint32_t id; - std::vector inputs; - std::vector outputs; - std::optional predicate; + struct HeapVector { + Program::HeapVector value; - friend bool operator==(const Call&, const Call&); + friend bool operator==(const HeapVector&, const HeapVector&); std::vector bincodeSerialize() const; - static Call bincodeDeserialize(std::vector); + static HeapVector bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; - friend bool operator==(const Opcode&, const Opcode&); + friend bool operator==(const ValueOrArray&, const ValueOrArray&); std::vector bincodeSerialize() const; - static Opcode bincodeDeserialize(std::vector); + static ValueOrArray bincodeDeserialize(std::vector); }; - struct BinaryFieldOp { + struct BrilligOpcode { - struct Add { - friend bool operator==(const Add&, const Add&); - std::vector bincodeSerialize() const; - static Add bincodeDeserialize(std::vector); - }; + struct BinaryFieldOp { + Program::MemoryAddress destination; + Program::BinaryFieldOp op; + Program::MemoryAddress lhs; + Program::MemoryAddress rhs; - struct Sub { - friend bool operator==(const Sub&, const Sub&); + friend bool operator==(const BinaryFieldOp&, const BinaryFieldOp&); std::vector bincodeSerialize() const; - static Sub bincodeDeserialize(std::vector); + static BinaryFieldOp bincodeDeserialize(std::vector); }; - struct Mul { - friend bool operator==(const Mul&, const Mul&); - std::vector bincodeSerialize() const; - static Mul bincodeDeserialize(std::vector); - }; + struct BinaryIntOp { + Program::MemoryAddress destination; + Program::BinaryIntOp op; + Program::IntegerBitSize bit_size; + Program::MemoryAddress lhs; + Program::MemoryAddress rhs; - struct Div { - friend bool operator==(const Div&, const Div&); + friend bool operator==(const BinaryIntOp&, const BinaryIntOp&); std::vector bincodeSerialize() const; - static Div bincodeDeserialize(std::vector); + static BinaryIntOp bincodeDeserialize(std::vector); }; - struct IntegerDiv { - friend bool operator==(const IntegerDiv&, const IntegerDiv&); - std::vector bincodeSerialize() const; - static IntegerDiv bincodeDeserialize(std::vector); - }; + struct Cast { + Program::MemoryAddress destination; + Program::MemoryAddress source; + Program::BitSize bit_size; - struct Equals { - friend bool operator==(const Equals&, const Equals&); + friend bool operator==(const Cast&, const Cast&); std::vector bincodeSerialize() const; - static Equals bincodeDeserialize(std::vector); + static Cast bincodeDeserialize(std::vector); }; - struct LessThan { - friend bool operator==(const LessThan&, const LessThan&); + struct JumpIfNot { + Program::MemoryAddress condition; + uint64_t location; + + friend bool operator==(const JumpIfNot&, const JumpIfNot&); std::vector bincodeSerialize() const; - static LessThan bincodeDeserialize(std::vector); + 
static JumpIfNot bincodeDeserialize(std::vector); }; - struct LessThanEquals { - friend bool operator==(const LessThanEquals&, const LessThanEquals&); + struct JumpIf { + Program::MemoryAddress condition; + uint64_t location; + + friend bool operator==(const JumpIf&, const JumpIf&); std::vector bincodeSerialize() const; - static LessThanEquals bincodeDeserialize(std::vector); + static JumpIf bincodeDeserialize(std::vector); }; - std::variant value; + struct Jump { + uint64_t location; - friend bool operator==(const BinaryFieldOp&, const BinaryFieldOp&); - std::vector bincodeSerialize() const; - static BinaryFieldOp bincodeDeserialize(std::vector); - }; + friend bool operator==(const Jump&, const Jump&); + std::vector bincodeSerialize() const; + static Jump bincodeDeserialize(std::vector); + }; - struct BinaryIntOp { + struct CalldataCopy { + Program::MemoryAddress destination_address; + uint64_t size; + uint64_t offset; - struct Add { - friend bool operator==(const Add&, const Add&); + friend bool operator==(const CalldataCopy&, const CalldataCopy&); std::vector bincodeSerialize() const; - static Add bincodeDeserialize(std::vector); + static CalldataCopy bincodeDeserialize(std::vector); }; - struct Sub { - friend bool operator==(const Sub&, const Sub&); + struct Call { + uint64_t location; + + friend bool operator==(const Call&, const Call&); std::vector bincodeSerialize() const; - static Sub bincodeDeserialize(std::vector); + static Call bincodeDeserialize(std::vector); }; - struct Mul { - friend bool operator==(const Mul&, const Mul&); + struct Const { + Program::MemoryAddress destination; + Program::BitSize bit_size; + std::string value; + + friend bool operator==(const Const&, const Const&); std::vector bincodeSerialize() const; - static Mul bincodeDeserialize(std::vector); + static Const bincodeDeserialize(std::vector); }; - struct Div { - friend bool operator==(const Div&, const Div&); + struct Return { + friend bool operator==(const Return&, const Return&); std::vector bincodeSerialize() const; - static Div bincodeDeserialize(std::vector); + static Return bincodeDeserialize(std::vector); }; - struct Equals { - friend bool operator==(const Equals&, const Equals&); + struct ForeignCall { + std::string function; + std::vector destinations; + std::vector destination_value_types; + std::vector inputs; + std::vector input_value_types; + + friend bool operator==(const ForeignCall&, const ForeignCall&); std::vector bincodeSerialize() const; - static Equals bincodeDeserialize(std::vector); + static ForeignCall bincodeDeserialize(std::vector); }; - struct LessThan { - friend bool operator==(const LessThan&, const LessThan&); + struct Mov { + Program::MemoryAddress destination; + Program::MemoryAddress source; + + friend bool operator==(const Mov&, const Mov&); std::vector bincodeSerialize() const; - static LessThan bincodeDeserialize(std::vector); + static Mov bincodeDeserialize(std::vector); }; - struct LessThanEquals { - friend bool operator==(const LessThanEquals&, const LessThanEquals&); + struct ConditionalMov { + Program::MemoryAddress destination; + Program::MemoryAddress source_a; + Program::MemoryAddress source_b; + Program::MemoryAddress condition; + + friend bool operator==(const ConditionalMov&, const ConditionalMov&); std::vector bincodeSerialize() const; - static LessThanEquals bincodeDeserialize(std::vector); + static ConditionalMov bincodeDeserialize(std::vector); }; - struct And { - friend bool operator==(const And&, const And&); + struct Load { + Program::MemoryAddress 
destination; + Program::MemoryAddress source_pointer; + + friend bool operator==(const Load&, const Load&); std::vector bincodeSerialize() const; - static And bincodeDeserialize(std::vector); + static Load bincodeDeserialize(std::vector); }; - struct Or { - friend bool operator==(const Or&, const Or&); + struct Store { + Program::MemoryAddress destination_pointer; + Program::MemoryAddress source; + + friend bool operator==(const Store&, const Store&); std::vector bincodeSerialize() const; - static Or bincodeDeserialize(std::vector); + static Store bincodeDeserialize(std::vector); }; - struct Xor { - friend bool operator==(const Xor&, const Xor&); + struct BlackBox { + Program::BlackBoxOp value; + + friend bool operator==(const BlackBox&, const BlackBox&); std::vector bincodeSerialize() const; - static Xor bincodeDeserialize(std::vector); + static BlackBox bincodeDeserialize(std::vector); }; - struct Shl { - friend bool operator==(const Shl&, const Shl&); + struct Trap { + Program::HeapArray revert_data; + + friend bool operator==(const Trap&, const Trap&); std::vector bincodeSerialize() const; - static Shl bincodeDeserialize(std::vector); + static Trap bincodeDeserialize(std::vector); }; - struct Shr { - friend bool operator==(const Shr&, const Shr&); + struct Stop { + uint64_t return_data_offset; + uint64_t return_data_size; + + friend bool operator==(const Stop&, const Stop&); std::vector bincodeSerialize() const; - static Shr bincodeDeserialize(std::vector); + static Stop bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; - friend bool operator==(const BinaryIntOp&, const BinaryIntOp&); + friend bool operator==(const BrilligOpcode&, const BrilligOpcode&); std::vector bincodeSerialize() const; - static BinaryIntOp bincodeDeserialize(std::vector); + static BrilligOpcode bincodeDeserialize(std::vector); }; - struct MemoryAddress { - uint64_t value; + struct Witness { + uint32_t value; - friend bool operator==(const MemoryAddress&, const MemoryAddress&); + friend bool operator==(const Witness&, const Witness&); std::vector bincodeSerialize() const; - static MemoryAddress bincodeDeserialize(std::vector); + static Witness bincodeDeserialize(std::vector); }; - struct HeapArray { - Program::MemoryAddress pointer; - uint64_t size; + struct ConstantOrWitnessEnum { - friend bool operator==(const HeapArray&, const HeapArray&); + struct Constant { + std::string value; + + friend bool operator==(const Constant&, const Constant&); + std::vector bincodeSerialize() const; + static Constant bincodeDeserialize(std::vector); + }; + + struct Witness { + Program::Witness value; + + friend bool operator==(const Witness&, const Witness&); + std::vector bincodeSerialize() const; + static Witness bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const ConstantOrWitnessEnum&, const ConstantOrWitnessEnum&); std::vector bincodeSerialize() const; - static HeapArray bincodeDeserialize(std::vector); + static ConstantOrWitnessEnum bincodeDeserialize(std::vector); }; - struct HeapVector { - Program::MemoryAddress pointer; - Program::MemoryAddress size; + struct FunctionInput { + Program::ConstantOrWitnessEnum input; + uint32_t num_bits; - friend bool operator==(const HeapVector&, const HeapVector&); + friend bool operator==(const FunctionInput&, const FunctionInput&); std::vector bincodeSerialize() const; - static HeapVector bincodeDeserialize(std::vector); + static FunctionInput bincodeDeserialize(std::vector); }; - struct BlackBoxOp { + struct 
BlackBoxFuncCall { struct AES128Encrypt { - Program::HeapVector inputs; - Program::HeapArray iv; - Program::HeapArray key; - Program::HeapVector outputs; + std::vector inputs; + std::array iv; + std::array key; + std::vector outputs; friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); std::vector bincodeSerialize() const; static AES128Encrypt bincodeDeserialize(std::vector); }; - struct Sha256 { - Program::HeapVector message; - Program::HeapArray output; + struct AND { + Program::FunctionInput lhs; + Program::FunctionInput rhs; + Program::Witness output; - friend bool operator==(const Sha256&, const Sha256&); + friend bool operator==(const AND&, const AND&); std::vector bincodeSerialize() const; - static Sha256 bincodeDeserialize(std::vector); + static AND bincodeDeserialize(std::vector); + }; + + struct XOR { + Program::FunctionInput lhs; + Program::FunctionInput rhs; + Program::Witness output; + + friend bool operator==(const XOR&, const XOR&); + std::vector bincodeSerialize() const; + static XOR bincodeDeserialize(std::vector); + }; + + struct RANGE { + Program::FunctionInput input; + + friend bool operator==(const RANGE&, const RANGE&); + std::vector bincodeSerialize() const; + static RANGE bincodeDeserialize(std::vector); + }; + + struct SHA256 { + std::vector inputs; + std::array outputs; + + friend bool operator==(const SHA256&, const SHA256&); + std::vector bincodeSerialize() const; + static SHA256 bincodeDeserialize(std::vector); }; struct Blake2s { - Program::HeapVector message; - Program::HeapArray output; + std::vector inputs; + std::array outputs; friend bool operator==(const Blake2s&, const Blake2s&); std::vector bincodeSerialize() const; @@ -710,38 +825,52 @@ namespace Program { }; struct Blake3 { - Program::HeapVector message; - Program::HeapArray output; + std::vector inputs; + std::array outputs; friend bool operator==(const Blake3&, const Blake3&); std::vector bincodeSerialize() const; static Blake3 bincodeDeserialize(std::vector); }; - struct Keccak256 { - Program::HeapVector message; - Program::HeapArray output; + struct SchnorrVerify { + Program::FunctionInput public_key_x; + Program::FunctionInput public_key_y; + std::array signature; + std::vector message; + Program::Witness output; - friend bool operator==(const Keccak256&, const Keccak256&); + friend bool operator==(const SchnorrVerify&, const SchnorrVerify&); std::vector bincodeSerialize() const; - static Keccak256 bincodeDeserialize(std::vector); + static SchnorrVerify bincodeDeserialize(std::vector); }; - struct Keccakf1600 { - Program::HeapVector message; - Program::HeapArray output; + struct PedersenCommitment { + std::vector inputs; + uint32_t domain_separator; + std::array outputs; - friend bool operator==(const Keccakf1600&, const Keccakf1600&); + friend bool operator==(const PedersenCommitment&, const PedersenCommitment&); std::vector bincodeSerialize() const; - static Keccakf1600 bincodeDeserialize(std::vector); + static PedersenCommitment bincodeDeserialize(std::vector); + }; + + struct PedersenHash { + std::vector inputs; + uint32_t domain_separator; + Program::Witness output; + + friend bool operator==(const PedersenHash&, const PedersenHash&); + std::vector bincodeSerialize() const; + static PedersenHash bincodeDeserialize(std::vector); }; struct EcdsaSecp256k1 { - Program::HeapVector hashed_msg; - Program::HeapArray public_key_x; - Program::HeapArray public_key_y; - Program::HeapArray signature; - Program::MemoryAddress result; + std::array public_key_x; + std::array public_key_y; + 
std::array signature; + std::array hashed_message; + Program::Witness output; friend bool operator==(const EcdsaSecp256k1&, const EcdsaSecp256k1&); std::vector bincodeSerialize() const; @@ -749,77 +878,71 @@ namespace Program { }; struct EcdsaSecp256r1 { - Program::HeapVector hashed_msg; - Program::HeapArray public_key_x; - Program::HeapArray public_key_y; - Program::HeapArray signature; - Program::MemoryAddress result; + std::array public_key_x; + std::array public_key_y; + std::array signature; + std::array hashed_message; + Program::Witness output; friend bool operator==(const EcdsaSecp256r1&, const EcdsaSecp256r1&); std::vector bincodeSerialize() const; static EcdsaSecp256r1 bincodeDeserialize(std::vector); }; - struct SchnorrVerify { - Program::MemoryAddress public_key_x; - Program::MemoryAddress public_key_y; - Program::HeapVector message; - Program::HeapVector signature; - Program::MemoryAddress result; + struct MultiScalarMul { + std::vector points; + std::vector scalars; + std::array outputs; - friend bool operator==(const SchnorrVerify&, const SchnorrVerify&); + friend bool operator==(const MultiScalarMul&, const MultiScalarMul&); std::vector bincodeSerialize() const; - static SchnorrVerify bincodeDeserialize(std::vector); + static MultiScalarMul bincodeDeserialize(std::vector); }; - struct PedersenCommitment { - Program::HeapVector inputs; - Program::MemoryAddress domain_separator; - Program::HeapArray output; + struct EmbeddedCurveAdd { + std::array input1; + std::array input2; + std::array outputs; - friend bool operator==(const PedersenCommitment&, const PedersenCommitment&); + friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&); std::vector bincodeSerialize() const; - static PedersenCommitment bincodeDeserialize(std::vector); + static EmbeddedCurveAdd bincodeDeserialize(std::vector); }; - struct PedersenHash { - Program::HeapVector inputs; - Program::MemoryAddress domain_separator; - Program::MemoryAddress output; + struct Keccak256 { + std::vector inputs; + Program::FunctionInput var_message_size; + std::array outputs; - friend bool operator==(const PedersenHash&, const PedersenHash&); + friend bool operator==(const Keccak256&, const Keccak256&); std::vector bincodeSerialize() const; - static PedersenHash bincodeDeserialize(std::vector); + static Keccak256 bincodeDeserialize(std::vector); }; - struct MultiScalarMul { - Program::HeapVector points; - Program::HeapVector scalars; - Program::HeapArray outputs; + struct Keccakf1600 { + std::array inputs; + std::array outputs; - friend bool operator==(const MultiScalarMul&, const MultiScalarMul&); + friend bool operator==(const Keccakf1600&, const Keccakf1600&); std::vector bincodeSerialize() const; - static MultiScalarMul bincodeDeserialize(std::vector); + static Keccakf1600 bincodeDeserialize(std::vector); }; - struct EmbeddedCurveAdd { - Program::MemoryAddress input1_x; - Program::MemoryAddress input1_y; - Program::MemoryAddress input1_infinite; - Program::MemoryAddress input2_x; - Program::MemoryAddress input2_y; - Program::MemoryAddress input2_infinite; - Program::HeapArray result; + struct RecursiveAggregation { + std::vector verification_key; + std::vector proof; + std::vector public_inputs; + Program::FunctionInput key_hash; - friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&); + friend bool operator==(const RecursiveAggregation&, const RecursiveAggregation&); std::vector bincodeSerialize() const; - static EmbeddedCurveAdd bincodeDeserialize(std::vector); + static 
RecursiveAggregation bincodeDeserialize(std::vector); }; struct BigIntAdd { - Program::MemoryAddress lhs; - Program::MemoryAddress rhs; - Program::MemoryAddress output; + uint32_t lhs; + uint32_t rhs; + uint32_t output; friend bool operator==(const BigIntAdd&, const BigIntAdd&); std::vector bincodeSerialize() const; @@ -827,9 +950,9 @@ namespace Program { }; struct BigIntSub { - Program::MemoryAddress lhs; - Program::MemoryAddress rhs; - Program::MemoryAddress output; + uint32_t lhs; + uint32_t rhs; + uint32_t output; friend bool operator==(const BigIntSub&, const BigIntSub&); std::vector bincodeSerialize() const; @@ -837,9 +960,9 @@ namespace Program { }; struct BigIntMul { - Program::MemoryAddress lhs; - Program::MemoryAddress rhs; - Program::MemoryAddress output; + uint32_t lhs; + uint32_t rhs; + uint32_t output; friend bool operator==(const BigIntMul&, const BigIntMul&); std::vector bincodeSerialize() const; @@ -847,9 +970,9 @@ namespace Program { }; struct BigIntDiv { - Program::MemoryAddress lhs; - Program::MemoryAddress rhs; - Program::MemoryAddress output; + uint32_t lhs; + uint32_t rhs; + uint32_t output; friend bool operator==(const BigIntDiv&, const BigIntDiv&); std::vector bincodeSerialize() const; @@ -857,9 +980,9 @@ namespace Program { }; struct BigIntFromLeBytes { - Program::HeapVector inputs; - Program::HeapVector modulus; - Program::MemoryAddress output; + std::vector inputs; + std::vector modulus; + uint32_t output; friend bool operator==(const BigIntFromLeBytes&, const BigIntFromLeBytes&); std::vector bincodeSerialize() const; @@ -867,8 +990,8 @@ namespace Program { }; struct BigIntToLeBytes { - Program::MemoryAddress input; - Program::HeapVector output; + uint32_t input; + std::vector outputs; friend bool operator==(const BigIntToLeBytes&, const BigIntToLeBytes&); std::vector bincodeSerialize() const; @@ -876,9 +999,9 @@ namespace Program { }; struct Poseidon2Permutation { - Program::HeapVector message; - Program::HeapArray output; - Program::MemoryAddress len; + std::vector inputs; + std::vector outputs; + uint32_t len; friend bool operator==(const Poseidon2Permutation&, const Poseidon2Permutation&); std::vector bincodeSerialize() const; @@ -886,276 +1009,227 @@ namespace Program { }; struct Sha256Compression { - Program::HeapVector input; - Program::HeapVector hash_values; - Program::HeapArray output; + std::array inputs; + std::array hash_values; + std::array outputs; friend bool operator==(const Sha256Compression&, const Sha256Compression&); std::vector bincodeSerialize() const; static Sha256Compression bincodeDeserialize(std::vector); }; - struct ToRadix { - Program::MemoryAddress input; - uint32_t radix; - Program::HeapArray output; - - friend bool operator==(const ToRadix&, const ToRadix&); - std::vector bincodeSerialize() const; - static ToRadix bincodeDeserialize(std::vector); - }; - - std::variant value; + std::variant value; - friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); + friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); std::vector bincodeSerialize() const; - static BlackBoxOp bincodeDeserialize(std::vector); + static BlackBoxFuncCall bincodeDeserialize(std::vector); }; - struct HeapValueType; + struct BlockId { + uint32_t value; - struct HeapValueType { + friend bool operator==(const BlockId&, const BlockId&); + std::vector bincodeSerialize() const; + static BlockId bincodeDeserialize(std::vector); + }; - struct Simple { - uint32_t value; + struct BlockType { - friend bool operator==(const Simple&, const Simple&); + 
struct Memory { + friend bool operator==(const Memory&, const Memory&); std::vector bincodeSerialize() const; - static Simple bincodeDeserialize(std::vector); + static Memory bincodeDeserialize(std::vector); }; - struct Array { - std::vector value_types; - uint64_t size; - - friend bool operator==(const Array&, const Array&); + struct CallData { + friend bool operator==(const CallData&, const CallData&); std::vector bincodeSerialize() const; - static Array bincodeDeserialize(std::vector); + static CallData bincodeDeserialize(std::vector); }; - struct Vector { - std::vector value_types; - - friend bool operator==(const Vector&, const Vector&); + struct ReturnData { + friend bool operator==(const ReturnData&, const ReturnData&); std::vector bincodeSerialize() const; - static Vector bincodeDeserialize(std::vector); + static ReturnData bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; - friend bool operator==(const HeapValueType&, const HeapValueType&); + friend bool operator==(const BlockType&, const BlockType&); std::vector bincodeSerialize() const; - static HeapValueType bincodeDeserialize(std::vector); + static BlockType bincodeDeserialize(std::vector); }; - struct ValueOrArray { - - struct MemoryAddress { - Program::MemoryAddress value; - - friend bool operator==(const MemoryAddress&, const MemoryAddress&); - std::vector bincodeSerialize() const; - static MemoryAddress bincodeDeserialize(std::vector); - }; - - struct HeapArray { - Program::HeapArray value; - - friend bool operator==(const HeapArray&, const HeapArray&); - std::vector bincodeSerialize() const; - static HeapArray bincodeDeserialize(std::vector); - }; - - struct HeapVector { - Program::HeapVector value; - - friend bool operator==(const HeapVector&, const HeapVector&); - std::vector bincodeSerialize() const; - static HeapVector bincodeDeserialize(std::vector); - }; - - std::variant value; + struct Expression { + std::vector> mul_terms; + std::vector> linear_combinations; + std::string q_c; - friend bool operator==(const ValueOrArray&, const ValueOrArray&); + friend bool operator==(const Expression&, const Expression&); std::vector bincodeSerialize() const; - static ValueOrArray bincodeDeserialize(std::vector); + static Expression bincodeDeserialize(std::vector); }; - struct BrilligOpcode { + struct BrilligInputs { - struct BinaryFieldOp { - Program::MemoryAddress destination; - Program::BinaryFieldOp op; - Program::MemoryAddress lhs; - Program::MemoryAddress rhs; + struct Single { + Program::Expression value; - friend bool operator==(const BinaryFieldOp&, const BinaryFieldOp&); + friend bool operator==(const Single&, const Single&); std::vector bincodeSerialize() const; - static BinaryFieldOp bincodeDeserialize(std::vector); + static Single bincodeDeserialize(std::vector); }; - struct BinaryIntOp { - Program::MemoryAddress destination; - Program::BinaryIntOp op; - uint32_t bit_size; - Program::MemoryAddress lhs; - Program::MemoryAddress rhs; + struct Array { + std::vector value; - friend bool operator==(const BinaryIntOp&, const BinaryIntOp&); + friend bool operator==(const Array&, const Array&); std::vector bincodeSerialize() const; - static BinaryIntOp bincodeDeserialize(std::vector); + static Array bincodeDeserialize(std::vector); }; - struct Cast { - Program::MemoryAddress destination; - Program::MemoryAddress source; - uint32_t bit_size; + struct MemoryArray { + Program::BlockId value; - friend bool operator==(const Cast&, const Cast&); + friend bool operator==(const MemoryArray&, const 
MemoryArray&); std::vector bincodeSerialize() const; - static Cast bincodeDeserialize(std::vector); + static MemoryArray bincodeDeserialize(std::vector); }; - struct JumpIfNot { - Program::MemoryAddress condition; - uint64_t location; + std::variant value; - friend bool operator==(const JumpIfNot&, const JumpIfNot&); - std::vector bincodeSerialize() const; - static JumpIfNot bincodeDeserialize(std::vector); - }; + friend bool operator==(const BrilligInputs&, const BrilligInputs&); + std::vector bincodeSerialize() const; + static BrilligInputs bincodeDeserialize(std::vector); + }; - struct JumpIf { - Program::MemoryAddress condition; - uint64_t location; + struct BrilligOutputs { + + struct Simple { + Program::Witness value; - friend bool operator==(const JumpIf&, const JumpIf&); + friend bool operator==(const Simple&, const Simple&); std::vector bincodeSerialize() const; - static JumpIf bincodeDeserialize(std::vector); + static Simple bincodeDeserialize(std::vector); }; - struct Jump { - uint64_t location; + struct Array { + std::vector value; - friend bool operator==(const Jump&, const Jump&); + friend bool operator==(const Array&, const Array&); std::vector bincodeSerialize() const; - static Jump bincodeDeserialize(std::vector); + static Array bincodeDeserialize(std::vector); }; - struct CalldataCopy { - Program::MemoryAddress destination_address; - uint64_t size; - uint64_t offset; + std::variant value; - friend bool operator==(const CalldataCopy&, const CalldataCopy&); - std::vector bincodeSerialize() const; - static CalldataCopy bincodeDeserialize(std::vector); - }; + friend bool operator==(const BrilligOutputs&, const BrilligOutputs&); + std::vector bincodeSerialize() const; + static BrilligOutputs bincodeDeserialize(std::vector); + }; - struct Call { - uint64_t location; + struct Directive { - friend bool operator==(const Call&, const Call&); + struct ToLeRadix { + Program::Expression a; + std::vector b; + uint32_t radix; + + friend bool operator==(const ToLeRadix&, const ToLeRadix&); std::vector bincodeSerialize() const; - static Call bincodeDeserialize(std::vector); + static ToLeRadix bincodeDeserialize(std::vector); }; - struct Const { - Program::MemoryAddress destination; - uint32_t bit_size; - std::string value; + std::variant value; - friend bool operator==(const Const&, const Const&); - std::vector bincodeSerialize() const; - static Const bincodeDeserialize(std::vector); - }; + friend bool operator==(const Directive&, const Directive&); + std::vector bincodeSerialize() const; + static Directive bincodeDeserialize(std::vector); + }; - struct Return { - friend bool operator==(const Return&, const Return&); - std::vector bincodeSerialize() const; - static Return bincodeDeserialize(std::vector); - }; + struct MemOp { + Program::Expression operation; + Program::Expression index; + Program::Expression value; - struct ForeignCall { - std::string function; - std::vector destinations; - std::vector destination_value_types; - std::vector inputs; - std::vector input_value_types; + friend bool operator==(const MemOp&, const MemOp&); + std::vector bincodeSerialize() const; + static MemOp bincodeDeserialize(std::vector); + }; - friend bool operator==(const ForeignCall&, const ForeignCall&); - std::vector bincodeSerialize() const; - static ForeignCall bincodeDeserialize(std::vector); - }; + struct Opcode { - struct Mov { - Program::MemoryAddress destination; - Program::MemoryAddress source; + struct AssertZero { + Program::Expression value; - friend bool operator==(const Mov&, const Mov&); 
+ friend bool operator==(const AssertZero&, const AssertZero&); std::vector bincodeSerialize() const; - static Mov bincodeDeserialize(std::vector); + static AssertZero bincodeDeserialize(std::vector); }; - struct ConditionalMov { - Program::MemoryAddress destination; - Program::MemoryAddress source_a; - Program::MemoryAddress source_b; - Program::MemoryAddress condition; + struct BlackBoxFuncCall { + Program::BlackBoxFuncCall value; - friend bool operator==(const ConditionalMov&, const ConditionalMov&); + friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); std::vector bincodeSerialize() const; - static ConditionalMov bincodeDeserialize(std::vector); + static BlackBoxFuncCall bincodeDeserialize(std::vector); }; - struct Load { - Program::MemoryAddress destination; - Program::MemoryAddress source_pointer; + struct Directive { + Program::Directive value; - friend bool operator==(const Load&, const Load&); + friend bool operator==(const Directive&, const Directive&); std::vector bincodeSerialize() const; - static Load bincodeDeserialize(std::vector); + static Directive bincodeDeserialize(std::vector); }; - struct Store { - Program::MemoryAddress destination_pointer; - Program::MemoryAddress source; + struct MemoryOp { + Program::BlockId block_id; + Program::MemOp op; + std::optional predicate; - friend bool operator==(const Store&, const Store&); + friend bool operator==(const MemoryOp&, const MemoryOp&); std::vector bincodeSerialize() const; - static Store bincodeDeserialize(std::vector); + static MemoryOp bincodeDeserialize(std::vector); }; - struct BlackBox { - Program::BlackBoxOp value; + struct MemoryInit { + Program::BlockId block_id; + std::vector init; + Program::BlockType block_type; - friend bool operator==(const BlackBox&, const BlackBox&); + friend bool operator==(const MemoryInit&, const MemoryInit&); std::vector bincodeSerialize() const; - static BlackBox bincodeDeserialize(std::vector); + static MemoryInit bincodeDeserialize(std::vector); }; - struct Trap { - Program::HeapArray revert_data; + struct BrilligCall { + uint32_t id; + std::vector inputs; + std::vector outputs; + std::optional predicate; - friend bool operator==(const Trap&, const Trap&); + friend bool operator==(const BrilligCall&, const BrilligCall&); std::vector bincodeSerialize() const; - static Trap bincodeDeserialize(std::vector); + static BrilligCall bincodeDeserialize(std::vector); }; - struct Stop { - uint64_t return_data_offset; - uint64_t return_data_size; + struct Call { + uint32_t id; + std::vector inputs; + std::vector outputs; + std::optional predicate; - friend bool operator==(const Stop&, const Stop&); + friend bool operator==(const Call&, const Call&); std::vector bincodeSerialize() const; - static Stop bincodeDeserialize(std::vector); + static Call bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; - friend bool operator==(const BrilligOpcode&, const BrilligOpcode&); + friend bool operator==(const Opcode&, const Opcode&); std::vector bincodeSerialize() const; - static BrilligOpcode bincodeDeserialize(std::vector); + static Opcode bincodeDeserialize(std::vector); }; struct ExpressionOrMemory { @@ -2103,15 +2177,121 @@ namespace Program { return true; } - inline std::vector BinaryIntOp::Xor::bincodeSerialize() const { + inline std::vector BinaryIntOp::Xor::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline 
BinaryIntOp::Xor BinaryIntOp::Xor::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BinaryIntOp::Xor &obj, Serializer &serializer) { +} + +template <> +template +Program::BinaryIntOp::Xor serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BinaryIntOp::Xor obj; + return obj; +} + +namespace Program { + + inline bool operator==(const BinaryIntOp::Shl &lhs, const BinaryIntOp::Shl &rhs) { + return true; + } + + inline std::vector BinaryIntOp::Shl::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BinaryIntOp::Shl BinaryIntOp::Shl::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BinaryIntOp::Shl &obj, Serializer &serializer) { +} + +template <> +template +Program::BinaryIntOp::Shl serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BinaryIntOp::Shl obj; + return obj; +} + +namespace Program { + + inline bool operator==(const BinaryIntOp::Shr &lhs, const BinaryIntOp::Shr &rhs) { + return true; + } + + inline std::vector BinaryIntOp::Shr::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BinaryIntOp::Shr BinaryIntOp::Shr::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BinaryIntOp::Shr &obj, Serializer &serializer) { +} + +template <> +template +Program::BinaryIntOp::Shr serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BinaryIntOp::Shr obj; + return obj; +} + +namespace Program { + + inline bool operator==(const BitSize &lhs, const BitSize &rhs) { + if (!(lhs.value == rhs.value)) { return false; } + return true; + } + + inline std::vector BitSize::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline BinaryIntOp::Xor BinaryIntOp::Xor::bincodeDeserialize(std::vector input) { + inline BitSize BitSize::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw 
serde::deserialization_error("Some input bytes were not read"); } @@ -2122,31 +2302,37 @@ namespace Program { template <> template -void serde::Serializable::serialize(const Program::BinaryIntOp::Xor &obj, Serializer &serializer) { +void serde::Serializable::serialize(const Program::BitSize &obj, Serializer &serializer) { + serializer.increase_container_depth(); + serde::Serializable::serialize(obj.value, serializer); + serializer.decrease_container_depth(); } template <> template -Program::BinaryIntOp::Xor serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BinaryIntOp::Xor obj; +Program::BitSize serde::Deserializable::deserialize(Deserializer &deserializer) { + deserializer.increase_container_depth(); + Program::BitSize obj; + obj.value = serde::Deserializable::deserialize(deserializer); + deserializer.decrease_container_depth(); return obj; } namespace Program { - inline bool operator==(const BinaryIntOp::Shl &lhs, const BinaryIntOp::Shl &rhs) { + inline bool operator==(const BitSize::Field &lhs, const BitSize::Field &rhs) { return true; } - inline std::vector BinaryIntOp::Shl::bincodeSerialize() const { + inline std::vector BitSize::Field::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline BinaryIntOp::Shl BinaryIntOp::Shl::bincodeDeserialize(std::vector input) { + inline BitSize::Field BitSize::Field::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw serde::deserialization_error("Some input bytes were not read"); } @@ -2157,31 +2343,32 @@ namespace Program { template <> template -void serde::Serializable::serialize(const Program::BinaryIntOp::Shl &obj, Serializer &serializer) { +void serde::Serializable::serialize(const Program::BitSize::Field &obj, Serializer &serializer) { } template <> template -Program::BinaryIntOp::Shl serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BinaryIntOp::Shl obj; +Program::BitSize::Field serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BitSize::Field obj; return obj; } namespace Program { - inline bool operator==(const BinaryIntOp::Shr &lhs, const BinaryIntOp::Shr &rhs) { + inline bool operator==(const BitSize::Integer &lhs, const BitSize::Integer &rhs) { + if (!(lhs.value == rhs.value)) { return false; } return true; } - inline std::vector BinaryIntOp::Shr::bincodeSerialize() const { + inline std::vector BitSize::Integer::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline BinaryIntOp::Shr BinaryIntOp::Shr::bincodeDeserialize(std::vector input) { + inline BitSize::Integer BitSize::Integer::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw serde::deserialization_error("Some input bytes were not read"); } @@ -2192,13 +2379,15 @@ namespace Program { template <> template 
-void serde::Serializable::serialize(const Program::BinaryIntOp::Shr &obj, Serializer &serializer) { +void serde::Serializable::serialize(const Program::BitSize::Integer &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.value, serializer); } template <> template -Program::BinaryIntOp::Shr serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BinaryIntOp::Shr obj; +Program::BitSize::Integer serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BitSize::Integer obj; + obj.value = serde::Deserializable::deserialize(deserializer); return obj; } @@ -6520,6 +6709,293 @@ Program::HeapVector serde::Deserializable::deserialize(Dese return obj; } +namespace Program { + + inline bool operator==(const IntegerBitSize &lhs, const IntegerBitSize &rhs) { + if (!(lhs.value == rhs.value)) { return false; } + return true; + } + + inline std::vector IntegerBitSize::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline IntegerBitSize IntegerBitSize::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::IntegerBitSize &obj, Serializer &serializer) { + serializer.increase_container_depth(); + serde::Serializable::serialize(obj.value, serializer); + serializer.decrease_container_depth(); +} + +template <> +template +Program::IntegerBitSize serde::Deserializable::deserialize(Deserializer &deserializer) { + deserializer.increase_container_depth(); + Program::IntegerBitSize obj; + obj.value = serde::Deserializable::deserialize(deserializer); + deserializer.decrease_container_depth(); + return obj; +} + +namespace Program { + + inline bool operator==(const IntegerBitSize::U0 &lhs, const IntegerBitSize::U0 &rhs) { + return true; + } + + inline std::vector IntegerBitSize::U0::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline IntegerBitSize::U0 IntegerBitSize::U0::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::IntegerBitSize::U0 &obj, Serializer &serializer) { +} + +template <> +template +Program::IntegerBitSize::U0 serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::IntegerBitSize::U0 obj; + return obj; +} + +namespace Program { + + inline bool operator==(const IntegerBitSize::U1 &lhs, const IntegerBitSize::U1 &rhs) { + return true; + } + + inline std::vector IntegerBitSize::U1::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline IntegerBitSize::U1 IntegerBitSize::U1::bincodeDeserialize(std::vector input) { + auto 
deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::IntegerBitSize::U1 &obj, Serializer &serializer) { +} + +template <> +template +Program::IntegerBitSize::U1 serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::IntegerBitSize::U1 obj; + return obj; +} + +namespace Program { + + inline bool operator==(const IntegerBitSize::U8 &lhs, const IntegerBitSize::U8 &rhs) { + return true; + } + + inline std::vector IntegerBitSize::U8::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline IntegerBitSize::U8 IntegerBitSize::U8::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::IntegerBitSize::U8 &obj, Serializer &serializer) { +} + +template <> +template +Program::IntegerBitSize::U8 serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::IntegerBitSize::U8 obj; + return obj; +} + +namespace Program { + + inline bool operator==(const IntegerBitSize::U16 &lhs, const IntegerBitSize::U16 &rhs) { + return true; + } + + inline std::vector IntegerBitSize::U16::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline IntegerBitSize::U16 IntegerBitSize::U16::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::IntegerBitSize::U16 &obj, Serializer &serializer) { +} + +template <> +template +Program::IntegerBitSize::U16 serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::IntegerBitSize::U16 obj; + return obj; +} + +namespace Program { + + inline bool operator==(const IntegerBitSize::U32 &lhs, const IntegerBitSize::U32 &rhs) { + return true; + } + + inline std::vector IntegerBitSize::U32::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline IntegerBitSize::U32 IntegerBitSize::U32::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::IntegerBitSize::U32 
&obj, Serializer &serializer) { +} + +template <> +template +Program::IntegerBitSize::U32 serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::IntegerBitSize::U32 obj; + return obj; +} + +namespace Program { + + inline bool operator==(const IntegerBitSize::U64 &lhs, const IntegerBitSize::U64 &rhs) { + return true; + } + + inline std::vector IntegerBitSize::U64::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline IntegerBitSize::U64 IntegerBitSize::U64::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::IntegerBitSize::U64 &obj, Serializer &serializer) { +} + +template <> +template +Program::IntegerBitSize::U64 serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::IntegerBitSize::U64 obj; + return obj; +} + +namespace Program { + + inline bool operator==(const IntegerBitSize::U128 &lhs, const IntegerBitSize::U128 &rhs) { + return true; + } + + inline std::vector IntegerBitSize::U128::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline IntegerBitSize::U128 IntegerBitSize::U128::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::IntegerBitSize::U128 &obj, Serializer &serializer) { +} + +template <> +template +Program::IntegerBitSize::U128 serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::IntegerBitSize::U128 obj; + return obj; +} + namespace Program { inline bool operator==(const MemOp &lhs, const MemOp &rhs) { diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 6478f0c7a19..6a301ec5115 100644 --- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -277,7 +277,8 @@ impl BlackBoxFuncCall { | BlackBoxFuncCall::BigIntDiv { .. } | BlackBoxFuncCall::BigIntToLeBytes { .. } => Vec::new(), BlackBoxFuncCall::MultiScalarMul { points, scalars, .. 
} => { - let mut inputs: Vec> = Vec::with_capacity(points.len() * 2); + let mut inputs: Vec> = + Vec::with_capacity(points.len() + scalars.len()); inputs.extend(points.iter().copied()); inputs.extend(scalars.iter().copied()); inputs diff --git a/acvm-repo/acir/src/lib.rs b/acvm-repo/acir/src/lib.rs index 540e0f07eb5..845a1d6ad5a 100644 --- a/acvm-repo/acir/src/lib.rs +++ b/acvm-repo/acir/src/lib.rs @@ -33,8 +33,8 @@ mod reflection { use acir_field::FieldElement; use brillig::{ - BinaryFieldOp, BinaryIntOp, BlackBoxOp, HeapValueType, Opcode as BrilligOpcode, - ValueOrArray, + BinaryFieldOp, BinaryIntOp, BitSize, BlackBoxOp, HeapValueType, IntegerBitSize, + Opcode as BrilligOpcode, ValueOrArray, }; use serde_reflection::{Tracer, TracerConfig}; @@ -81,6 +81,8 @@ mod reflection { tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::>().unwrap(); tracer.trace_simple_type::>().unwrap(); + tracer.trace_simple_type::().unwrap(); + tracer.trace_simple_type::().unwrap(); let registry = tracer.registry().unwrap(); diff --git a/acvm-repo/acir/tests/test_program_serialization.rs b/acvm-repo/acir/tests/test_program_serialization.rs index 3047ac002e8..3610ce6493e 100644 --- a/acvm-repo/acir/tests/test_program_serialization.rs +++ b/acvm-repo/acir/tests/test_program_serialization.rs @@ -20,7 +20,7 @@ use acir::{ native_types::{Expression, Witness}, }; use acir_field::{AcirField, FieldElement}; -use brillig::{HeapArray, HeapValueType, MemoryAddress, ValueOrArray}; +use brillig::{BitSize, HeapArray, HeapValueType, IntegerBitSize, MemoryAddress, ValueOrArray}; #[test] fn addition_circuit() { @@ -204,12 +204,11 @@ fn simple_brillig_foreign_call() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 144, 193, 10, 192, 32, 8, 134, 117, 99, 99, 236, - 182, 55, 105, 111, 176, 151, 217, 161, 75, 135, 136, 30, 63, 42, 82, 144, 8, 47, 245, 65, - 252, 230, 47, 162, 34, 52, 174, 242, 144, 226, 131, 148, 255, 18, 206, 125, 164, 102, 142, - 23, 215, 245, 50, 114, 222, 173, 15, 80, 38, 65, 217, 108, 39, 61, 7, 30, 115, 11, 223, - 186, 248, 251, 160, 221, 170, 146, 64, 191, 39, 215, 60, 3, 47, 3, 99, 171, 188, 84, 164, - 1, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 80, 49, 10, 192, 32, 12, 52, 45, 45, 165, 155, 63, + 209, 31, 248, 25, 7, 23, 7, 17, 223, 175, 96, 2, 65, 162, 139, 30, 132, 203, 221, 65, 72, + 2, 170, 227, 107, 5, 216, 63, 200, 164, 57, 200, 115, 200, 102, 15, 22, 206, 205, 50, 124, + 223, 107, 108, 128, 155, 106, 113, 217, 141, 252, 10, 25, 225, 103, 121, 136, 197, 167, + 188, 250, 213, 76, 75, 158, 22, 178, 10, 176, 188, 242, 119, 164, 1, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -239,7 +238,7 @@ fn complex_brillig_foreign_call() { brillig::Opcode::Const { destination: MemoryAddress(0), value: FieldElement::from(32_usize), - bit_size: 32, + bit_size: BitSize::Integer(IntegerBitSize::U32), }, brillig::Opcode::CalldataCopy { destination_address: MemoryAddress(1), @@ -308,15 +307,15 @@ fn complex_brillig_foreign_call() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 75, 10, 131, 64, 12, 77, 102, 90, 43, 221, 245, - 6, 133, 246, 0, 211, 158, 192, 187, 136, 59, 69, 151, 158, 94, 116, 48, 131, 241, 233, 70, - 28, 65, 3, 195, 155, 79, 62, 47, 9, 25, 166, 81, 210, 97, 177, 236, 239, 130, 70, 208, 223, - 91, 154, 75, 208, 205, 4, 221, 62, 249, 113, 60, 95, 238, 40, 142, 230, 2, 28, 237, 1, 28, - 73, 245, 255, 132, 
253, 142, 217, 151, 168, 245, 179, 43, 243, 115, 163, 113, 190, 18, 57, - 63, 4, 83, 44, 180, 55, 50, 180, 28, 188, 153, 224, 196, 122, 175, 111, 112, 68, 24, 65, - 116, 178, 40, 89, 254, 93, 162, 120, 48, 196, 126, 170, 12, 243, 186, 106, 202, 162, 181, - 160, 138, 84, 63, 106, 255, 133, 119, 6, 187, 14, 108, 59, 133, 250, 243, 90, 139, 19, 238, - 205, 6, 223, 47, 154, 202, 27, 74, 222, 3, 234, 73, 242, 82, 65, 5, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 75, 10, 132, 48, 12, 77, 90, 199, 145, 217, + 205, 13, 6, 102, 14, 208, 241, 4, 222, 69, 220, 41, 186, 244, 248, 90, 140, 24, 159, 5, 23, + 86, 208, 7, 37, 253, 228, 243, 146, 144, 50, 77, 200, 198, 197, 178, 127, 136, 52, 34, 253, + 189, 165, 53, 102, 221, 66, 164, 59, 134, 63, 199, 243, 229, 206, 226, 104, 110, 192, 209, + 158, 192, 145, 84, 255, 47, 216, 239, 152, 125, 137, 90, 63, 27, 152, 159, 132, 166, 249, + 74, 229, 252, 20, 153, 97, 161, 189, 145, 161, 237, 224, 173, 128, 19, 235, 189, 126, 192, + 17, 97, 4, 177, 75, 162, 101, 154, 187, 84, 113, 97, 136, 255, 82, 89, 150, 109, 211, 213, + 85, 111, 65, 21, 233, 126, 213, 254, 7, 239, 12, 118, 104, 171, 161, 63, 176, 144, 46, 7, + 244, 246, 124, 191, 105, 41, 241, 92, 246, 1, 235, 222, 207, 212, 69, 5, 0, 0, ]; assert_eq!(bytes, expected_serialization) diff --git a/acvm-repo/acir_field/Cargo.toml b/acvm-repo/acir_field/Cargo.toml index 303d7b6471f..3696423979b 100644 --- a/acvm-repo/acir_field/Cargo.toml +++ b/acvm-repo/acir_field/Cargo.toml @@ -2,7 +2,7 @@ name = "acir_field" description = "The field implementation being used by ACIR." # x-release-please-start-version -version = "0.47.0" +version = "0.48.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acir_field/src/field_element.rs b/acvm-repo/acir_field/src/field_element.rs index 3c16276adc9..92ccbc4e5f6 100644 --- a/acvm-repo/acir_field/src/field_element.rs +++ b/acvm-repo/acir_field/src/field_element.rs @@ -139,17 +139,25 @@ impl<'de, T: ark_ff::PrimeField> Deserialize<'de> for FieldElement { impl From for FieldElement { fn from(a: u128) -> FieldElement { - let result = match F::from_str(&a.to_string()) { - Ok(result) => result, - Err(_) => panic!("Cannot convert u128 as a string to a field element"), - }; - FieldElement(result) + FieldElement(F::from(a)) } } impl From for FieldElement { fn from(a: usize) -> FieldElement { - FieldElement::from(a as u128) + FieldElement::from(a as u64) + } +} + +impl From for FieldElement { + fn from(a: u64) -> FieldElement { + FieldElement(F::from(a)) + } +} + +impl From for FieldElement { + fn from(a: u32) -> FieldElement { + FieldElement(F::from(a)) } } @@ -265,8 +273,16 @@ impl AcirField for FieldElement { } fn to_u128(self) -> u128 { - let bytes = self.to_be_bytes(); - u128::from_be_bytes(bytes[16..32].try_into().unwrap()) + let as_bigint = self.0.into_bigint(); + let limbs = as_bigint.as_ref(); + + let mut result = limbs[0] as u128; + if limbs.len() > 1 { + let high_limb = limbs[1] as u128; + result += high_limb << 64; + } + + result } fn try_into_u128(self) -> Option { diff --git a/acvm-repo/acvm/Cargo.toml b/acvm-repo/acvm/Cargo.toml index 892575902a4..5b6397a1011 100644 --- a/acvm-repo/acvm/Cargo.toml +++ b/acvm-repo/acvm/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm" description = "The virtual machine that processes ACIR given a backend/proof system." 
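The limb-based `to_u128` added to `field_element.rs` above reads the low two 64-bit limbs of the field element's big-integer representation. A minimal sketch of that reconstruction against plain `u64` limbs rather than ark_ff's `BigInt` type (the helper name and the standalone `main` are illustrative only, not part of the patch):

// Illustrative sketch of the limb reconstruction in `to_u128` above, using
// plain u64 limbs instead of ark_ff's BigInt representation.
fn u128_from_le_limbs(limbs: &[u64]) -> u128 {
    // Low 64 bits come from the first limb...
    let mut result = limbs[0] as u128;
    // ...and the high 64 bits, if present, from the second limb.
    if limbs.len() > 1 {
        result += (limbs[1] as u128) << 64;
    }
    result
}

fn main() {
    let limbs = [0xdead_beef_u64, 1];
    assert_eq!(u128_from_le_limbs(&limbs), (1u128 << 64) + 0xdead_beef);
}
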
# x-release-please-start-version -version = "0.47.0" +version = "0.48.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acvm/src/pwg/brillig.rs b/acvm-repo/acvm/src/pwg/brillig.rs index 91dedac8e35..635aa154c3e 100644 --- a/acvm-repo/acvm/src/pwg/brillig.rs +++ b/acvm-repo/acvm/src/pwg/brillig.rs @@ -264,6 +264,7 @@ fn extract_failure_payload_from_memory( let error_selector = ErrorSelector::new( revert_values_iter .next() + .copied() .expect("Incorrect revert data size") .try_into() .expect("Error selector is not u64"), @@ -273,7 +274,7 @@ fn extract_failure_payload_from_memory( STRING_ERROR_SELECTOR => { // If the error selector is 0, it means the error is a string let string = revert_values_iter - .map(|memory_value| { + .map(|&memory_value| { let as_u8: u8 = memory_value.try_into().expect("String item is not u8"); as_u8 as char }) diff --git a/acvm-repo/acvm_js/Cargo.toml b/acvm-repo/acvm_js/Cargo.toml index e457b6391ab..10ab2f62fdd 100644 --- a/acvm-repo/acvm_js/Cargo.toml +++ b/acvm-repo/acvm_js/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_js" description = "Typescript wrapper around the ACVM allowing execution of ACIR code" # x-release-please-start-version -version = "0.47.0" +version = "0.48.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acvm_js/package.json b/acvm-repo/acvm_js/package.json index 6085bc0563e..fe192471744 100644 --- a/acvm-repo/acvm_js/package.json +++ b/acvm-repo/acvm_js/package.json @@ -1,6 +1,6 @@ { "name": "@noir-lang/acvm_js", - "version": "0.47.0", + "version": "0.48.0", "publishConfig": { "access": "public" }, diff --git a/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts b/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts index 8ec2ddd1cb2..82f983e407b 100644 --- a/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts +++ b/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts @@ -2,13 +2,13 @@ import { WitnessMap } from '@noir-lang/acvm_js'; // See `complex_brillig_foreign_call` integration test in `acir/tests/test_program_serialization.rs`. 
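The `extract_failure_payload_from_memory` change shown earlier copies each revert value, interprets it as a `u8`, and collects the characters into an error string. A self-contained sketch of that decoding pattern, using a simplified `MemoryValue` stand-in rather than the actual ACVM memory type (which is generic over the field and tracks bit sizes):

// Simplified stand-in for the VM memory value type; only the decoding pattern
// is meant to mirror the change above.
#[derive(Clone, Copy)]
enum MemoryValue {
    Integer(u128),
}

// Interpret a slice of revert values as an ASCII string, mirroring the
// `.map(|&memory_value| { ... as_u8 as char })` pattern above.
fn decode_string_payload(values: &[MemoryValue]) -> String {
    values
        .iter()
        .map(|value| {
            let MemoryValue::Integer(byte) = *value;
            u8::try_from(byte).expect("String item is not u8") as char
        })
        .collect()
}

fn main() {
    let payload: Vec<MemoryValue> =
        "assertion failed".bytes().map(|b| MemoryValue::Integer(b as u128)).collect();
    assert_eq!(decode_string_payload(&payload), "assertion failed");
}
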
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 75, 10, 131, 64, 12, 77, 102, 90, 43, 221, 245, 6, 133, 246, 0, 211, 158, - 192, 187, 136, 59, 69, 151, 158, 94, 116, 48, 131, 241, 233, 70, 28, 65, 3, 195, 155, 79, 62, 47, 9, 25, 166, 81, 210, - 97, 177, 236, 239, 130, 70, 208, 223, 91, 154, 75, 208, 205, 4, 221, 62, 249, 113, 60, 95, 238, 40, 142, 230, 2, 28, - 237, 1, 28, 73, 245, 255, 132, 253, 142, 217, 151, 168, 245, 179, 43, 243, 115, 163, 113, 190, 18, 57, 63, 4, 83, 44, - 180, 55, 50, 180, 28, 188, 153, 224, 196, 122, 175, 111, 112, 68, 24, 65, 116, 178, 40, 89, 254, 93, 162, 120, 48, - 196, 126, 170, 12, 243, 186, 106, 202, 162, 181, 160, 138, 84, 63, 106, 255, 133, 119, 6, 187, 14, 108, 59, 133, 250, - 243, 90, 139, 19, 238, 205, 6, 223, 47, 154, 202, 27, 74, 222, 3, 234, 73, 242, 82, 65, 5, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 75, 10, 132, 48, 12, 77, 90, 199, 145, 217, 205, 13, 6, 102, 14, 208, 241, + 4, 222, 69, 220, 41, 186, 244, 248, 90, 140, 24, 159, 5, 23, 86, 208, 7, 37, 253, 228, 243, 146, 144, 50, 77, 200, + 198, 197, 178, 127, 136, 52, 34, 253, 189, 165, 53, 102, 221, 66, 164, 59, 134, 63, 199, 243, 229, 206, 226, 104, 110, + 192, 209, 158, 192, 145, 84, 255, 47, 216, 239, 152, 125, 137, 90, 63, 27, 152, 159, 132, 166, 249, 74, 229, 252, 20, + 153, 97, 161, 189, 145, 161, 237, 224, 173, 128, 19, 235, 189, 126, 192, 17, 97, 4, 177, 75, 162, 101, 154, 187, 84, + 113, 97, 136, 255, 82, 89, 150, 109, 211, 213, 85, 111, 65, 21, 233, 126, 213, 254, 7, 239, 12, 118, 104, 171, 161, + 63, 176, 144, 46, 7, 244, 246, 124, 191, 105, 41, 241, 92, 246, 1, 235, 222, 207, 212, 69, 5, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], diff --git a/acvm-repo/acvm_js/test/shared/foreign_call.ts b/acvm-repo/acvm_js/test/shared/foreign_call.ts index 3c66ba18629..dad7c7e1568 100644 --- a/acvm-repo/acvm_js/test/shared/foreign_call.ts +++ b/acvm-repo/acvm_js/test/shared/foreign_call.ts @@ -2,10 +2,10 @@ import { WitnessMap } from '@noir-lang/acvm_js'; // See `simple_brillig_foreign_call` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 144, 193, 10, 192, 32, 8, 134, 117, 99, 99, 236, 182, 55, 105, 111, 176, 151, - 217, 161, 75, 135, 136, 30, 63, 42, 82, 144, 8, 47, 245, 65, 252, 230, 47, 162, 34, 52, 174, 242, 144, 226, 131, 148, - 255, 18, 206, 125, 164, 102, 142, 23, 215, 245, 50, 114, 222, 173, 15, 80, 38, 65, 217, 108, 39, 61, 7, 30, 115, 11, - 223, 186, 248, 251, 160, 221, 170, 146, 64, 191, 39, 215, 60, 3, 47, 3, 99, 171, 188, 84, 164, 1, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 80, 49, 10, 192, 32, 12, 52, 45, 45, 165, 155, 63, 209, 31, 248, 25, 7, 23, 7, + 17, 223, 175, 96, 2, 65, 162, 139, 30, 132, 203, 221, 65, 72, 2, 170, 227, 107, 5, 216, 63, 200, 164, 57, 200, 115, + 200, 102, 15, 22, 206, 205, 50, 124, 223, 107, 108, 128, 155, 106, 113, 217, 141, 252, 10, 25, 225, 103, 121, 136, + 197, 167, 188, 250, 213, 76, 75, 158, 22, 178, 10, 176, 188, 242, 119, 164, 1, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000005'], diff --git a/acvm-repo/blackbox_solver/Cargo.toml b/acvm-repo/blackbox_solver/Cargo.toml index 06bd3ceabef..c390c811788 100644 --- a/acvm-repo/blackbox_solver/Cargo.toml +++ b/acvm-repo/blackbox_solver/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_blackbox_solver" description = "A solver for the blackbox functions found in ACIR and Brillig" # x-release-please-start-version -version = "0.47.0" +version = "0.48.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/blackbox_solver/src/bigint.rs b/acvm-repo/blackbox_solver/src/bigint.rs index 5b19f03a238..b8bc9dc0d70 100644 --- a/acvm-repo/blackbox_solver/src/bigint.rs +++ b/acvm-repo/blackbox_solver/src/bigint.rs @@ -18,7 +18,7 @@ pub struct BigIntSolver { } impl BigIntSolver { - pub(crate) fn get_bigint( + pub fn get_bigint( &self, id: u32, func: BlackBoxFunc, @@ -32,7 +32,7 @@ impl BigIntSolver { .cloned() } - pub(crate) fn get_modulus( + pub fn get_modulus( &self, id: u32, func: BlackBoxFunc, diff --git a/acvm-repo/bn254_blackbox_solver/Cargo.toml b/acvm-repo/bn254_blackbox_solver/Cargo.toml index cc2d15aaa86..6129adada75 100644 --- a/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -2,7 +2,7 @@ name = "bn254_blackbox_solver" description = "Solvers for black box functions which are specific for the bn254 curve" # x-release-please-start-version -version = "0.47.0" +version = "0.48.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/brillig/Cargo.toml b/acvm-repo/brillig/Cargo.toml index 7c1965c8f3e..7c962964303 100644 --- a/acvm-repo/brillig/Cargo.toml +++ b/acvm-repo/brillig/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig" description = "Brillig is the bytecode ACIR uses for non-determinism." 
# x-release-please-start-version -version = "0.47.0" +version = "0.48.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/brillig/src/lib.rs b/acvm-repo/brillig/src/lib.rs index 40f2e15acfe..5bd9f898d59 100644 --- a/acvm-repo/brillig/src/lib.rs +++ b/acvm-repo/brillig/src/lib.rs @@ -19,4 +19,4 @@ pub use foreign_call::{ForeignCallParam, ForeignCallResult}; pub use opcodes::{ BinaryFieldOp, BinaryIntOp, HeapArray, HeapValueType, HeapVector, MemoryAddress, ValueOrArray, }; -pub use opcodes::{BrilligOpcode as Opcode, Label}; +pub use opcodes::{BitSize, BrilligOpcode as Opcode, IntegerBitSize, Label}; diff --git a/acvm-repo/brillig/src/opcodes.rs b/acvm-repo/brillig/src/opcodes.rs index 78c6ba8097c..fdcae01b5b5 100644 --- a/acvm-repo/brillig/src/opcodes.rs +++ b/acvm-repo/brillig/src/opcodes.rs @@ -1,5 +1,5 @@ use crate::black_box::BlackBoxOp; -use acir_field::{AcirField, FieldElement}; +use acir_field::AcirField; use serde::{Deserialize, Serialize}; pub type Label = usize; @@ -24,7 +24,7 @@ impl From for MemoryAddress { #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub enum HeapValueType { // A single field element is enough to represent the value with a given bit size - Simple(u32), + Simple(BitSize), // The value read should be interpreted as a pointer to a heap array, which // consists of a pointer to a slice of memory of size elements, and a // reference count @@ -41,7 +41,7 @@ impl HeapValueType { } pub fn field() -> HeapValueType { - HeapValueType::Simple(FieldElement::max_num_bits()) + HeapValueType::Simple(BitSize::Field) } } @@ -65,6 +65,85 @@ pub struct HeapVector { pub size: MemoryAddress, } +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord)] +pub enum IntegerBitSize { + U0, // Uninitialized + U1, + U8, + U16, + U32, + U64, + U128, +} + +impl From for u32 { + fn from(bit_size: IntegerBitSize) -> u32 { + match bit_size { + IntegerBitSize::U0 => 0, + IntegerBitSize::U1 => 1, + IntegerBitSize::U8 => 8, + IntegerBitSize::U16 => 16, + IntegerBitSize::U32 => 32, + IntegerBitSize::U64 => 64, + IntegerBitSize::U128 => 128, + } + } +} + +impl TryFrom for IntegerBitSize { + type Error = &'static str; + + fn try_from(value: u32) -> Result { + match value { + 0 => Ok(IntegerBitSize::U0), + 1 => Ok(IntegerBitSize::U1), + 8 => Ok(IntegerBitSize::U8), + 16 => Ok(IntegerBitSize::U16), + 32 => Ok(IntegerBitSize::U32), + 64 => Ok(IntegerBitSize::U64), + 128 => Ok(IntegerBitSize::U128), + _ => Err("Invalid bit size"), + } + } +} + +impl std::fmt::Display for IntegerBitSize { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + IntegerBitSize::U0 => write!(f, "null"), + IntegerBitSize::U1 => write!(f, "bool"), + IntegerBitSize::U8 => write!(f, "u8"), + IntegerBitSize::U16 => write!(f, "u16"), + IntegerBitSize::U32 => write!(f, "u32"), + IntegerBitSize::U64 => write!(f, "u64"), + IntegerBitSize::U128 => write!(f, "u128"), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord)] +pub enum BitSize { + Field, + Integer(IntegerBitSize), +} + +impl BitSize { + pub fn to_u32(self) -> u32 { + match self { + BitSize::Field => F::max_num_bits(), + BitSize::Integer(bit_size) => bit_size.into(), + } + } + + pub fn try_from_u32(value: u32) -> Result { + if value == F::max_num_bits() { + Ok(BitSize::Field) + } else { + Ok(BitSize::Integer(IntegerBitSize::try_from(value)?)) + } + } +} + /// Lays out various ways an external foreign call's input 
and output data may be interpreted inside Brillig. /// This data can either be an individual value or memory. /// @@ -105,14 +184,14 @@ pub enum BrilligOpcode { BinaryIntOp { destination: MemoryAddress, op: BinaryIntOp, - bit_size: u32, + bit_size: IntegerBitSize, lhs: MemoryAddress, rhs: MemoryAddress, }, Cast { destination: MemoryAddress, source: MemoryAddress, - bit_size: u32, + bit_size: BitSize, }, JumpIfNot { condition: MemoryAddress, @@ -141,7 +220,7 @@ pub enum BrilligOpcode { }, Const { destination: MemoryAddress, - bit_size: u32, + bit_size: BitSize, value: F, }, Return, diff --git a/acvm-repo/brillig_vm/Cargo.toml b/acvm-repo/brillig_vm/Cargo.toml index d048d625083..68ac094bac8 100644 --- a/acvm-repo/brillig_vm/Cargo.toml +++ b/acvm-repo/brillig_vm/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig_vm" description = "The virtual machine that processes Brillig bytecode, used to introduce non-determinism to the ACVM" # x-release-please-start-version -version = "0.47.0" +version = "0.48.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/brillig_vm/src/arithmetic.rs b/acvm-repo/brillig_vm/src/arithmetic.rs index c88e06e2b94..7cd31cd6443 100644 --- a/acvm-repo/brillig_vm/src/arithmetic.rs +++ b/acvm-repo/brillig_vm/src/arithmetic.rs @@ -1,8 +1,6 @@ -use acir::brillig::{BinaryFieldOp, BinaryIntOp}; +use acir::brillig::{BinaryFieldOp, BinaryIntOp, IntegerBitSize}; use acir::AcirField; use num_bigint::BigUint; -use num_traits::ToPrimitive; -use num_traits::{One, Zero}; use crate::memory::{MemoryTypeError, MemoryValue}; @@ -12,10 +10,8 @@ pub(crate) enum BrilligArithmeticError { MismatchedLhsBitSize { lhs_bit_size: u32, op_bit_size: u32 }, #[error("Bit size for rhs {rhs_bit_size} does not match op bit size {op_bit_size}")] MismatchedRhsBitSize { rhs_bit_size: u32, op_bit_size: u32 }, - #[error("Integer operation BinaryIntOp::{op:?} is not supported on FieldElement")] - IntegerOperationOnField { op: BinaryIntOp }, - #[error("Shift with bit size {op_bit_size} is invalid")] - InvalidShift { op_bit_size: u32 }, + #[error("Attempted to divide by zero")] + DivisionByZero, } /// Evaluate a binary operation on two FieldElement memory values. 
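// [Editor's note] Illustrative aside, not part of the patch: with the error variants
// above, a zero divisor in the field `IntegerDiv` path below and in the integer `Div`
// paths now surfaces as `BrilligArithmeticError::DivisionByZero` instead of the old
// unchecked / silent-zero behaviour. A hypothetical in-crate check, using only names
// that appear in this file:
//
//     let one = MemoryValue::new_field(FieldElement::from(1_u128));
//     let zero = MemoryValue::new_field(FieldElement::from(0_u128));
//     assert!(matches!(
//         evaluate_binary_field_op(&BinaryFieldOp::IntegerDiv, one, zero),
//         Err(BrilligArithmeticError::DivisionByZero)
//     ));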
@@ -24,17 +20,23 @@ pub(crate) fn evaluate_binary_field_op( lhs: MemoryValue, rhs: MemoryValue, ) -> Result, BrilligArithmeticError> { - let MemoryValue::Field(a) = lhs else { - return Err(BrilligArithmeticError::MismatchedLhsBitSize { - lhs_bit_size: lhs.bit_size(), - op_bit_size: F::max_num_bits(), - }); + let a = match lhs { + MemoryValue::Field(a) => a, + MemoryValue::Integer(_, bit_size) => { + return Err(BrilligArithmeticError::MismatchedLhsBitSize { + lhs_bit_size: bit_size.into(), + op_bit_size: F::max_num_bits(), + }); + } }; - let MemoryValue::Field(b) = rhs else { - return Err(BrilligArithmeticError::MismatchedLhsBitSize { - lhs_bit_size: rhs.bit_size(), - op_bit_size: F::max_num_bits(), - }); + let b = match rhs { + MemoryValue::Field(b) => b, + MemoryValue::Integer(_, bit_size) => { + return Err(BrilligArithmeticError::MismatchedRhsBitSize { + rhs_bit_size: bit_size.into(), + op_bit_size: F::max_num_bits(), + }); + } }; Ok(match op { @@ -44,11 +46,15 @@ pub(crate) fn evaluate_binary_field_op( BinaryFieldOp::Mul => MemoryValue::new_field(a * b), BinaryFieldOp::Div => MemoryValue::new_field(a / b), BinaryFieldOp::IntegerDiv => { - let a_big = BigUint::from_bytes_be(&a.to_be_bytes()); - let b_big = BigUint::from_bytes_be(&b.to_be_bytes()); + if b.is_zero() { + return Err(BrilligArithmeticError::DivisionByZero); + } else { + let a_big = BigUint::from_bytes_be(&a.to_be_bytes()); + let b_big = BigUint::from_bytes_be(&b.to_be_bytes()); - let result = a_big / b_big; - MemoryValue::new_field(F::from_be_bytes_reduce(&result.to_bytes_be())) + let result = a_big / b_big; + MemoryValue::new_field(F::from_be_bytes_reduce(&result.to_bytes_be())) + } } BinaryFieldOp::Equals => (a == b).into(), BinaryFieldOp::LessThan => (a < b).into(), @@ -61,7 +67,7 @@ pub(crate) fn evaluate_binary_int_op( op: &BinaryIntOp, lhs: MemoryValue, rhs: MemoryValue, - bit_size: u32, + bit_size: IntegerBitSize, ) -> Result, BrilligArithmeticError> { let lhs = lhs.expect_integer_with_bit_size(bit_size).map_err(|err| match err { MemoryTypeError::MismatchedBitSize { value_bit_size, expected_bit_size } => { @@ -71,8 +77,13 @@ pub(crate) fn evaluate_binary_int_op( } } })?; - let rhs_bit_size = - if op == &BinaryIntOp::Shl || op == &BinaryIntOp::Shr { 8 } else { bit_size }; + + let rhs_bit_size = if op == &BinaryIntOp::Shl || op == &BinaryIntOp::Shr { + IntegerBitSize::U8 + } else { + bit_size + }; + let rhs = rhs.expect_integer_with_bit_size(rhs_bit_size).map_err(|err| match err { MemoryTypeError::MismatchedBitSize { value_bit_size, expected_bit_size } => { BrilligArithmeticError::MismatchedRhsBitSize { @@ -82,74 +93,107 @@ pub(crate) fn evaluate_binary_int_op( } })?; - if bit_size == F::max_num_bits() { - return Err(BrilligArithmeticError::IntegerOperationOnField { op: *op }); - } + let result = if bit_size == IntegerBitSize::U128 { + evaluate_binary_int_op_128(op, lhs, rhs)? + } else { + evaluate_binary_int_op_generic(op, lhs, rhs, bit_size)? + }; - let bit_modulo = &(BigUint::one() << bit_size); + Ok(match op { + BinaryIntOp::Equals | BinaryIntOp::LessThan | BinaryIntOp::LessThanEquals => { + MemoryValue::new_integer(result, IntegerBitSize::U1) + } + _ => MemoryValue::new_integer(result, bit_size), + }) +} + +fn evaluate_binary_int_op_128( + op: &BinaryIntOp, + lhs: u128, + rhs: u128, +) -> Result { let result = match op { - // Perform addition, subtraction, and multiplication, applying a modulo operation to keep the result within the bit size. 
- BinaryIntOp::Add => (lhs + rhs) % bit_modulo, - BinaryIntOp::Sub => (bit_modulo + lhs - rhs) % bit_modulo, - BinaryIntOp::Mul => (lhs * rhs) % bit_modulo, - // Perform unsigned division using the modulo operation on a and b. + BinaryIntOp::Add => lhs.wrapping_add(rhs), + BinaryIntOp::Sub => lhs.wrapping_sub(rhs), + BinaryIntOp::Mul => lhs.wrapping_mul(rhs), BinaryIntOp::Div => { - if rhs.is_zero() { - BigUint::zero() + if rhs == 0 { + return Err(BrilligArithmeticError::DivisionByZero); } else { lhs / rhs } } - // Perform a == operation, returning 0 or 1 - BinaryIntOp::Equals => { - if lhs == rhs { - BigUint::one() + BinaryIntOp::Equals => (lhs == rhs) as u128, + BinaryIntOp::LessThan => (lhs < rhs) as u128, + BinaryIntOp::LessThanEquals => (lhs <= rhs) as u128, + BinaryIntOp::And => lhs & rhs, + BinaryIntOp::Or => lhs | rhs, + BinaryIntOp::Xor => lhs ^ rhs, + BinaryIntOp::Shl => { + if rhs >= 128 { + 0 } else { - BigUint::zero() + lhs.wrapping_shl(rhs as u32) } } - // Perform a < operation, returning 0 or 1 - BinaryIntOp::LessThan => { - if lhs < rhs { - BigUint::one() + BinaryIntOp::Shr => { + if rhs >= 128 { + 0 } else { - BigUint::zero() + lhs.wrapping_shr(rhs as u32) } } - // Perform a <= operation, returning 0 or 1 - BinaryIntOp::LessThanEquals => { - if lhs <= rhs { - BigUint::one() + }; + Ok(result) +} + +fn evaluate_binary_int_op_generic( + op: &BinaryIntOp, + lhs: u128, + rhs: u128, + bit_size: IntegerBitSize, +) -> Result { + let bit_size: u32 = bit_size.into(); + let bit_modulo = 1 << bit_size; + let result = match op { + // Perform addition, subtraction, and multiplication, applying a modulo operation to keep the result within the bit size. + BinaryIntOp::Add => (lhs + rhs) % bit_modulo, + BinaryIntOp::Sub => (bit_modulo + lhs - rhs) % bit_modulo, + BinaryIntOp::Mul => (lhs * rhs) % bit_modulo, + // Perform unsigned division using the modulo operation on a and b. + BinaryIntOp::Div => { + if rhs == 0 { + return Err(BrilligArithmeticError::DivisionByZero); } else { - BigUint::zero() + lhs / rhs } } + // Perform a == operation, returning 0 or 1 + BinaryIntOp::Equals => (lhs == rhs) as u128, + // Perform a < operation, returning 0 or 1 + BinaryIntOp::LessThan => (lhs < rhs) as u128, + // Perform a <= operation, returning 0 or 1 + BinaryIntOp::LessThanEquals => (lhs <= rhs) as u128, // Perform bitwise AND, OR, XOR, left shift, and right shift operations, applying a modulo operation to keep the result within the bit size. 
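// Out-of-range shifts are defined to zero: per the `Shl`/`Shr` arms below, a shift by
// `rhs >= bit_size` produces 0 rather than erroring (the old `InvalidShift` variant was
// removed). For example, with `bit_size` = 8, `1 << 7` gives 128 while a shift by 8 or
// more gives 0.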
BinaryIntOp::And => lhs & rhs, BinaryIntOp::Or => lhs | rhs, BinaryIntOp::Xor => lhs ^ rhs, BinaryIntOp::Shl => { - if bit_size > 128 { - return Err(BrilligArithmeticError::InvalidShift { op_bit_size: bit_size }); + if rhs >= (bit_size as u128) { + 0 + } else { + (lhs << rhs) % bit_modulo } - let rhs = rhs.to_u128().unwrap(); - (lhs << rhs) % bit_modulo } BinaryIntOp::Shr => { - if bit_size > 128 { - return Err(BrilligArithmeticError::InvalidShift { op_bit_size: bit_size }); + if rhs >= (bit_size as u128) { + 0 + } else { + lhs >> rhs } - let rhs = rhs.to_u128().unwrap(); - lhs >> rhs } }; - - Ok(match op { - BinaryIntOp::Equals | BinaryIntOp::LessThan | BinaryIntOp::LessThanEquals => { - MemoryValue::new_integer(result, 1) - } - _ => MemoryValue::new_integer(result, bit_size), - }) + Ok(result) } #[cfg(test)] @@ -163,11 +207,11 @@ mod tests { result: u128, } - fn evaluate_u128(op: &BinaryIntOp, a: u128, b: u128, bit_size: u32) -> u128 { + fn evaluate_u128(op: &BinaryIntOp, a: u128, b: u128, bit_size: IntegerBitSize) -> u128 { let result_value: MemoryValue = evaluate_binary_int_op( op, - MemoryValue::new_integer(a.into(), bit_size), - MemoryValue::new_integer(b.into(), bit_size), + MemoryValue::new_integer(a, bit_size), + MemoryValue::new_integer(b, bit_size), bit_size, ) .unwrap(); @@ -175,13 +219,17 @@ mod tests { result_value.to_field().to_u128() } - fn to_negative(a: u128, bit_size: u32) -> u128 { + fn to_negative(a: u128, bit_size: IntegerBitSize) -> u128 { assert!(a > 0); - let two_pow = 2_u128.pow(bit_size); - two_pow - a + if bit_size == IntegerBitSize::U128 { + 0_u128.wrapping_sub(a) + } else { + let two_pow = 2_u128.pow(bit_size.into()); + two_pow - a + } } - fn evaluate_int_ops(test_params: Vec, op: BinaryIntOp, bit_size: u32) { + fn evaluate_int_ops(test_params: Vec, op: BinaryIntOp, bit_size: IntegerBitSize) { for test in test_params { assert_eq!(evaluate_u128(&op, test.a, test.b, bit_size), test.result); } @@ -189,64 +237,83 @@ mod tests { #[test] fn add_test() { - let bit_size = 4; + let bit_size = IntegerBitSize::U8; let test_ops = vec![ - TestParams { a: 5, b: 10, result: 15 }, - TestParams { a: 10, b: 10, result: 4 }, + TestParams { a: 50, b: 100, result: 150 }, + TestParams { a: 250, b: 10, result: 4 }, TestParams { a: 5, b: to_negative(3, bit_size), result: 2 }, TestParams { a: to_negative(3, bit_size), b: 1, result: to_negative(2, bit_size) }, TestParams { a: 5, b: to_negative(6, bit_size), result: to_negative(1, bit_size) }, ]; + evaluate_int_ops(test_ops, BinaryIntOp::Add, bit_size); + + let bit_size = IntegerBitSize::U128; + let test_ops = vec![ + TestParams { a: 5, b: to_negative(3, bit_size), result: 2 }, + TestParams { a: to_negative(3, bit_size), b: 1, result: to_negative(2, bit_size) }, + ]; evaluate_int_ops(test_ops, BinaryIntOp::Add, bit_size); } #[test] fn sub_test() { - let bit_size = 4; + let bit_size = IntegerBitSize::U8; let test_ops = vec![ - TestParams { a: 5, b: 3, result: 2 }, + TestParams { a: 50, b: 30, result: 20 }, TestParams { a: 5, b: 10, result: to_negative(5, bit_size) }, TestParams { a: 5, b: to_negative(3, bit_size), result: 8 }, TestParams { a: to_negative(3, bit_size), b: 2, result: to_negative(5, bit_size) }, - TestParams { a: 14, b: to_negative(3, bit_size), result: 1 }, + TestParams { a: 254, b: to_negative(3, bit_size), result: 1 }, ]; + evaluate_int_ops(test_ops, BinaryIntOp::Sub, bit_size); + let bit_size = IntegerBitSize::U128; + + let test_ops = vec![ + TestParams { a: 5, b: 10, result: to_negative(5, bit_size) }, + TestParams { 
a: to_negative(3, bit_size), b: 2, result: to_negative(5, bit_size) }, + ]; evaluate_int_ops(test_ops, BinaryIntOp::Sub, bit_size); } #[test] fn mul_test() { - let bit_size = 4; + let bit_size = IntegerBitSize::U8; let test_ops = vec![ TestParams { a: 5, b: 3, result: 15 }, - TestParams { a: 5, b: 10, result: 2 }, + TestParams { a: 5, b: 100, result: 244 }, TestParams { a: to_negative(1, bit_size), b: to_negative(5, bit_size), result: 5 }, TestParams { a: to_negative(1, bit_size), b: 5, result: to_negative(5, bit_size) }, - TestParams { - a: to_negative(2, bit_size), - b: 7, - // negative 14 wraps to a 2 - result: to_negative(14, bit_size), - }, + TestParams { a: to_negative(2, bit_size), b: 7, result: to_negative(14, bit_size) }, ]; evaluate_int_ops(test_ops, BinaryIntOp::Mul, bit_size); - let bit_size = 127; - let a = 2_u128.pow(bit_size) - 1; + let bit_size = IntegerBitSize::U64; + let a = 2_u128.pow(bit_size.into()) - 1; let b = 3; // ( 2**(n-1) - 1 ) * 3 = 2*2**(n-1) - 2 + (2**(n-1) - 1) => wraps to (2**(n-1) - 1) - 2 assert_eq!(evaluate_u128(&BinaryIntOp::Mul, a, b, bit_size), a - 2); + + let bit_size = IntegerBitSize::U128; + + let test_ops = vec![ + TestParams { a: to_negative(1, bit_size), b: to_negative(5, bit_size), result: 5 }, + TestParams { a: to_negative(1, bit_size), b: 5, result: to_negative(5, bit_size) }, + TestParams { a: to_negative(2, bit_size), b: 7, result: to_negative(14, bit_size) }, + ]; + + evaluate_int_ops(test_ops, BinaryIntOp::Mul, bit_size); } #[test] fn div_test() { - let bit_size = 4; + let bit_size = IntegerBitSize::U8; let test_ops = vec![TestParams { a: 5, b: 3, result: 1 }, TestParams { a: 5, b: 10, result: 0 }]; diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index 53599f79bc7..b49757944ad 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -28,7 +28,7 @@ fn read_heap_array<'a, F: AcirField>( /// Extracts the last byte of every value fn to_u8_vec(inputs: &[MemoryValue]) -> Vec { let mut result = Vec::with_capacity(inputs.len()); - for input in inputs { + for &input in inputs { result.push(input.try_into().unwrap()); } result @@ -91,7 +91,7 @@ pub(crate) fn evaluate_black_box BlackBoxOp::Keccakf1600 { message, output } => { let state_vec: Vec = read_heap_vector(memory, message) .iter() - .map(|memory_value| memory_value.try_into().unwrap()) + .map(|&memory_value| memory_value.try_into().unwrap()) .collect(); let state: [u64; 25] = state_vec.try_into().unwrap(); @@ -166,7 +166,7 @@ pub(crate) fn evaluate_black_box let points: Vec = read_heap_vector(memory, points) .iter() .enumerate() - .map(|(i, x)| { + .map(|(i, &x)| { if i % 3 == 2 { let is_infinite: bool = x.try_into().unwrap(); F::from(is_infinite as u128) @@ -301,9 +301,9 @@ pub(crate) fn evaluate_black_box } BlackBoxOp::BigIntFromLeBytes { inputs, modulus, output } => { let input = read_heap_vector(memory, inputs); - let input: Vec = input.iter().map(|x| x.try_into().unwrap()).collect(); + let input: Vec = input.iter().map(|&x| x.try_into().unwrap()).collect(); let modulus = read_heap_vector(memory, modulus); - let modulus: Vec = modulus.iter().map(|x| x.try_into().unwrap()).collect(); + let modulus: Vec = modulus.iter().map(|&x| x.try_into().unwrap()).collect(); let new_id = bigint_solver.bigint_from_bytes(&input, &modulus)?; memory.write(*output, new_id.into()); @@ -345,7 +345,7 @@ pub(crate) fn evaluate_black_box format!("Expected 16 inputs but encountered {}", &inputs.len()), )); } - for (i, input) in 
inputs.iter().enumerate() { + for (i, &input) in inputs.iter().enumerate() { message[i] = input.try_into().unwrap(); } let mut state = [0; 8]; @@ -356,7 +356,7 @@ pub(crate) fn evaluate_black_box format!("Expected 8 values but encountered {}", &values.len()), )); } - for (i, value) in values.iter().enumerate() { + for (i, &value) in values.iter().enumerate() { state[i] = value.try_into().unwrap(); } @@ -421,6 +421,14 @@ impl BrilligBigintSolver { rhs: u32, func: BlackBoxFunc, ) -> Result { + let modulus_lhs = self.bigint_solver.get_modulus(lhs, func)?; + let modulus_rhs = self.bigint_solver.get_modulus(rhs, func)?; + if modulus_lhs != modulus_rhs { + return Err(BlackBoxResolutionError::Failed( + func, + "moduli should be identical in BigInt operation".to_string(), + )); + } let id = self.create_bigint_id(); self.bigint_solver.bigint_op(lhs, rhs, id, func)?; Ok(id) diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index 4d2dd2b8333..936ad120335 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -12,14 +12,13 @@ //! [acvm]: https://crates.io/crates/acvm use acir::brillig::{ - BinaryFieldOp, BinaryIntOp, ForeignCallParam, ForeignCallResult, HeapArray, HeapValueType, - HeapVector, MemoryAddress, Opcode, ValueOrArray, + BinaryFieldOp, BinaryIntOp, BitSize, ForeignCallParam, ForeignCallResult, HeapArray, + HeapValueType, HeapVector, IntegerBitSize, MemoryAddress, Opcode, ValueOrArray, }; use acir::AcirField; use acvm_blackbox_solver::BlackBoxFunctionSolver; use arithmetic::{evaluate_binary_field_op, evaluate_binary_int_op, BrilligArithmeticError}; use black_box::{evaluate_black_box, BrilligBigintSolver}; -use num_bigint::BigUint; // Re-export `brillig`. pub use acir::brillig; @@ -557,7 +556,7 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> VM<'a, F, B> { &mut self, destination: MemoryAddress, value: &F, - value_bit_size: u32, + value_bit_size: BitSize, ) -> Result<(), String> { let memory_value = MemoryValue::new_checked(*value, value_bit_size); @@ -565,7 +564,7 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> VM<'a, F, B> { self.memory.write(destination, memory_value); } else { return Err(format!( - "Foreign call result value {} does not fit in bit size {}", + "Foreign call result value {} does not fit in bit size {:?}", value, value_bit_size )); } @@ -689,7 +688,7 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> VM<'a, F, B> { fn process_binary_int_op( &mut self, op: BinaryIntOp, - bit_size: u32, + bit_size: IntegerBitSize, lhs: MemoryAddress, rhs: MemoryAddress, result: MemoryAddress, @@ -703,18 +702,46 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> VM<'a, F, B> { } /// Casts a value to a different bit size. 
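// [Editor's note] A sketch of the semantics of the rewritten `cast` below (illustrative,
// assuming a concrete `AcirField` such as `FieldElement`):
//   - Field -> Field: no-op.
//   - Field -> Integer(n): take the low 128 bits, then mask to n bits (the dedicated
//     U128 arm skips the mask), e.g. a field value of 300 cast to U8 becomes 300 & 0xff == 44.
//   - Integer -> Field: lossless widening via `F::from`.
//   - Integer(m) -> Integer(n) with m <= n: value kept as-is, retagged to n bits.
//   - Integer(m) -> Integer(n) with m > n: value masked to the low n bits.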
- fn cast(&self, bit_size: u32, source_value: MemoryValue) -> MemoryValue { - let lhs_big = source_value.to_integer(); - let mask = BigUint::from(2_u32).pow(bit_size) - 1_u32; - MemoryValue::new_from_integer(lhs_big & mask, bit_size) + fn cast(&self, target_bit_size: BitSize, source_value: MemoryValue) -> MemoryValue { + match (source_value, target_bit_size) { + // Field to field, no op + (MemoryValue::Field(_), BitSize::Field) => source_value, + // Field downcast to u128 + (MemoryValue::Field(field), BitSize::Integer(IntegerBitSize::U128)) => { + MemoryValue::Integer(field.to_u128(), IntegerBitSize::U128) + } + // Field downcast to arbitrary bit size + (MemoryValue::Field(field), BitSize::Integer(target_bit_size)) => { + let as_u128 = field.to_u128(); + let target_bit_size_u32: u32 = target_bit_size.into(); + let mask = (1_u128 << target_bit_size_u32) - 1; + MemoryValue::Integer(as_u128 & mask, target_bit_size) + } + // Integer upcast to field + (MemoryValue::Integer(integer, _), BitSize::Field) => { + MemoryValue::new_field(integer.into()) + } + // Integer upcast to integer + (MemoryValue::Integer(integer, source_bit_size), BitSize::Integer(target_bit_size)) + if source_bit_size <= target_bit_size => + { + MemoryValue::Integer(integer, target_bit_size) + } + // Integer downcast + (MemoryValue::Integer(integer, _), BitSize::Integer(target_bit_size)) => { + let target_bit_size_u32: u32 = target_bit_size.into(); + let mask = (1_u128 << target_bit_size_u32) - 1; + MemoryValue::Integer(integer & mask, target_bit_size) + } + } } } #[cfg(test)] mod tests { + use crate::memory::MEMORY_ADDRESSING_BIT_SIZE; use acir::{AcirField, FieldElement}; use acvm_blackbox_solver::StubbedBlackBoxSolver; - const BRILLIG_MEMORY_ADDRESSING_BIT_SIZE: u32 = 32; use super::*; @@ -877,7 +904,7 @@ mod tests { Opcode::Cast { destination: MemoryAddress::from(1), source: MemoryAddress::from(0), - bit_size: 8, + bit_size: BitSize::Integer(IntegerBitSize::U8), }, Opcode::Stop { return_data_offset: 1, return_data_size: 1 }, ]; @@ -943,13 +970,13 @@ mod tests { let cast_zero = Opcode::Cast { destination: MemoryAddress::from(0), source: MemoryAddress::from(0), - bit_size: 1, + bit_size: BitSize::Integer(IntegerBitSize::U1), }; let cast_one = Opcode::Cast { destination: MemoryAddress::from(1), source: MemoryAddress::from(1), - bit_size: 1, + bit_size: BitSize::Integer(IntegerBitSize::U1), }; let opcodes = &[ @@ -997,7 +1024,7 @@ mod tests { #[test] fn cmp_binary_ops() { - let bit_size = BRILLIG_MEMORY_ADDRESSING_BIT_SIZE; + let bit_size = MEMORY_ADDRESSING_BIT_SIZE; let calldata: Vec = vec![(2u128).into(), (2u128).into(), (0u128).into(), (5u128).into(), (6u128).into()]; let calldata_size = calldata.len(); @@ -1012,7 +1039,7 @@ mod tests { .map(|index| Opcode::Cast { destination: MemoryAddress::from(index), source: MemoryAddress::from(index), - bit_size, + bit_size: BitSize::Integer(bit_size), }) .collect(); @@ -1099,7 +1126,8 @@ mod tests { /// i += 1; /// } fn brillig_write_memory(item_count: usize) -> Vec> { - let bit_size = BRILLIG_MEMORY_ADDRESSING_BIT_SIZE; + let integer_bit_size = MEMORY_ADDRESSING_BIT_SIZE; + let bit_size = BitSize::Integer(integer_bit_size); let r_i = MemoryAddress::from(0); let r_len = MemoryAddress::from(1); let r_tmp = MemoryAddress::from(2); @@ -1124,7 +1152,7 @@ mod tests { lhs: r_i, op: BinaryIntOp::Add, rhs: r_tmp, - bit_size, + bit_size: integer_bit_size, }, // pointer = pointer + 1 Opcode::BinaryIntOp { @@ -1132,7 +1160,7 @@ mod tests { lhs: r_pointer, op: BinaryIntOp::Add, rhs: r_tmp, - 
bit_size, + bit_size: integer_bit_size, }, // tmp = i < len Opcode::BinaryIntOp { @@ -1140,7 +1168,7 @@ mod tests { lhs: r_i, op: BinaryIntOp::LessThan, rhs: r_len, - bit_size, + bit_size: integer_bit_size, }, // if tmp != 0 goto loop_body Opcode::JumpIf { condition: r_tmp, location: start.len() }, @@ -1172,7 +1200,7 @@ mod tests { /// i += 1; /// } fn brillig_sum_memory(memory: Vec) -> FieldElement { - let bit_size = 32; + let bit_size = IntegerBitSize::U32; let r_i = MemoryAddress::from(0); let r_len = MemoryAddress::from(1); let r_sum = MemoryAddress::from(2); @@ -1181,17 +1209,25 @@ mod tests { let start: [Opcode; 5] = [ // sum = 0 + Opcode::Const { destination: r_sum, value: 0u128.into(), bit_size: BitSize::Field }, + // i = 0 Opcode::Const { - destination: r_sum, + destination: r_i, value: 0u128.into(), - bit_size: FieldElement::max_num_bits(), + bit_size: BitSize::Integer(bit_size), }, - // i = 0 - Opcode::Const { destination: r_i, value: 0u128.into(), bit_size }, // len = array.len() (approximation) - Opcode::Const { destination: r_len, value: memory.len().into(), bit_size }, + Opcode::Const { + destination: r_len, + value: memory.len().into(), + bit_size: BitSize::Integer(bit_size), + }, // pointer = array_ptr - Opcode::Const { destination: r_pointer, value: 5u128.into(), bit_size }, + Opcode::Const { + destination: r_pointer, + value: 5u128.into(), + bit_size: BitSize::Integer(bit_size), + }, Opcode::CalldataCopy { destination_address: MemoryAddress(5), size: memory.len(), @@ -1209,7 +1245,11 @@ mod tests { rhs: r_tmp, }, // tmp = 1 - Opcode::Const { destination: r_tmp, value: 1u128.into(), bit_size }, + Opcode::Const { + destination: r_tmp, + value: 1u128.into(), + bit_size: BitSize::Integer(bit_size), + }, // i = i + 1 (tmp) Opcode::BinaryIntOp { destination: r_i, @@ -1268,7 +1308,8 @@ mod tests { /// } /// Note we represent a 100% in-stack optimized form in brillig fn brillig_recursive_write_memory(size: usize) -> Vec> { - let bit_size = BRILLIG_MEMORY_ADDRESSING_BIT_SIZE; + let integer_bit_size = MEMORY_ADDRESSING_BIT_SIZE; + let bit_size = BitSize::Integer(integer_bit_size); let r_i = MemoryAddress::from(0); let r_len = MemoryAddress::from(1); let r_tmp = MemoryAddress::from(2); @@ -1296,7 +1337,7 @@ mod tests { lhs: r_len, op: BinaryIntOp::LessThanEquals, rhs: r_i, - bit_size, + bit_size: integer_bit_size, }, // if !tmp, goto end Opcode::JumpIf { @@ -1313,7 +1354,7 @@ mod tests { lhs: r_i, op: BinaryIntOp::Add, rhs: r_tmp, - bit_size, + bit_size: integer_bit_size, }, // pointer = pointer + 1 Opcode::BinaryIntOp { @@ -1321,7 +1362,7 @@ mod tests { lhs: r_pointer, op: BinaryIntOp::Add, rhs: r_tmp, - bit_size, + bit_size: integer_bit_size, }, // call recursive_fn Opcode::Call { location: start.len() }, @@ -1374,15 +1415,19 @@ mod tests { Opcode::Const { destination: r_input, value: (5u128).into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // Call foreign function "double" with the input address Opcode::ForeignCall { function: "double".into(), destinations: vec![ValueOrArray::MemoryAddress(r_result)], - destination_value_types: vec![HeapValueType::Simple(32)], + destination_value_types: vec![HeapValueType::Simple(BitSize::Integer( + MEMORY_ADDRESSING_BIT_SIZE, + ))], inputs: vec![ValueOrArray::MemoryAddress(r_input)], - input_value_types: vec![HeapValueType::Simple(32)], + input_value_types: vec![HeapValueType::Simple(BitSize::Integer( + MEMORY_ADDRESSING_BIT_SIZE, + ))], }, ]; @@ -1439,13 +1484,13 @@ mod tests 
{ Opcode::Const { destination: r_input, value: 2_usize.into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // output = 0 Opcode::Const { destination: r_output, value: 2_usize.into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // *output = matrix_2x2_transpose(*input) Opcode::ForeignCall { @@ -1529,25 +1574,25 @@ mod tests { Opcode::Const { destination: r_input_pointer, value: (4u128).into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // input_size = input_string.len() (constant here) Opcode::Const { destination: r_input_size, value: input_string.len().into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // output_pointer = 4 + input_size Opcode::Const { destination: r_output_pointer, value: (4 + input_string.len()).into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // output_size = input_size * 2 Opcode::Const { destination: r_output_size, value: (input_string.len() * 2).into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // output_pointer[0..output_size] = string_double(input_pointer[0...input_size]) Opcode::ForeignCall { @@ -1627,13 +1672,13 @@ mod tests { Opcode::Const { destination: r_input, value: (2u128).into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // output = 0 Opcode::Const { destination: r_output, value: (6u128).into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // *output = matrix_2x2_transpose(*input) Opcode::ForeignCall { @@ -1726,19 +1771,19 @@ mod tests { Opcode::Const { destination: r_input_a, value: (3u128).into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // input = 7 Opcode::Const { destination: r_input_b, value: (7u128).into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // output = 0 Opcode::Const { destination: r_output, value: (0u128).into(), - bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + bit_size: BitSize::Integer(MEMORY_ADDRESSING_BIT_SIZE), }, // *output = matrix_2x2_transpose(*input) Opcode::ForeignCall { @@ -1853,7 +1898,7 @@ mod tests { let input_array_value_types: Vec = vec![ HeapValueType::field(), - HeapValueType::Simple(64), // size of following vector + HeapValueType::Simple(BitSize::Integer(IntegerBitSize::U64)), // size of following vector HeapValueType::Vector { value_types: vec![HeapValueType::field()] }, HeapValueType::Array { value_types: vec![HeapValueType::field()], size: 1 }, ]; @@ -1876,7 +1921,11 @@ mod tests { })) .chain(vec![ // input = 0 - Opcode::Const { destination: r_input, value: (outer_ptr).into(), bit_size: 32 }, + Opcode::Const { + destination: r_input, + value: (outer_ptr).into(), + bit_size: BitSize::Integer(IntegerBitSize::U32), + }, // some_function(input) Opcode::ForeignCall { function: "flat_sum".into(), diff --git a/acvm-repo/brillig_vm/src/memory.rs b/acvm-repo/brillig_vm/src/memory.rs index 95e28f7d863..ef1e0301387 100644 --- a/acvm-repo/brillig_vm/src/memory.rs +++ b/acvm-repo/brillig_vm/src/memory.rs @@ -1,13 +1,15 @@ -use acir::{brillig::MemoryAddress, AcirField}; -use 
num_bigint::BigUint; +use acir::{ + brillig::{BitSize, IntegerBitSize, MemoryAddress}, + AcirField, +}; use num_traits::{One, Zero}; -pub const MEMORY_ADDRESSING_BIT_SIZE: u32 = 32; +pub const MEMORY_ADDRESSING_BIT_SIZE: IntegerBitSize = IntegerBitSize::U32; -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub enum MemoryValue { Field(F), - Integer(BigUint, u32), + Integer(u128, IntegerBitSize), } #[derive(Debug, thiserror::Error)] @@ -22,6 +24,11 @@ impl MemoryValue { MemoryValue::Field(value) } + /// Builds an integer-typed memory value. + pub fn new_integer(value: u128, bit_size: IntegerBitSize) -> Self { + MemoryValue::Integer(value, bit_size) + } + /// Extracts the field element from the memory value, if it is typed as field element. pub fn extract_field(&self) -> Option<&F> { match self { @@ -31,86 +38,63 @@ impl MemoryValue { } /// Extracts the integer from the memory value, if it is typed as integer. - pub fn extract_integer(&self) -> Option<(&BigUint, u32)> { + pub fn extract_integer(&self) -> Option<(u128, IntegerBitSize)> { match self { - MemoryValue::Integer(value, bit_size) => Some((value, *bit_size)), + MemoryValue::Integer(value, bit_size) => Some((*value, *bit_size)), _ => None, } } -} -impl MemoryValue { - /// Builds a memory value from a field element. - pub fn new_from_field(value: F, bit_size: u32) -> Self { - if bit_size == F::max_num_bits() { - MemoryValue::new_field(value) - } else { - MemoryValue::new_integer(BigUint::from_bytes_be(&value.to_be_bytes()), bit_size) + pub fn bit_size(&self) -> BitSize { + match self { + MemoryValue::Field(_) => BitSize::Field, + MemoryValue::Integer(_, bit_size) => BitSize::Integer(*bit_size), + } + } + + pub fn to_usize(&self) -> usize { + match self { + MemoryValue::Integer(_, bit_size) if *bit_size == MEMORY_ADDRESSING_BIT_SIZE => { + self.extract_integer().unwrap().0.try_into().unwrap() + } + _ => panic!("value is not typed as brillig usize"), } } +} - /// Builds a memory value from an integer - pub fn new_from_integer(value: BigUint, bit_size: u32) -> Self { - if bit_size == F::max_num_bits() { - MemoryValue::new_field(F::from_be_bytes_reduce(&value.to_bytes_be())) +impl MemoryValue { + /// Builds a memory value from a field element. + pub fn new_from_field(value: F, bit_size: BitSize) -> Self { + if let BitSize::Integer(bit_size) = bit_size { + MemoryValue::new_integer(value.to_u128(), bit_size) } else { - MemoryValue::new_integer(value, bit_size) + MemoryValue::new_field(value) } } /// Builds a memory value from a field element, checking that the value is within the bit size. - pub fn new_checked(value: F, bit_size: u32) -> Option { - if bit_size < F::max_num_bits() && value.num_bits() > bit_size { - return None; + pub fn new_checked(value: F, bit_size: BitSize) -> Option { + if let BitSize::Integer(bit_size) = bit_size { + if value.num_bits() > bit_size.into() { + return None; + } } Some(MemoryValue::new_from_field(value, bit_size)) } - /// Builds an integer-typed memory value. - pub fn new_integer(value: BigUint, bit_size: u32) -> Self { - assert!( - bit_size != F::max_num_bits(), - "Tried to build a field memory value via new_integer" - ); - MemoryValue::Integer(value, bit_size) - } - /// Converts the memory value to a field element, independent of its type. 
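    // [Editor's note] Illustrative: `MemoryValue::Field(f).to_field() == f`, and an
    // integer value is widened losslessly, e.g.
    // `MemoryValue::<FieldElement>::Integer(5, IntegerBitSize::U32).to_field()`
    // equals `FieldElement::from(5_u128)` (assuming `FieldElement` as the field type).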
pub fn to_field(&self) -> F { match self { MemoryValue::Field(value) => *value, - MemoryValue::Integer(value, _) => F::from_be_bytes_reduce(&value.to_bytes_be()), - } - } - - /// Converts the memory value to an integer, independent of its type. - pub fn to_integer(self) -> BigUint { - match self { - MemoryValue::Field(value) => BigUint::from_bytes_be(&value.to_be_bytes()), - MemoryValue::Integer(value, _) => value, - } - } - - pub fn bit_size(&self) -> u32 { - match self { - MemoryValue::Field(_) => F::max_num_bits(), - MemoryValue::Integer(_, bit_size) => *bit_size, + MemoryValue::Integer(value, _) => F::from(*value), } } - pub fn to_usize(&self) -> usize { - assert!( - self.bit_size() == MEMORY_ADDRESSING_BIT_SIZE, - "value is not typed as brillig usize" - ); - self.extract_integer().unwrap().0.try_into().unwrap() - } - pub fn expect_field(&self) -> Result<&F, MemoryTypeError> { match self { MemoryValue::Integer(_, bit_size) => Err(MemoryTypeError::MismatchedBitSize { - value_bit_size: *bit_size, + value_bit_size: (*bit_size).into(), expected_bit_size: F::max_num_bits(), }), MemoryValue::Field(field) => Ok(field), @@ -119,21 +103,21 @@ impl MemoryValue { pub fn expect_integer_with_bit_size( &self, - expected_bit_size: u32, - ) -> Result<&BigUint, MemoryTypeError> { + expected_bit_size: IntegerBitSize, + ) -> Result { match self { MemoryValue::Integer(value, bit_size) => { if *bit_size != expected_bit_size { return Err(MemoryTypeError::MismatchedBitSize { - value_bit_size: *bit_size, - expected_bit_size, + value_bit_size: (*bit_size).into(), + expected_bit_size: expected_bit_size.into(), }); } - Ok(value) + Ok(*value) } MemoryValue::Field(_) => Err(MemoryTypeError::MismatchedBitSize { value_bit_size: F::max_num_bits(), - expected_bit_size, + expected_bit_size: expected_bit_size.into(), }), } } @@ -144,12 +128,7 @@ impl std::fmt::Display for MemoryValue { match self { MemoryValue::Field(value) => write!(f, "{}: field", value), MemoryValue::Integer(value, bit_size) => { - let typ = match bit_size { - 0 => "null".to_string(), - 1 => "bool".to_string(), - _ => format!("u{}", bit_size), - }; - write!(f, "{}: {}", value, typ) + write!(f, "{}: {}", value, bit_size) } } } @@ -157,62 +136,44 @@ impl std::fmt::Display for MemoryValue { impl Default for MemoryValue { fn default() -> Self { - MemoryValue::new_integer(BigUint::zero(), 0) - } -} - -impl From for MemoryValue { - fn from(value: usize) -> Self { - MemoryValue::new_integer(value.into(), MEMORY_ADDRESSING_BIT_SIZE) + MemoryValue::new_integer(0, IntegerBitSize::U0) } } -impl From for MemoryValue { - fn from(value: u32) -> Self { - MemoryValue::new_integer(value.into(), 32) - } -} - -impl From for MemoryValue { - fn from(value: u64) -> Self { - MemoryValue::new_integer(value.into(), 64) +impl From for MemoryValue { + fn from(value: bool) -> Self { + let value = if value { 1 } else { 0 }; + MemoryValue::new_integer(value, IntegerBitSize::U1) } } impl From for MemoryValue { fn from(value: u8) -> Self { - MemoryValue::new_integer(value.into(), 8) + MemoryValue::new_integer(value.into(), IntegerBitSize::U8) } } -impl From for MemoryValue { - fn from(value: bool) -> Self { - let value = if value { BigUint::one() } else { BigUint::zero() }; - MemoryValue::new_integer(value, 1) +impl From for MemoryValue { + fn from(value: usize) -> Self { + MemoryValue::new_integer(value as u128, MEMORY_ADDRESSING_BIT_SIZE) } } -impl TryFrom> for u64 { - type Error = MemoryTypeError; - - fn try_from(memory_value: MemoryValue) -> Result { - 
memory_value.expect_integer_with_bit_size(64).map(|value| value.try_into().unwrap()) +impl From for MemoryValue { + fn from(value: u32) -> Self { + MemoryValue::new_integer(value.into(), IntegerBitSize::U32) } } -impl TryFrom> for u32 { - type Error = MemoryTypeError; - - fn try_from(memory_value: MemoryValue) -> Result { - memory_value.expect_integer_with_bit_size(32).map(|value| value.try_into().unwrap()) +impl From for MemoryValue { + fn from(value: u64) -> Self { + MemoryValue::new_integer(value.into(), IntegerBitSize::U64) } } -impl TryFrom> for u8 { - type Error = MemoryTypeError; - - fn try_from(memory_value: MemoryValue) -> Result { - memory_value.expect_integer_with_bit_size(8).map(|value| value.try_into().unwrap()) +impl From for MemoryValue { + fn from(value: u128) -> Self { + MemoryValue::new_integer(value, IntegerBitSize::U128) } } @@ -220,7 +181,7 @@ impl TryFrom> for bool { type Error = MemoryTypeError; fn try_from(memory_value: MemoryValue) -> Result { - let as_integer = memory_value.expect_integer_with_bit_size(1)?; + let as_integer = memory_value.expect_integer_with_bit_size(IntegerBitSize::U1)?; if as_integer.is_zero() { Ok(false) @@ -232,49 +193,35 @@ impl TryFrom> for bool { } } -impl TryFrom<&MemoryValue> for u64 { +impl TryFrom> for u8 { type Error = MemoryTypeError; - fn try_from(memory_value: &MemoryValue) -> Result { - memory_value.expect_integer_with_bit_size(64).map(|value| { - value.try_into().expect("memory_value has been asserted to contain a 64 bit integer") - }) + fn try_from(memory_value: MemoryValue) -> Result { + memory_value.expect_integer_with_bit_size(IntegerBitSize::U8).map(|value| value as u8) } } -impl TryFrom<&MemoryValue> for u32 { +impl TryFrom> for u32 { type Error = MemoryTypeError; - fn try_from(memory_value: &MemoryValue) -> Result { - memory_value.expect_integer_with_bit_size(32).map(|value| { - value.try_into().expect("memory_value has been asserted to contain a 32 bit integer") - }) + fn try_from(memory_value: MemoryValue) -> Result { + memory_value.expect_integer_with_bit_size(IntegerBitSize::U32).map(|value| value as u32) } } -impl TryFrom<&MemoryValue> for u8 { +impl TryFrom> for u64 { type Error = MemoryTypeError; - fn try_from(memory_value: &MemoryValue) -> Result { - memory_value.expect_integer_with_bit_size(8).map(|value| { - value.try_into().expect("memory_value has been asserted to contain an 8 bit integer") - }) + fn try_from(memory_value: MemoryValue) -> Result { + memory_value.expect_integer_with_bit_size(IntegerBitSize::U64).map(|value| value as u64) } } -impl TryFrom<&MemoryValue> for bool { +impl TryFrom> for u128 { type Error = MemoryTypeError; - fn try_from(memory_value: &MemoryValue) -> Result { - let as_integer = memory_value.expect_integer_with_bit_size(1)?; - - if as_integer.is_zero() { - Ok(false) - } else if as_integer.is_one() { - Ok(true) - } else { - unreachable!("value typed as bool is greater than one") - } + fn try_from(memory_value: MemoryValue) -> Result { + memory_value.expect_integer_with_bit_size(IntegerBitSize::U128) } } @@ -288,7 +235,7 @@ pub struct Memory { impl Memory { /// Gets the value at pointer pub fn read(&self, ptr: MemoryAddress) -> MemoryValue { - self.inner.get(ptr.to_usize()).cloned().unwrap_or_default() + self.inner.get(ptr.to_usize()).copied().unwrap_or_default() } pub fn read_ref(&self, ptr: MemoryAddress) -> MemoryAddress { @@ -321,7 +268,7 @@ impl Memory { /// Sets the values after pointer `ptr` to `values` pub fn write_slice(&mut self, ptr: MemoryAddress, values: &[MemoryValue]) { 
self.resize_to_fit(ptr.to_usize() + values.len()); - self.inner[ptr.to_usize()..(ptr.to_usize() + values.len())].clone_from_slice(values); + self.inner[ptr.to_usize()..(ptr.to_usize() + values.len())].copy_from_slice(values); } /// Returns the values of the memory diff --git a/aztec_macros/src/transforms/contract_interface.rs b/aztec_macros/src/transforms/contract_interface.rs index e79cee66407..56107de77c5 100644 --- a/aztec_macros/src/transforms/contract_interface.rs +++ b/aztec_macros/src/transforms/contract_interface.rs @@ -1,15 +1,18 @@ +use acvm::acir::AcirField; + use noirc_errors::Location; use noirc_frontend::ast::{Ident, NoirFunction, UnresolvedTypeData}; use noirc_frontend::{ graph::CrateId, - macros_api::{FileId, HirContext, HirExpression, HirLiteral, HirStatement}, + macros_api::{FieldElement, FileId, HirContext, HirExpression, HirLiteral, HirStatement}, parse_program, parser::SortedModule, Type, }; +use tiny_keccak::{Hasher, Keccak}; + use crate::utils::{ - constants::SELECTOR_PLACEHOLDER, errors::AztecMacroError, hir_utils::{collect_crate_structs, get_contract_module_data, signature_of_type}, }; @@ -64,11 +67,6 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call .join(", "); let fn_return_type: noirc_frontend::ast::UnresolvedType = func.return_type(); - let fn_selector = format!( - "dep::aztec::protocol_types::abis::function_selector::FunctionSelector::from_signature(\"{}\")", - SELECTOR_PLACEHOLDER - ); - let parameters = func.parameters(); let is_void = if matches!(fn_return_type.typ, UnresolvedTypeData::Unit) { "Void" } else { "" }; let is_static = if is_static_call { "Static" } else { "" }; @@ -160,7 +158,7 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call let fn_body = format!( "{} - let selector = {}; + let selector = dep::aztec::protocol_types::abis::function_selector::FunctionSelector::from_field(0); dep::aztec::context::{}{}{}CallInterface {{ target_contract: self.target_contract, selector, @@ -172,7 +170,6 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call {} }}", args, - fn_selector, aztec_visibility, is_static, is_void, @@ -291,27 +288,34 @@ pub fn generate_contract_interface( Ok(()) } -fn compute_fn_signature(fn_name: &str, parameters: &[Type]) -> String { - format!( +fn compute_fn_signature_hash(fn_name: &str, parameters: &[Type]) -> u32 { + let signature = format!( "{}({})", fn_name, parameters.iter().map(signature_of_type).collect::>().join(",") - ) + ); + let mut keccak = Keccak::v256(); + let mut result = [0u8; 32]; + keccak.update(signature.as_bytes()); + keccak.finalize(&mut result); + // Take the first 4 bytes of the hash and convert them to an integer + // If you change the following value you have to change NUM_BYTES_PER_NOTE_TYPE_ID in l1_note_payload.ts as well + let num_bytes_per_note_type_id = 4; + u32::from_be_bytes(result[0..num_bytes_per_note_type_id].try_into().unwrap()) } // Updates the function signatures in the contract interface with the actual ones, replacing the placeholder. -// This is done by locating the contract interface struct, its functions (stubs) and assuming the last statement of each -// is the constructor for a CallInterface. This constructor has a selector field that holds a -// FunctionSelector::from_signature function that receives the signature as a string literal. 
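// [Editor's note] For reference, a standalone sketch of the selector scheme used by
// `compute_fn_signature_hash` above: keccak256 the signature string and keep its first
// 4 big-endian bytes as a u32. It mirrors the tiny_keccak calls in this diff; the
// helper name here is hypothetical.
//
//     use tiny_keccak::{Hasher, Keccak};
//
//     fn selector_hash(signature: &str) -> u32 {
//         let mut keccak = Keccak::v256();
//         let mut digest = [0u8; 32];
//         keccak.update(signature.as_bytes());
//         keccak.finalize(&mut digest);
//         // First 4 big-endian bytes of keccak256(signature)
//         u32::from_be_bytes(digest[0..4].try_into().unwrap())
//     }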
+// This is done by locating the contract interface struct, its functions (stubs) and assuming the second to last statement of each +// is a let statement initializing the selector with a FunctionSelector::from_field call. pub fn update_fn_signatures_in_contract_interface( crate_id: &CrateId, context: &mut HirContext, ) -> Result<(), (AztecMacroError, FileId)> { - if let Some((name, _, file_id)) = get_contract_module_data(context, crate_id) { + if let Some((struct_name, _, file_id)) = get_contract_module_data(context, crate_id) { let maybe_interface_struct = collect_crate_structs(crate_id, context).iter().find_map(|struct_id| { let r#struct = context.def_interner.get_struct(*struct_id); - if r#struct.borrow().name.0.contents == name { + if r#struct.borrow().name.0.contents == struct_name { Some(r#struct) } else { None @@ -329,7 +333,7 @@ pub fn update_fn_signatures_in_contract_interface( continue; } - let fn_signature = compute_fn_signature( + let fn_signature_hash = compute_fn_signature_hash( name, &fn_parameters .iter() @@ -381,14 +385,12 @@ pub fn update_fn_signatures_in_contract_interface( context.def_interner.expression(¤t_fn_signature_expression_id); match current_fn_signature_expression { - HirExpression::Literal(HirLiteral::Str(signature)) => { - if signature != SELECTOR_PLACEHOLDER { + HirExpression::Literal(HirLiteral::Integer(value, _)) => { + if !value.is_zero() { Err(( AztecMacroError::CouldNotGenerateContractInterface { - secondary_message: Some(format!( - "Function signature argument must be a placeholder: {}", - SELECTOR_PLACEHOLDER - )), + secondary_message: Some( + "Function signature argument must be a placeholder with value 0".to_string()), }, file_id, )) @@ -397,20 +399,25 @@ pub fn update_fn_signatures_in_contract_interface( } } _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { + AztecMacroError::CouldNotGenerateContractInterface { secondary_message: Some( - "Function signature argument must be a literal string".to_string(), + "Function signature argument must be a literal field element" + .to_string(), ), }, file_id, )), }?; - context - .def_interner - .update_expression(current_fn_signature_expression_id, |expr| { - *expr = HirExpression::Literal(HirLiteral::Str(fn_signature)) - }); + context.def_interner.update_expression( + current_fn_signature_expression_id, + |expr| { + *expr = HirExpression::Literal(HirLiteral::Integer( + FieldElement::from(fn_signature_hash as u128), + false, + )) + }, + ); } } } diff --git a/aztec_macros/src/transforms/note_interface.rs b/aztec_macros/src/transforms/note_interface.rs index 49525fc2ae1..3233e12ab73 100644 --- a/aztec_macros/src/transforms/note_interface.rs +++ b/aztec_macros/src/transforms/note_interface.rs @@ -73,6 +73,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt generics: vec![], methods: vec![], where_clause: vec![], + is_comptime: false, }; module.impls.push(default_impl.clone()); module.impls.last_mut().unwrap() @@ -100,7 +101,9 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt }) .collect::, _>>()?; let [note_serialized_len, note_bytes_len]: [_; 2] = - note_interface_generics.try_into().unwrap(); + note_interface_generics.try_into().expect( + "NoteInterface must be generic over 2 types, NOTE_FIELDS_LEN and NOTE_BYTES_LEN", + ); // Automatically inject the header field if it's not present let (header_field_name, _) = if let Some(existing_header) = diff --git a/aztec_macros/src/transforms/storage.rs b/aztec_macros/src/transforms/storage.rs 
index c302dd87aa5..1c6ef634070 100644 --- a/aztec_macros/src/transforms/storage.rs +++ b/aztec_macros/src/transforms/storage.rs @@ -248,6 +248,7 @@ pub fn generate_storage_implementation( methods: vec![(init, Span::default())], where_clause: vec![], + is_comptime: false, }; module.impls.push(storage_impl); diff --git a/aztec_macros/src/utils/constants.rs b/aztec_macros/src/utils/constants.rs index 2178f7a2526..3e93b2aa545 100644 --- a/aztec_macros/src/utils/constants.rs +++ b/aztec_macros/src/utils/constants.rs @@ -1,3 +1,2 @@ pub const FUNCTION_TREE_HEIGHT: u32 = 5; pub const MAX_CONTRACT_PRIVATE_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); -pub const SELECTOR_PLACEHOLDER: &str = "SELECTOR_PLACEHOLDER"; diff --git a/aztec_macros/src/utils/hir_utils.rs b/aztec_macros/src/utils/hir_utils.rs index 7198ed5bd3d..200ce3099cb 100644 --- a/aztec_macros/src/utils/hir_utils.rs +++ b/aztec_macros/src/utils/hir_utils.rs @@ -1,16 +1,16 @@ use acvm::acir::AcirField; use iter_extended::vecmap; -use noirc_errors::Location; +use noirc_errors::{CustomDiagnostic, Location}; use noirc_frontend::ast; +use noirc_frontend::elaborator::Elaborator; +use noirc_frontend::hir::def_collector::dc_crate::{ + CollectedItems, UnresolvedFunctions, UnresolvedGlobal, +}; use noirc_frontend::macros_api::{HirExpression, HirLiteral}; use noirc_frontend::node_interner::{NodeInterner, TraitImplKind}; use noirc_frontend::{ graph::CrateId, - hir::{ - def_map::{LocalModuleId, ModuleId}, - resolution::{path_resolver::StandardPathResolver, resolver::Resolver}, - type_check::type_check_func, - }, + hir::def_map::{LocalModuleId, ModuleId}, macros_api::{FileId, HirContext, MacroError, ModuleDefId, StructId}, node_interner::{FuncId, TraitId}, Shared, StructType, Type, @@ -190,24 +190,18 @@ pub fn inject_fn( span: None, })?; - let def_maps = &mut context.def_maps; - - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: module_id, krate: *crate_id }); - - let resolver = Resolver::new(&mut context.def_interner, &path_resolver, def_maps, file_id); - - let (hir_func, meta, _) = resolver.resolve_function(func, func_id); + let mut items = CollectedItems::default(); + let functions = vec![(module_id, func_id, func)]; + let trait_id = None; + items.functions.push(UnresolvedFunctions { file_id, functions, trait_id, self_type: None }); - context.def_interner.push_fn_meta(meta, func_id); - context.def_interner.update_fn(func_id, hir_func); - - let errors = type_check_func(&mut context.def_interner, func_id); + let mut errors = Elaborator::elaborate(context, *crate_id, items, None); + errors.retain(|(error, _)| !CustomDiagnostic::from(error).is_warning()); if !errors.is_empty() { return Err(MacroError { primary_message: "Failed to type check autogenerated function".to_owned(), - secondary_message: Some(errors.iter().map(|err| err.to_string()).collect::()), + secondary_message: Some(errors.iter().map(|err| err.0.to_string()).collect::()), span: None, }); } @@ -227,6 +221,7 @@ pub fn inject_global( let global_id = context.def_interner.push_empty_global( name.clone(), module_id, + *crate_id, file_id, global.attributes.clone(), false, @@ -243,17 +238,10 @@ pub fn inject_global( ) }); - let def_maps = &mut context.def_maps; - - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: module_id, krate: *crate_id }); - - let mut resolver = Resolver::new(&mut context.def_interner, &path_resolver, def_maps, file_id); - - let hir_stmt = resolver.resolve_global_let(global, global_id); + let mut items = 
CollectedItems::default(); + items.globals.push(UnresolvedGlobal { file_id, module_id, global_id, stmt_def: global }); - let statement_id = context.def_interner.get_global(global_id).let_statement; - context.def_interner.replace_statement(statement_id, hir_stmt); + let _errors = Elaborator::elaborate(context, *crate_id, items, None); } pub fn fully_qualified_note_path(context: &HirContext, note_id: StructId) -> Option { diff --git a/compiler/integration-tests/package.json b/compiler/integration-tests/package.json index 0638fffe547..a88e55b2321 100644 --- a/compiler/integration-tests/package.json +++ b/compiler/integration-tests/package.json @@ -25,7 +25,7 @@ "eslint": "^8.57.0", "eslint-plugin-prettier": "^5.1.3", "ethers": "^6.7.1", - "hardhat": "^2.17.4", + "hardhat": "^2.22.6", "prettier": "3.2.5", "smol-toml": "^1.1.2", "toml": "^3.0.0", diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 2b0769e30d4..dd774a1eeec 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -52,9 +52,9 @@ pub const NOIR_ARTIFACT_VERSION_STRING: &str = #[derive(Args, Clone, Debug, Default)] pub struct CompileOptions { - /// Override the expression width requested by the backend. - #[arg(long, value_parser = parse_expression_width, default_value = "4")] - pub expression_width: ExpressionWidth, + /// Specify the backend expression width that should be targeted + #[arg(long, value_parser = parse_expression_width)] + pub expression_width: Option, /// Force a full recompilation. #[arg(long = "force")] @@ -99,10 +99,6 @@ pub struct CompileOptions { #[arg(long, hide = true)] pub force_brillig: bool, - /// Use the deprecated name resolution & type checking passes instead of the elaborator - #[arg(long, hide = true)] - pub use_legacy: bool, - /// Enable printing results of comptime evaluation: provide a path suffix /// for the module to debug, e.g. 
"package_name/src/main.nr" #[arg(long)] @@ -113,7 +109,7 @@ pub struct CompileOptions { pub show_artifact_paths: bool, } -fn parse_expression_width(input: &str) -> Result { +pub fn parse_expression_width(input: &str) -> Result { use std::io::{Error, ErrorKind}; let width = input .parse::() @@ -262,15 +258,13 @@ pub fn check_crate( crate_id: CrateId, deny_warnings: bool, disable_macros: bool, - use_legacy: bool, debug_comptime_in_file: Option<&str>, ) -> CompilationResult<()> { let macros: &[&dyn MacroProcessor] = if disable_macros { &[] } else { &[&aztec_macros::AztecMacro as &dyn MacroProcessor] }; let mut errors = vec![]; - let diagnostics = - CrateDefMap::collect_defs(crate_id, context, use_legacy, debug_comptime_in_file, macros); + let diagnostics = CrateDefMap::collect_defs(crate_id, context, debug_comptime_in_file, macros); errors.extend(diagnostics.into_iter().map(|(error, file_id)| { let diagnostic = CustomDiagnostic::from(&error); diagnostic.in_file(file_id) @@ -307,7 +301,6 @@ pub fn compile_main( crate_id, options.deny_warnings, options.disable_macros, - options.use_legacy, options.debug_comptime_in_file.as_deref(), )?; @@ -349,7 +342,6 @@ pub fn compile_contract( crate_id, options.deny_warnings, options.disable_macros, - options.use_legacy, options.debug_comptime_in_file.as_deref(), )?; diff --git a/compiler/noirc_driver/tests/stdlib_warnings.rs b/compiler/noirc_driver/tests/stdlib_warnings.rs index 0e098d0d087..d2474444d13 100644 --- a/compiler/noirc_driver/tests/stdlib_warnings.rs +++ b/compiler/noirc_driver/tests/stdlib_warnings.rs @@ -25,7 +25,7 @@ fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> let root_crate_id = prepare_crate(&mut context, file_name); let ((), warnings) = - noirc_driver::check_crate(&mut context, root_crate_id, false, false, false, None)?; + noirc_driver::check_crate(&mut context, root_crate_id, false, false, None)?; assert_eq!(warnings, Vec::new(), "stdlib is producing {} warnings", warnings.len()); diff --git a/compiler/noirc_errors/src/position.rs b/compiler/noirc_errors/src/position.rs index 007ec58ca27..9f9879e1d1b 100644 --- a/compiler/noirc_errors/src/position.rs +++ b/compiler/noirc_errors/src/position.rs @@ -90,6 +90,10 @@ impl Span { self.start() <= other.start() && self.end() >= other.end() } + pub fn intersects(&self, other: &Span) -> bool { + self.end() > other.start() && self.start() < other.end() + } + pub fn is_smaller(&self, other: &Span) -> bool { let self_distance = self.end() - self.start(); let other_distance = other.end() - other.start(); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index aa9cb8cd7a3..8e2b2fb7a29 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -6,7 +6,7 @@ use acvm::{ use crate::brillig::brillig_ir::{ brillig_variable::{BrilligVariable, BrilligVector, SingleAddrVariable}, debug_show::DebugToString, - BrilligBinaryOp, BrilligContext, + BrilligContext, }; /// Transforms SSA's black box function calls into the corresponding brillig instructions @@ -239,11 +239,10 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::RecursiveAggregation => {} BlackBoxFunc::BigIntAdd => { if let ( - [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(rhs_modulus)], - 
[BrilligVariable::SingleAddr(output), BrilligVariable::SingleAddr(modulus_id)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(_lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(_rhs_modulus)], + [BrilligVariable::SingleAddr(output), BrilligVariable::SingleAddr(_modulus_id)], ) = (function_arguments, function_results) { - prepare_bigint_output(brillig_context, lhs_modulus, rhs_modulus, modulus_id); brillig_context.black_box_op_instruction(BlackBoxOp::BigIntAdd { lhs: lhs.address, rhs: rhs.address, @@ -257,11 +256,10 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntSub => { if let ( - [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(rhs_modulus)], - [BrilligVariable::SingleAddr(output), BrilligVariable::SingleAddr(modulus_id)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(_lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(_rhs_modulus)], + [BrilligVariable::SingleAddr(output), BrilligVariable::SingleAddr(_modulus_id)], ) = (function_arguments, function_results) { - prepare_bigint_output(brillig_context, lhs_modulus, rhs_modulus, modulus_id); brillig_context.black_box_op_instruction(BlackBoxOp::BigIntSub { lhs: lhs.address, rhs: rhs.address, @@ -275,11 +273,10 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntMul => { if let ( - [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(rhs_modulus)], - [BrilligVariable::SingleAddr(output), BrilligVariable::SingleAddr(modulus_id)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(_lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(_rhs_modulus)], + [BrilligVariable::SingleAddr(output), BrilligVariable::SingleAddr(_modulus_id)], ) = (function_arguments, function_results) { - prepare_bigint_output(brillig_context, lhs_modulus, rhs_modulus, modulus_id); brillig_context.black_box_op_instruction(BlackBoxOp::BigIntMul { lhs: lhs.address, rhs: rhs.address, @@ -293,11 +290,10 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntDiv => { if let ( - [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(rhs_modulus)], - [BrilligVariable::SingleAddr(output), BrilligVariable::SingleAddr(modulus_id)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(_lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(_rhs_modulus)], + [BrilligVariable::SingleAddr(output), BrilligVariable::SingleAddr(_modulus_id)], ) = (function_arguments, function_results) { - prepare_bigint_output(brillig_context, lhs_modulus, rhs_modulus, modulus_id); brillig_context.black_box_op_instruction(BlackBoxOp::BigIntDiv { lhs: lhs.address, rhs: rhs.address, @@ -416,27 +412,3 @@ fn convert_array_or_vector( ), } } - -fn prepare_bigint_output( - brillig_context: &mut BrilligContext, - lhs_modulus: &SingleAddrVariable, - rhs_modulus: &SingleAddrVariable, - modulus_id: &SingleAddrVariable, -) { - // Check moduli - let condition = brillig_context.allocate_register(); - let condition_adr = SingleAddrVariable { address: condition, bit_size: 1 }; - brillig_context.binary_instruction( - *lhs_modulus, - *rhs_modulus, - condition_adr, - BrilligBinaryOp::Equals, - ); - brillig_context.codegen_constrain( - condition_adr, - Some("moduli should be 
identical in BigInt operation".to_string()), - ); - brillig_context.deallocate_register(condition); - - brillig_context.mov_instruction(modulus_id.address, lhs_modulus.address); -} diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs index ae159f2c45c..dff4da56c1e 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs @@ -1,6 +1,6 @@ -use acvm::{ - acir::brillig::{BinaryFieldOp, BinaryIntOp, MemoryAddress, Opcode as BrilligOpcode}, - acir::AcirField, +use acvm::acir::{ + brillig::{BinaryFieldOp, BitSize, MemoryAddress, Opcode as BrilligOpcode}, + AcirField, }; use crate::brillig::brillig_ir::artifact::GeneratedBrillig; @@ -28,7 +28,7 @@ pub(crate) fn directive_invert() -> GeneratedBrillig { BrilligOpcode::Const { destination: zero_const, value: F::from(0_usize), - bit_size: F::max_num_bits(), + bit_size: BitSize::Field, }, BrilligOpcode::BinaryFieldOp { op: BinaryFieldOp::Equals, @@ -42,7 +42,7 @@ pub(crate) fn directive_invert() -> GeneratedBrillig { BrilligOpcode::Const { destination: one_const, value: F::one(), - bit_size: F::max_num_bits(), + bit_size: BitSize::Field, }, // Divide 1 by the input, and set the result of the division into register (0) BrilligOpcode::BinaryFieldOp { @@ -67,105 +67,47 @@ pub(crate) fn directive_invert() -> GeneratedBrillig { /// (a/b, a-a/b*b) /// } /// ``` -pub(crate) fn directive_quotient(bit_size: u32) -> GeneratedBrillig { +pub(crate) fn directive_quotient() -> GeneratedBrillig { // `a` is (0) (i.e register index 0) // `b` is (1) - // TODO: The only difference between these implementations is the integer version will truncate the input to the `bit_size` via cast. - // Once we deduplicate brillig functions then we can modify this so that fields and integers share the same quotient function. - if bit_size >= F::max_num_bits() { - // Field version - GeneratedBrillig { - byte_code: vec![ - BrilligOpcode::CalldataCopy { - destination_address: MemoryAddress::from(0), - size: 2, - offset: 0, - }, - // No cast, since calldata is typed as field by default - //q = a/b is set into register (2) - BrilligOpcode::BinaryFieldOp { - op: BinaryFieldOp::IntegerDiv, // We want integer division, not field division! 
- lhs: MemoryAddress::from(0), - rhs: MemoryAddress::from(1), - destination: MemoryAddress::from(2), - }, - //(1)= q*b - BrilligOpcode::BinaryFieldOp { - op: BinaryFieldOp::Mul, - lhs: MemoryAddress::from(2), - rhs: MemoryAddress::from(1), - destination: MemoryAddress::from(1), - }, - //(1) = a-q*b - BrilligOpcode::BinaryFieldOp { - op: BinaryFieldOp::Sub, - lhs: MemoryAddress::from(0), - rhs: MemoryAddress::from(1), - destination: MemoryAddress::from(1), - }, - //(0) = q - BrilligOpcode::Mov { - destination: MemoryAddress::from(0), - source: MemoryAddress::from(2), - }, - BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 2 }, - ], - assert_messages: Default::default(), - locations: Default::default(), - } - } else { - // Integer version - GeneratedBrillig { - byte_code: vec![ - BrilligOpcode::CalldataCopy { - destination_address: MemoryAddress::from(0), - size: 2, - offset: 0, - }, - BrilligOpcode::Cast { - destination: MemoryAddress(0), - source: MemoryAddress(0), - bit_size, - }, - BrilligOpcode::Cast { - destination: MemoryAddress(1), - source: MemoryAddress(1), - bit_size, - }, - //q = a/b is set into register (2) - BrilligOpcode::BinaryIntOp { - op: BinaryIntOp::Div, - lhs: MemoryAddress::from(0), - rhs: MemoryAddress::from(1), - destination: MemoryAddress::from(2), - bit_size, - }, - //(1)= q*b - BrilligOpcode::BinaryIntOp { - op: BinaryIntOp::Mul, - lhs: MemoryAddress::from(2), - rhs: MemoryAddress::from(1), - destination: MemoryAddress::from(1), - bit_size, - }, - //(1) = a-q*b - BrilligOpcode::BinaryIntOp { - op: BinaryIntOp::Sub, - lhs: MemoryAddress::from(0), - rhs: MemoryAddress::from(1), - destination: MemoryAddress::from(1), - bit_size, - }, - //(0) = q - BrilligOpcode::Mov { - destination: MemoryAddress::from(0), - source: MemoryAddress::from(2), - }, - BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 2 }, - ], - assert_messages: Default::default(), - locations: Default::default(), - } + GeneratedBrillig { + byte_code: vec![ + BrilligOpcode::CalldataCopy { + destination_address: MemoryAddress::from(0), + size: 2, + offset: 0, + }, + // No cast, since calldata is typed as field by default + //q = a/b is set into register (2) + BrilligOpcode::BinaryFieldOp { + op: BinaryFieldOp::IntegerDiv, // We want integer division, not field division! 
+ lhs: MemoryAddress::from(0), + rhs: MemoryAddress::from(1), + destination: MemoryAddress::from(2), + }, + //(1)= q*b + BrilligOpcode::BinaryFieldOp { + op: BinaryFieldOp::Mul, + lhs: MemoryAddress::from(2), + rhs: MemoryAddress::from(1), + destination: MemoryAddress::from(1), + }, + //(1) = a-q*b + BrilligOpcode::BinaryFieldOp { + op: BinaryFieldOp::Sub, + lhs: MemoryAddress::from(0), + rhs: MemoryAddress::from(1), + destination: MemoryAddress::from(1), + }, + //(0) = q + BrilligOpcode::Mov { + destination: MemoryAddress::from(0), + source: MemoryAddress::from(2), + }, + BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 2 }, + ], + assert_messages: Default::default(), + locations: Default::default(), } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 80367d07635..21f8722c116 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -127,7 +127,8 @@ pub(crate) mod tests { use std::vec; use acvm::acir::brillig::{ - ForeignCallParam, ForeignCallResult, HeapArray, HeapVector, MemoryAddress, ValueOrArray, + BitSize, ForeignCallParam, ForeignCallResult, HeapArray, HeapVector, IntegerBitSize, + MemoryAddress, ValueOrArray, }; use acvm::brillig_vm::brillig::HeapValueType; use acvm::brillig_vm::{VMStatus, VM}; @@ -253,9 +254,11 @@ pub(crate) mod tests { context.foreign_call_instruction( "make_number_sequence".into(), &[ValueOrArray::MemoryAddress(r_input_size)], - &[HeapValueType::Simple(32)], + &[HeapValueType::Simple(BitSize::Integer(IntegerBitSize::U32))], &[ValueOrArray::HeapVector(HeapVector { pointer: r_stack, size: r_output_size })], - &[HeapValueType::Vector { value_types: vec![HeapValueType::Simple(32)] }], + &[HeapValueType::Vector { + value_types: vec![HeapValueType::Simple(BitSize::Integer(IntegerBitSize::U32))], + }], ); // push stack frame by r_returned_size context.memory_op_instruction(r_stack, r_output_size, r_stack, BrilligBinaryOp::Add); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs index cf1fd555191..3200bd54265 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs @@ -1,5 +1,5 @@ use acvm::{ - acir::AcirField, + acir::{brillig::BitSize, AcirField}, brillig_vm::brillig::{HeapArray, HeapValueType, HeapVector, MemoryAddress, ValueOrArray}, FieldElement, }; @@ -126,9 +126,9 @@ impl BrilligVariable { pub(crate) fn type_to_heap_value_type(typ: &Type) -> HeapValueType { match typ { - Type::Numeric(_) | Type::Reference(_) | Type::Function => { - HeapValueType::Simple(get_bit_size_from_ssa_type(typ)) - } + Type::Numeric(_) | Type::Reference(_) | Type::Function => HeapValueType::Simple( + BitSize::try_from_u32::(get_bit_size_from_ssa_type(typ)).unwrap(), + ), Type::Array(elem_type, size) => HeapValueType::Array { value_types: elem_type.as_ref().iter().map(type_to_heap_value_type).collect(), size: typ.element_size() * size, diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs index b1cb2b19764..d07b411f5a1 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs @@ -1,8 +1,10 @@ -use acvm::{ - acir::brillig::{BlackBoxOp, HeapArray}, - 
acir::AcirField, +use acvm::acir::{ + brillig::{BlackBoxOp, HeapArray, IntegerBitSize}, + AcirField, }; +use crate::brillig::brillig_ir::BrilligBinaryOp; + use super::{ brillig_variable::{BrilligVector, SingleAddrVariable}, debug_show::DebugToString, @@ -24,12 +26,39 @@ impl BrilligContext { value_to_truncate.bit_size ); - // We cast back and forth to ensure that the value is truncated. - let intermediate_register = - SingleAddrVariable { address: self.allocate_register(), bit_size }; - self.cast_instruction(intermediate_register, value_to_truncate); - self.cast_instruction(destination_of_truncated_value, intermediate_register); - self.deallocate_single_addr(intermediate_register); + if bit_size == value_to_truncate.bit_size { + self.mov_instruction(destination_of_truncated_value.address, value_to_truncate.address); + return; + } + + // If we are truncating a value down to a natively supported integer, we can just use the cast instruction + if IntegerBitSize::try_from(bit_size).is_ok() { + // We cast back and forth to ensure that the value is truncated. + let intermediate_register = SingleAddrVariable::new(self.allocate_register(), bit_size); + + self.cast_instruction(intermediate_register, value_to_truncate); + self.cast_instruction(destination_of_truncated_value, intermediate_register); + + self.deallocate_single_addr(intermediate_register); + return; + } + + // If the bit size we are truncating down to is not a natively supported integer, we need to use a modulo operation. + + // The modulus is guaranteed to fit, since we are truncating down to a bit size that is strictly less than the value_to_truncate.bit_size + let modulus_var = self.make_constant_instruction( + F::from(2_usize).pow(&F::from(bit_size as u128)), + value_to_truncate.bit_size, + ); + + self.binary_instruction( + value_to_truncate, + modulus_var, + destination_of_truncated_value, + BrilligBinaryOp::Modulo, + ); + + self.deallocate_single_addr(modulus_var); } /// Issues a to_radix instruction. 
This instruction will write the modulus of the source register diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs index a614f93fa30..69a6b12c9b0 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs @@ -1,9 +1,11 @@ use acvm::{ - acir::brillig::{ - BinaryFieldOp, BinaryIntOp, BlackBoxOp, HeapArray, HeapValueType, MemoryAddress, - Opcode as BrilligOpcode, ValueOrArray, + acir::{ + brillig::{ + BinaryFieldOp, BinaryIntOp, BitSize, BlackBoxOp, HeapArray, HeapValueType, + MemoryAddress, Opcode as BrilligOpcode, ValueOrArray, + }, + AcirField, }, - acir::AcirField, FieldElement, }; @@ -99,7 +101,7 @@ impl BrilligContext { self.push_opcode(BrilligOpcode::BinaryIntOp { op: operation.into(), destination: result.address, - bit_size: lhs.bit_size, + bit_size: lhs.bit_size.try_into().unwrap(), lhs: lhs.address, rhs: rhs.address, }); @@ -363,7 +365,7 @@ impl BrilligContext { self.push_opcode(BrilligOpcode::Cast { destination: destination.address, source: source.address, - bit_size: destination.bit_size, + bit_size: BitSize::try_from_u32::(destination.bit_size).unwrap(), }); } @@ -405,7 +407,7 @@ impl BrilligContext { self.push_opcode(BrilligOpcode::Const { destination: result.address, value: constant, - bit_size: result.bit_size, + bit_size: BitSize::try_from_u32::(result.bit_size).unwrap(), }); } } diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index 820374df9c1..81327cec013 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -190,12 +190,19 @@ pub fn create_program( let recursive = program.recursive; let ArtifactsAndWarnings((generated_acirs, generated_brillig, error_types), ssa_level_warnings) = optimize_into_acir(program, options)?; - assert_eq!( - generated_acirs.len(), - func_sigs.len(), - "The generated ACIRs should match the supplied function signatures" - ); - + if options.force_brillig_output { + assert_eq!( + generated_acirs.len(), + 1, + "Only the main ACIR is expected when forcing Brillig output" + ); + } else { + assert_eq!( + generated_acirs.len(), + func_sigs.len(), + "The generated ACIRs should match the supplied function signatures" + ); + } let mut program_artifact = SsaProgramArtifact::new(generated_brillig, error_types); // Add warnings collected at the Ssa stage diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 74149af25ef..629cc491ba6 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -768,7 +768,7 @@ impl AcirContext { let [q_value, r_value]: [AcirValue; 2] = self .brillig_call( predicate, - &brillig_directive::directive_quotient(bit_size + 1), + &brillig_directive::directive_quotient(), vec![ AcirValue::Var(lhs, AcirType::unsigned(bit_size)), AcirValue::Var(rhs, AcirType::unsigned(bit_size)), @@ -777,7 +777,7 @@ impl AcirContext { true, false, PLACEHOLDER_BRILLIG_INDEX, - Some(BrilligStdlibFunc::Quotient(bit_size + 1)), + Some(BrilligStdlibFunc::Quotient), )? 
.try_into() .expect("quotient only returns two values"); diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 9d29d1d24d6..1395d04f99e 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -74,17 +74,14 @@ pub(crate) struct GeneratedAcir { #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub(crate) enum BrilligStdlibFunc { Inverse, - // The Brillig quotient code is different depending upon the bit size. - Quotient(u32), + Quotient, } impl BrilligStdlibFunc { pub(crate) fn get_generated_brillig(&self) -> GeneratedBrillig { match self { BrilligStdlibFunc::Inverse => brillig_directive::directive_invert(), - BrilligStdlibFunc::Quotient(bit_size) => { - brillig_directive::directive_quotient(*bit_size) - } + BrilligStdlibFunc::Quotient => brillig_directive::directive_quotient(), } } } @@ -294,24 +291,7 @@ impl GeneratedAcir { outputs: (outputs[0], outputs[1], outputs[2]), }, BlackBoxFunc::Keccak256 => { - let var_message_size = match inputs.to_vec().pop() { - Some(var_message_size) => var_message_size[0], - None => { - return Err(InternalError::MissingArg { - name: "".to_string(), - arg: "message_size".to_string(), - call_stack: self.call_stack.clone(), - }); - } - }; - - BlackBoxFuncCall::Keccak256 { - inputs: inputs[0].clone(), - var_message_size, - outputs: outputs - .try_into() - .expect("Compiler should generate correct size outputs"), - } + unreachable!("unexpected BlackBox {}", func_name.to_string()) } BlackBoxFunc::Keccakf1600 => BlackBoxFuncCall::Keccakf1600 { inputs: inputs[0] diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 585afc27919..a75aabe6a03 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -975,6 +975,8 @@ impl<'a> Context<'a> { .into()) } }; + // Ensure that array id is fully resolved. + let array = dfg.resolve(array); let array_id = dfg.resolve(array); let array_typ = dfg.type_of_value(array_id); @@ -992,7 +994,6 @@ impl<'a> Context<'a> { // If we find one, we will use it when computing the index under the enable_side_effect predicate // If not, array_get(..) will use a fallback costing one multiplication in the worst case. // cf. https://github.com/noir-lang/noir/pull/4971 - // For simplicity we compute the offset only for simple arrays let is_simple_array = dfg.instruction_results(instruction).len() == 1 && can_omit_element_sizes_array(&array_typ); @@ -1123,13 +1124,14 @@ impl<'a> Context<'a> { /// It is a dummy value because in the case of a false predicate, the value stored at the requested index will be itself. 
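Note on the Brillig changes above: they lean on two small arithmetic identities. The unified quotient directive computes q = a / b with integer division and r = a - q*b over field-typed calldata (so no per-bit-size variants are needed), and the new truncation fallback reduces a value modulo 2^bit_size when the target width is not a native integer size. A rough host-side sketch of both, with plain `u128` values standing in for field elements (function names are illustrative, not compiler APIs):

```rust
// Host-side analogue of the two identities above; `u128` stands in for field elements.
fn quotient_directive(a: u128, b: u128) -> (u128, u128) {
    let q = a / b;     // BinaryFieldOp::IntegerDiv
    let r = a - q * b; // BinaryFieldOp::Mul followed by BinaryFieldOp::Sub
    (q, r)             // returned as memory slots (0) and (1)
}

fn truncate_to_bit_size(value: u128, bit_size: u32) -> u128 {
    assert!(bit_size < 128);
    value % (1u128 << bit_size) // modulo by the constant 2^bit_size
}
```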
fn convert_array_operation_inputs( &mut self, - array: ValueId, + array_id: ValueId, dfg: &DataFlowGraph, index: ValueId, store_value: Option, offset: usize, ) -> Result<(AcirVar, Option), RuntimeError> { - let (array_id, array_typ, block_id) = self.check_array_is_initialized(array, dfg)?; + let array_typ = dfg.type_of_value(array_id); + let block_id = self.ensure_array_is_initialized(array_id, dfg)?; let index_var = self.convert_numeric_value(index, dfg)?; let index_var = self.get_flattened_index(&array_typ, array_id, index_var, dfg)?; @@ -1248,22 +1250,22 @@ impl<'a> Context<'a> { dfg: &DataFlowGraph, mut index_side_effect: bool, ) -> Result { - let (array_id, _, block_id) = self.check_array_is_initialized(array, dfg)?; + let block_id = self.ensure_array_is_initialized(array, dfg)?; let results = dfg.instruction_results(instruction); let res_typ = dfg.type_of_value(results[0]); // Get operations to call-data parameters are replaced by a get to the call-data-bus array if let Some(call_data) = self.data_bus.call_data { - if self.data_bus.call_data_map.contains_key(&array_id) { + if self.data_bus.call_data_map.contains_key(&array) { // TODO: the block_id of call-data must be notified to the backend // TODO: should we do the same for return-data? let type_size = res_typ.flattened_size(); let type_size = self.acir_context.add_constant(FieldElement::from(type_size as i128)); let offset = self.acir_context.mul_var(var_index, type_size)?; - let bus_index = self.acir_context.add_constant(FieldElement::from( - self.data_bus.call_data_map[&array_id] as i128, - )); + let bus_index = self + .acir_context + .add_constant(FieldElement::from(self.data_bus.call_data_map[&array] as i128)); let new_index = self.acir_context.add_var(offset, bus_index)?; return self.array_get(instruction, call_data, new_index, dfg, index_side_effect); } @@ -1277,8 +1279,7 @@ impl<'a> Context<'a> { let mut value = self.array_get_value(&res_typ, block_id, &mut var_index)?; if let AcirValue::Var(value_var, typ) = &value { - let array_id = dfg.resolve(array_id); - let array_typ = dfg.type_of_value(array_id); + let array_typ = dfg.type_of_value(array); if let (Type::Numeric(numeric_type), AcirType::NumericType(num)) = (array_typ.first(), typ) { @@ -1362,7 +1363,7 @@ impl<'a> Context<'a> { } }; - let (array_id, array_typ, block_id) = self.check_array_is_initialized(array, dfg)?; + let block_id = self.ensure_array_is_initialized(array, dfg)?; // Every array has a length in its type, so we fetch that from // the SSA IR. @@ -1371,10 +1372,11 @@ impl<'a> Context<'a> { // However, this size is simply the capacity of a slice. The capacity is dependent upon the witness // and may contain data for which we want to restrict access. The true slice length is tracked in a // a separate SSA value and restrictions on slice indices should be generated elsewhere in the SSA. + let array_typ = dfg.type_of_value(array); let array_len = if !array_typ.contains_slice_element() { array_typ.flattened_size() } else { - self.flattened_slice_size(array_id, dfg) + self.flattened_slice_size(array, dfg) }; // Since array_set creates a new array, we create a new block ID for this @@ -1397,18 +1399,13 @@ impl<'a> Context<'a> { self.array_set_value(&store_value, result_block_id, &mut var_index)?; let element_type_sizes = if !can_omit_element_sizes_array(&array_typ) { - let acir_value = self.convert_value(array_id, dfg); - Some(self.init_element_type_sizes_array( - &array_typ, - array_id, - Some(&acir_value), - dfg, - )?) 
+ let acir_value = self.convert_value(array, dfg); + Some(self.init_element_type_sizes_array(&array_typ, array, Some(&acir_value), dfg)?) } else { None }; - let value_types = self.convert_value(array_id, dfg).flat_numeric_types(); + let value_types = self.convert_value(array, dfg).flat_numeric_types(); // Compiler sanity check assert_eq!(value_types.len(), array_len, "ICE: The length of the flattened type array should match the length of the dynamic array"); @@ -1454,37 +1451,33 @@ impl<'a> Context<'a> { Ok(()) } - fn check_array_is_initialized( + fn ensure_array_is_initialized( &mut self, array: ValueId, dfg: &DataFlowGraph, - ) -> Result<(ValueId, Type, BlockId), RuntimeError> { - // Fetch the internal SSA ID for the array - let array_id = dfg.resolve(array); - - let array_typ = dfg.type_of_value(array_id); - + ) -> Result { // Use the SSA ID to get or create its block ID - let block_id = self.block_id(&array_id); + let block_id = self.block_id(&array); // Check if the array has already been initialized in ACIR gen // if not, we initialize it using the values from SSA let already_initialized = self.initialized_arrays.contains(&block_id); if !already_initialized { - let value = &dfg[array_id]; + let value = &dfg[array]; match value { Value::Array { .. } | Value::Instruction { .. } => { - let value = self.convert_value(array_id, dfg); + let value = self.convert_value(array, dfg); + let array_typ = dfg.type_of_value(array); let len = if !array_typ.contains_slice_element() { array_typ.flattened_size() } else { - self.flattened_slice_size(array_id, dfg) + self.flattened_slice_size(array, dfg) }; self.initialize_array(block_id, len, Some(value))?; } _ => { return Err(InternalError::General { - message: format!("Array {array_id} should be initialized"), + message: format!("Array {array} should be initialized"), call_stack: self.acir_context.get_call_stack(), } .into()); @@ -1492,7 +1485,7 @@ impl<'a> Context<'a> { } } - Ok((array_id, array_typ, block_id)) + Ok(block_id) } fn init_element_type_sizes_array( @@ -1746,7 +1739,7 @@ impl<'a> Context<'a> { /// Converts an SSA terminator's return values into their ACIR representations fn get_num_return_witnesses( - &mut self, + &self, terminator: &TerminatorInstruction, dfg: &DataFlowGraph, ) -> usize { @@ -1800,7 +1793,7 @@ impl<'a> Context<'a> { has_constant_return |= self.acir_context.is_constant(&acir_var); if is_databus { // We do not return value for the data bus. 
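For the call-data bus path above, the rewritten index is simply the element index scaled by the flattened type size plus the array's offset recorded in the call-data map, mirroring the `mul_var`/`add_var` pair. A minimal sketch with plain integers in place of `AcirVar`s (names are illustrative):

```rust
// new_index = var_index * flattened_type_size + offset_of_array_in_call_data_map
fn call_data_bus_index(var_index: u64, flattened_type_size: u64, array_offset: u64) -> u64 {
    var_index * flattened_type_size + array_offset
}
```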
- self.check_array_is_initialized( + self.ensure_array_is_initialized( self.data_bus.return_data.expect( "`is_databus == true` implies `data_bus.return_data` is `Some`", ), @@ -2167,8 +2160,9 @@ impl<'a> Context<'a> { Ok(vec![AcirValue::Var(self.acir_context.add_constant(len), AcirType::field())]) } Intrinsic::AsSlice => { - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[0], dfg)?; + let slice_contents = arguments[0]; + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let result_block_id = self.block_id(&result_ids[1]); @@ -2212,8 +2206,9 @@ impl<'a> Context<'a> { Intrinsic::SlicePushBack => { // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let (slice_contents, slice_typ, _) = - self.check_array_is_initialized(arguments[1], dfg)?; + let slice_contents = arguments[1]; + let slice_typ = dfg.type_of_value(slice_contents); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); @@ -2279,9 +2274,8 @@ impl<'a> Context<'a> { Intrinsic::SlicePushFront => { // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - - let (slice_contents, slice_typ, _) = - self.check_array_is_initialized(arguments[1], dfg)?; + let slice_contents = arguments[1]; + let slice_typ = dfg.type_of_value(slice_contents); assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice: AcirValue = self.convert_value(slice_contents, dfg); @@ -2344,6 +2338,7 @@ impl<'a> Context<'a> { Intrinsic::SlicePopBack => { // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; + let slice_contents = arguments[1]; let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; @@ -2352,8 +2347,8 @@ impl<'a> Context<'a> { // the elements stored at that index will no longer be able to be accessed. 
let mut var_index = new_slice_length; - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[1], dfg)?; + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let mut popped_elements = Vec::new(); @@ -2378,9 +2373,11 @@ impl<'a> Context<'a> { Intrinsic::SlicePopFront => { // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; + let slice_contents = arguments[1]; + + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[1], dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let one = self.acir_context.add_constant(FieldElement::one()); @@ -2419,9 +2416,11 @@ impl<'a> Context<'a> { Intrinsic::SliceInsert => { // arguments = [slice_length, slice_contents, insert_index, ...elements_to_insert] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; + let slice_contents = arguments[1]; + + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[1], dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); @@ -2558,9 +2557,11 @@ impl<'a> Context<'a> { Intrinsic::SliceRemove => { // arguments = [slice_length, slice_contents, remove_index] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; + let slice_contents = arguments[1]; + + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[1], dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); @@ -3476,7 +3477,7 @@ mod test { if stdlib_func_index == 0 { assert!(matches!(brillig_stdlib_func, BrilligStdlibFunc::Inverse)); } else { - assert!(matches!(brillig_stdlib_func, BrilligStdlibFunc::Quotient(_))); + assert!(matches!(brillig_stdlib_func, BrilligStdlibFunc::Quotient)); } match opcode_location { diff --git a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs index a063a7ff268..06325b31dd0 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs @@ -1,9 +1,11 @@ use iter_extended::vecmap; +use crate::ssa::ir::types::Type; + use super::{ basic_block::BasicBlockId, dfg::{CallStack, InsertInstructionResult}, - function::Function, + function::{Function, RuntimeType}, instruction::{Instruction, InstructionId}, value::ValueId, }; @@ -16,7 +18,10 @@ pub(crate) struct FunctionInserter<'f> { pub(crate) function: &'f mut Function, values: HashMap, - const_arrays: HashMap, ValueId>, + /// Map containing repeat array constants so that we do not initialize a new + /// array unnecessarily. An extra tuple field is included as part of the key to + /// distinguish between array/slice types. 
+ const_arrays: HashMap<(im::Vector, Type), ValueId>, } impl<'f> FunctionInserter<'f> { @@ -37,15 +42,25 @@ impl<'f> FunctionInserter<'f> { let typ = typ.clone(); let new_array: im::Vector = array.iter().map(|id| self.resolve(*id)).collect(); - if self.const_arrays.get(&new_array) == Some(&value) { - value - } else { - let new_array_clone = new_array.clone(); - let new_id = self.function.dfg.make_array(new_array, typ); - self.values.insert(value, new_id); - self.const_arrays.insert(new_array_clone, new_id); - new_id - } + + if let Some(fetched_value) = + self.const_arrays.get(&(new_array.clone(), typ.clone())) + { + // Arrays in ACIR are immutable, but in Brillig arrays are copy-on-write + // so for function's with a Brillig runtime we make sure to check that value + // in our constants array map matches the resolved array value id. + if matches!(self.function.runtime(), RuntimeType::Acir(_)) { + return *fetched_value; + } else if *fetched_value == value { + return value; + } + }; + + let new_array_clone = new_array.clone(); + let new_id = self.function.dfg.make_array(new_array, typ.clone()); + self.values.insert(value, new_id); + self.const_arrays.insert((new_array_clone, typ), new_id); + new_id } _ => value, }, diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 281ab7c3057..ad01edbd0b2 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -461,28 +461,37 @@ fn simplify_black_box_func( BlackBoxFunc::SHA256 => simplify_hash(dfg, arguments, acvm::blackbox_solver::sha256), BlackBoxFunc::Blake2s => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake2s), BlackBoxFunc::Blake3 => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake3), - BlackBoxFunc::PedersenCommitment - | BlackBoxFunc::PedersenHash - | BlackBoxFunc::Keccakf1600 => SimplifyResult::None, //TODO(Guillaume) - BlackBoxFunc::Keccak256 => { - match (dfg.get_array_constant(arguments[0]), dfg.get_numeric_constant(arguments[1])) { - (Some((input, _)), Some(num_bytes)) if array_is_constant(dfg, &input) => { - let input_bytes: Vec = to_u8_vec(dfg, input); - - let num_bytes = num_bytes.to_u128() as usize; - let truncated_input_bytes = &input_bytes[0..num_bytes]; - let hash = acvm::blackbox_solver::keccak256(truncated_input_bytes) - .expect("Rust solvable black box function should not fail"); - - let hash_values = - vecmap(hash, |byte| FieldElement::from_be_bytes_reduce(&[byte])); - - let result_array = make_constant_array(dfg, hash_values, Type::unsigned(8)); + BlackBoxFunc::PedersenCommitment | BlackBoxFunc::PedersenHash => SimplifyResult::None, + BlackBoxFunc::Keccakf1600 => { + if let Some((array_input, _)) = dfg.get_array_constant(arguments[0]) { + if array_is_constant(dfg, &array_input) { + let const_input: Vec = array_input + .iter() + .map(|id| { + let field = dfg + .get_numeric_constant(*id) + .expect("value id from array should point at constant"); + field.to_u128() as u64 + }) + .collect(); + + let state = acvm::blackbox_solver::keccakf1600( + const_input.try_into().expect("Keccakf1600 input should have length of 25"), + ) + .expect("Rust solvable black box function should not fail"); + let state_values = vecmap(state, |x| FieldElement::from(x as u128)); + let result_array = make_constant_array(dfg, state_values, Type::unsigned(64)); SimplifyResult::SimplifiedTo(result_array) + } else { + SimplifyResult::None } - _ => SimplifyResult::None, + } else { + 
SimplifyResult::None } } + BlackBoxFunc::Keccak256 => { + unreachable!("Keccak256 should have been replaced by calls to Keccakf1600") + } BlackBoxFunc::Poseidon2Permutation => SimplifyResult::None, //TODO(Guillaume) BlackBoxFunc::EcdsaSecp256k1 => { simplify_signature(dfg, arguments, acvm::blackbox_solver::ecdsa_secp256k1_verify) diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index c7ce3aaa155..4deb21ef712 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -752,27 +752,24 @@ impl<'f> Context<'f> { Instruction::Call { func, arguments } } Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::MultiScalarMul)) => { - let mut array_with_predicate = im::Vector::new(); - let array_typ; - if let Value::Array { array, typ } = - &self.inserter.function.dfg[arguments[0]] - { - array_typ = typ.clone(); - for (i, value) in array.clone().iter().enumerate() { - if i % 3 == 2 { - array_with_predicate.push_back(self.var_or_one( - *value, - condition, - call_stack.clone(), - )); - } else { - array_with_predicate.push_back(*value); - } - } + let points_array_idx = if matches!( + self.inserter.function.dfg[arguments[0]], + Value::Array { .. } + ) { + 0 } else { - unreachable!(); - } - arguments[0] = + // if the first argument is not an array, we assume it is a slice + // which means the array is the second argument + 1 + }; + let (array_with_predicate, array_typ) = self + .apply_predicate_to_msm_argument( + arguments[points_array_idx], + condition, + call_stack.clone(), + ); + + arguments[points_array_idx] = self.inserter.function.dfg.make_array(array_with_predicate, array_typ); Instruction::Call { func, arguments } } @@ -785,6 +782,40 @@ impl<'f> Context<'f> { } } + /// When a MSM is done under a predicate, we need to apply the predicate + /// to the is_infinity property of the input points in order to ensure + /// that the points will be on the curve no matter what. 
+ fn apply_predicate_to_msm_argument( + &mut self, + argument: ValueId, + predicate: ValueId, + call_stack: CallStack, + ) -> (im::Vector, Type) { + let array_typ; + let mut array_with_predicate = im::Vector::new(); + if let Value::Array { array, typ } = &self.inserter.function.dfg[argument] { + array_typ = typ.clone(); + for (i, value) in array.clone().iter().enumerate() { + if i % 3 == 2 { + array_with_predicate.push_back(self.var_or_one( + *value, + predicate, + call_stack.clone(), + )); + } else { + array_with_predicate.push_back(*value); + } + } + } else { + unreachable!( + "Expected an array, got {}", + &self.inserter.function.dfg.type_of_value(argument) + ); + }; + + (array_with_predicate, array_typ) + } + // Computes: if condition { var } else { 1 } fn var_or_one(&mut self, var: ValueId, condition: ValueId, call_stack: CallStack) -> ValueId { let field = self.insert_instruction( @@ -1381,7 +1412,7 @@ mod test { // Tests that it does not simplify a true constraint an always-false constraint // acir(inline) fn main f1 { // b0(v0: [u8; 2]): - // v4 = call keccak256(v0, u8 2) + // v4 = call sha256(v0, u8 2) // v5 = array_get v4, index u8 0 // v6 = cast v5 as u32 // v8 = truncate v6 to 1 bits, max_bit_size: 32 @@ -1417,7 +1448,7 @@ mod test { let two = builder.numeric_constant(2_u128, Type::unsigned(8)); let keccak = - builder.import_intrinsic_id(Intrinsic::BlackBox(acvm::acir::BlackBoxFunc::Keccak256)); + builder.import_intrinsic_id(Intrinsic::BlackBox(acvm::acir::BlackBoxFunc::SHA256)); let v4 = builder.insert_call(keccak, vec![array, two], vec![Type::Array(element_type, 32)])[0]; let v5 = builder.insert_array_get(v4, zero, Type::unsigned(8)); diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index e013546f14a..8e55debec1d 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -5,7 +5,7 @@ use acvm::{acir::AcirField, FieldElement}; use iter_extended::vecmap; use noirc_errors::Location; use noirc_frontend::ast::{BinaryOpKind, Signedness}; -use noirc_frontend::monomorphization::ast::{self, LocalId, Parameters}; +use noirc_frontend::monomorphization::ast::{self, InlineType, LocalId, Parameters}; use noirc_frontend::monomorphization::ast::{FuncId, Program}; use crate::errors::RuntimeError; @@ -121,9 +121,14 @@ impl<'a> FunctionContext<'a> { /// /// Note that the previous function cannot be resumed after calling this. Developers should /// avoid calling new_function until the previous function is completely finished with ssa-gen. - pub(super) fn new_function(&mut self, id: IrFunctionId, func: &ast::Function) { + pub(super) fn new_function( + &mut self, + id: IrFunctionId, + func: &ast::Function, + force_brillig_runtime: bool, + ) { self.definitions.clear(); - if func.unconstrained { + if func.unconstrained || (force_brillig_runtime && func.inline_type != InlineType::Inline) { self.builder.new_brillig_function(func.name.clone(), id); } else { self.builder.new_function(func.name.clone(), id, func.inline_type); diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index afe44881830..abd251b008f 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -111,7 +111,7 @@ pub(crate) fn generate_ssa( // to generate SSA for each function used within the program. 
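The MSM predicate helper above walks the flat points array, laid out as `[x, y, is_infinity, x, y, is_infinity, ...]`, and routes every third element through `var_or_one` so that under a false predicate the `is_infinity` slot becomes 1 and the inputs remain valid curve points. A standalone sketch of that loop over plain integers standing in for SSA values:

```rust
// points layout: [x, y, is_infinity, x, y, is_infinity, ...]
fn apply_predicate_to_points(points: &[u64], predicate_is_true: bool) -> Vec<u64> {
    points
        .iter()
        .enumerate()
        .map(|(i, &value)| {
            if i % 3 == 2 {
                // var_or_one: if condition { var } else { 1 }
                if predicate_is_true { value } else { 1 }
            } else {
                value
            }
        })
        .collect()
}
```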
while let Some((src_function_id, dest_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; - function_context.new_function(dest_id, function); + function_context.new_function(dest_id, function, force_brillig_runtime); function_context.codegen_function_body(&function.body)?; } diff --git a/compiler/noirc_frontend/Cargo.toml b/compiler/noirc_frontend/Cargo.toml index 052d2c5f484..f7439a09204 100644 --- a/compiler/noirc_frontend/Cargo.toml +++ b/compiler/noirc_frontend/Cargo.toml @@ -10,6 +10,7 @@ license.workspace = true [dependencies] acvm.workspace = true +bn254_blackbox_solver.workspace = true noirc_arena.workspace = true noirc_errors.workspace = true noirc_printable_type.workspace = true diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs index 87cc7990753..057daa2bdde 100644 --- a/compiler/noirc_frontend/src/ast/expression.rs +++ b/compiler/noirc_frontend/src/ast/expression.rs @@ -800,12 +800,8 @@ impl FunctionDefinition { return_visibility: Visibility::Private, } } -} - -impl Display for FunctionDefinition { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - writeln!(f, "{:?}", self.attributes)?; + pub fn signature(&self) -> String { let parameters = vecmap(&self.parameters, |Param { visibility, pattern, typ, span: _ }| { if *visibility == Visibility::Public { format!("{pattern}: {visibility} {typ}") @@ -827,15 +823,14 @@ impl Display for FunctionDefinition { format!(" -> {}", self.return_type) }; - write!( - f, - "fn {}({}){}{} {}", - self.name, - parameters.join(", "), - return_type, - where_clause_str, - self.body - ) + format!("fn {}({}){}{}", self.name, parameters.join(", "), return_type, where_clause_str) + } +} + +impl Display for FunctionDefinition { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!(f, "{:?}", self.attributes)?; + write!(f, "fn {} {}", self.signature(), self.body) } } diff --git a/compiler/noirc_frontend/src/ast/mod.rs b/compiler/noirc_frontend/src/ast/mod.rs index dfe4258744a..038a13529d7 100644 --- a/compiler/noirc_frontend/src/ast/mod.rs +++ b/compiler/noirc_frontend/src/ast/mod.rs @@ -301,15 +301,12 @@ impl UnresolvedTypeExpression { // This large error size is justified because it improves parsing speeds by around 40% in // release mode. See `ParserError` definition for further explanation. 
#[allow(clippy::result_large_err)] - pub fn from_expr( + pub(crate) fn from_expr( expr: Expression, span: Span, ) -> Result { Self::from_expr_helper(expr).map_err(|err_expr| { - ParserError::with_reason( - ParserErrorReason::InvalidArrayLengthExpression(err_expr), - span, - ) + ParserError::with_reason(ParserErrorReason::InvalidTypeExpression(err_expr), span) }) } @@ -323,13 +320,10 @@ impl UnresolvedTypeExpression { fn from_expr_helper(expr: Expression) -> Result { match expr.kind { - ExpressionKind::Literal(Literal::Integer(int, sign)) => { - assert!(!sign, "Negative literal is not allowed here"); - match int.try_to_u32() { - Some(int) => Ok(UnresolvedTypeExpression::Constant(int, expr.span)), - None => Err(expr), - } - } + ExpressionKind::Literal(Literal::Integer(int, _)) => match int.try_to_u32() { + Some(int) => Ok(UnresolvedTypeExpression::Constant(int, expr.span)), + None => Err(expr), + }, ExpressionKind::Variable(path, _) => Ok(UnresolvedTypeExpression::Variable(path)), ExpressionKind::Prefix(prefix) if prefix.operator == UnaryOp::Minus => { let lhs = Box::new(UnresolvedTypeExpression::Constant(0, expr.span)); diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index 3e6a140ff93..b41efebc905 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -10,7 +10,7 @@ use super::{ BlockExpression, Expression, ExpressionKind, IndexExpression, MemberAccessExpression, MethodCallExpression, UnresolvedType, }; -use crate::hir::resolution::resolver::SELF_TYPE_NAME; +use crate::elaborator::types::SELF_TYPE_NAME; use crate::lexer::token::SpannedToken; use crate::macros_api::SecondaryAttribute; use crate::parser::{ParserError, ParserErrorReason}; @@ -299,6 +299,7 @@ pub enum PathKind { Crate, Dep, Plain, + Super, } #[derive(Debug, PartialEq, Eq, Clone)] @@ -748,6 +749,7 @@ impl Display for PathKind { match self { PathKind::Crate => write!(f, "crate"), PathKind::Dep => write!(f, "dep"), + PathKind::Super => write!(f, "super"), PathKind::Plain => write!(f, "plain"), } } diff --git a/compiler/noirc_frontend/src/ast/structure.rs b/compiler/noirc_frontend/src/ast/structure.rs index bb2d89841b9..112747e09fb 100644 --- a/compiler/noirc_frontend/src/ast/structure.rs +++ b/compiler/noirc_frontend/src/ast/structure.rs @@ -14,18 +14,7 @@ pub struct NoirStruct { pub generics: UnresolvedGenerics, pub fields: Vec<(Ident, UnresolvedType)>, pub span: Span, -} - -impl NoirStruct { - pub fn new( - name: Ident, - attributes: Vec, - generics: UnresolvedGenerics, - fields: Vec<(Ident, UnresolvedType)>, - span: Span, - ) -> NoirStruct { - NoirStruct { name, attributes, generics, fields, span } - } + pub is_comptime: bool, } impl Display for NoirStruct { diff --git a/compiler/noirc_frontend/src/ast/traits.rs b/compiler/noirc_frontend/src/ast/traits.rs index b1b14e3f657..b23fbaede61 100644 --- a/compiler/noirc_frontend/src/ast/traits.rs +++ b/compiler/noirc_frontend/src/ast/traits.rs @@ -7,6 +7,7 @@ use crate::ast::{ BlockExpression, Expression, FunctionReturnType, Ident, NoirFunction, Path, UnresolvedGenerics, UnresolvedType, }; +use crate::macros_api::SecondaryAttribute; use crate::node_interner::TraitId; /// AST node for trait definitions: @@ -18,6 +19,7 @@ pub struct NoirTrait { pub where_clause: Vec, pub span: Span, pub items: Vec, + pub attributes: Vec, } /// Any declaration inside the body of a trait that a user is required to @@ -51,6 +53,7 @@ pub struct TypeImpl { pub generics: UnresolvedGenerics, pub 
where_clause: Vec, pub methods: Vec<(NoirFunction, Span)>, + pub is_comptime: bool, } /// Ast node for an implementation of a trait for a particular type @@ -67,6 +70,8 @@ pub struct NoirTraitImpl { pub where_clause: Vec, pub items: Vec, + + pub is_comptime: bool, } /// Represents a simple trait constraint such as `where Foo: TraitY` @@ -82,7 +87,7 @@ pub struct UnresolvedTraitConstraint { } /// Represents a single trait bound, such as `TraitX` or `TraitY` -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct TraitBound { pub trait_path: Path, pub trait_id: Option, // initially None, gets assigned during DC diff --git a/compiler/noirc_frontend/src/elaborator/comptime.rs b/compiler/noirc_frontend/src/elaborator/comptime.rs new file mode 100644 index 00000000000..0cbd2db55da --- /dev/null +++ b/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -0,0 +1,71 @@ +use std::mem::replace; + +use crate::{ + hir_def::expr::HirIdent, + macros_api::Expression, + node_interner::{DependencyId, ExprId, FuncId}, +}; + +use super::{Elaborator, FunctionContext, ResolverMeta}; + +impl<'context> Elaborator<'context> { + /// Elaborate an expression from the middle of a comptime scope. + /// When this happens we require additional information to know + /// what variables should be in scope. + pub fn elaborate_expression_from_comptime( + &mut self, + expr: Expression, + function: Option, + ) -> ExprId { + self.function_context.push(FunctionContext::default()); + let old_scope = self.scopes.end_function(); + self.scopes.start_function(); + let function_id = function.map(DependencyId::Function); + let old_item = replace(&mut self.current_item, function_id); + + // Note: recover_generics isn't good enough here because any existing generics + // should not be in scope of this new function + let old_generics = std::mem::take(&mut self.generics); + + let old_crate_and_module = function.map(|function| { + let meta = self.interner.function_meta(&function); + let old_crate = replace(&mut self.crate_id, meta.source_crate); + let old_module = replace(&mut self.local_module, meta.source_module); + self.introduce_generics_into_scope(meta.all_generics.clone()); + (old_crate, old_module) + }); + + self.populate_scope_from_comptime_scopes(); + let expr = self.elaborate_expression(expr).0; + + if let Some((old_crate, old_module)) = old_crate_and_module { + self.crate_id = old_crate; + self.local_module = old_module; + } + + self.generics = old_generics; + self.current_item = old_item; + self.scopes.end_function(); + self.scopes.0.push(old_scope); + self.check_and_pop_function_context(); + expr + } + + fn populate_scope_from_comptime_scopes(&mut self) { + // Take the comptime scope to be our runtime scope. + // Iterate from global scope to the most local scope so that the + // later definitions will naturally shadow the former. 
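`elaborate_expression_from_comptime` above is essentially a save/elaborate/restore wrapper: it swaps in the target function's crate, module and generics with `std::mem::replace`, runs the nested elaboration, then puts the previous state back so the surrounding elaboration is unaffected. A minimal sketch of that pattern over a hypothetical two-field state (the real elaborator also swaps generics, scopes and the function context):

```rust
use std::mem::replace;

// Hypothetical minimal state; names are illustrative only.
struct ElaboratorState {
    crate_id: u32,
    module_id: u32,
}

fn with_swapped_scope<T>(
    state: &mut ElaboratorState,
    crate_id: u32,
    module_id: u32,
    f: impl FnOnce(&mut ElaboratorState) -> T,
) -> T {
    let old_crate = replace(&mut state.crate_id, crate_id);
    let old_module = replace(&mut state.module_id, module_id);
    let result = f(state);
    state.crate_id = old_crate;
    state.module_id = old_module;
    result
}
```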
+ for scope in &self.comptime_scopes { + for definition_id in scope.keys() { + let definition = self.interner.definition(*definition_id); + let name = definition.name.clone(); + let location = definition.location; + + let scope = self.scopes.get_mut_scope(); + let ident = HirIdent::non_trait_method(*definition_id, location); + let meta = ResolverMeta { ident, num_times_used: 0, warn_if_unused: false }; + scope.add_key_value(name.clone(), meta); + } + } + } +} diff --git a/compiler/noirc_frontend/src/elaborator/expressions.rs b/compiler/noirc_frontend/src/elaborator/expressions.rs index 5cda8787241..853098ce931 100644 --- a/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -10,7 +10,7 @@ use crate::{ }, hir::{ comptime::{self, InterpreterError}, - resolution::{errors::ResolverError, resolver::LambdaContext}, + resolution::errors::ResolverError, type_check::TypeCheckError, }, hir_def::{ @@ -27,12 +27,12 @@ use crate::{ HirStatement, Ident, IndexExpression, Literal, MemberAccessExpression, MethodCallExpression, PrefixExpression, }, - node_interner::{DefinitionKind, ExprId, FuncId, ReferenceId, TraitMethodId}, + node_interner::{DefinitionKind, ExprId, FuncId, TraitMethodId}, token::Tokens, QuotedType, Shared, StructType, Type, }; -use super::Elaborator; +use super::{Elaborator, LambdaContext}; impl<'context> Elaborator<'context> { pub(super) fn elaborate_expression(&mut self, expr: Expression) -> (ExprId, Type) { @@ -300,15 +300,21 @@ impl<'context> Elaborator<'context> { } let location = Location::new(span, self.file); - let hir_call = HirCallExpression { func, arguments, location }; - let typ = self.type_check_call(&hir_call, func_type, args, span); + let is_macro_call = call.is_macro_call; + let hir_call = HirCallExpression { func, arguments, location, is_macro_call }; + let mut typ = self.type_check_call(&hir_call, func_type, args, span); - if call.is_macro_call { - self.call_macro(func, comptime_args, location, typ) - .unwrap_or_else(|| (HirExpression::Error, Type::Error)) - } else { - (HirExpression::Call(hir_call), typ) + if is_macro_call { + if self.in_comptime_context() { + typ = self.interner.next_type_variable(); + } else { + return self + .call_macro(func, comptime_args, location, typ) + .unwrap_or_else(|| (HirExpression::Error, Type::Error)); + } } + + (HirExpression::Call(hir_call), typ) } fn elaborate_method_call( @@ -320,6 +326,7 @@ impl<'context> Elaborator<'context> { let (mut object, mut object_type) = self.elaborate_expression(method_call.object); object_type = object_type.follow_bindings(); + let method_name_span = method_call.method_name.span(); let method_name = method_call.method_name.0.contents.as_str(); match self.lookup_method(&object_type, method_name, span) { Some(method_ref) => { @@ -367,6 +374,7 @@ impl<'context> Elaborator<'context> { let location = Location::new(span, self.file); let method = method_call.method_name; let turbofish_generics = generics.clone(); + let is_macro_call = method_call.is_macro_call; let method_call = HirMethodCallExpression { method, object, arguments, location, generics }; @@ -376,6 +384,7 @@ impl<'context> Elaborator<'context> { let ((function_id, function_name), function_call) = method_call.into_function_call( &method_ref, object_type, + is_macro_call, location, self.interner, ); @@ -385,6 +394,9 @@ impl<'context> Elaborator<'context> { self.interner.push_expr_type(function_id, func_type.clone()); + self.interner + .add_function_reference(func_id, 
Location::new(method_name_span, self.file)); + // Type check the new call now that it has been changed from a method call // to a function call. This way we avoid duplicating code. let typ = self.type_check_call(&function_call, func_type, function_args, span); @@ -399,7 +411,8 @@ impl<'context> Elaborator<'context> { constructor: ConstructorExpression, ) -> (HirExpression, Type) { let span = constructor.type_name.span(); - let is_self_type = constructor.type_name.last_segment().is_self_type_name(); + let last_segment = constructor.type_name.last_segment(); + let is_self_type = last_segment.is_self_type_name(); let (r#type, struct_generics) = if let Some(struct_id) = constructor.struct_type { let typ = self.interner.get_struct(struct_id); @@ -429,9 +442,9 @@ impl<'context> Elaborator<'context> { struct_generics, }); - let referenced = ReferenceId::Struct(struct_type.borrow().id); - let reference = ReferenceId::Reference(Location::new(span, self.file), is_self_type); - self.interner.add_reference(referenced, reference); + let struct_id = struct_type.borrow().id; + let reference_location = Location::new(last_segment.span(), self.file); + self.interner.add_struct_reference(struct_id, reference_location, is_self_type); (expr, Type::Struct(struct_type, generics)) } @@ -485,11 +498,11 @@ impl<'context> Elaborator<'context> { } if let Some(expected_index) = expected_index { - let struct_id = struct_type.borrow().id; - let referenced = ReferenceId::StructMember(struct_id, expected_index); - let reference = - ReferenceId::Reference(Location::new(field_name.span(), self.file), false); - self.interner.add_reference(referenced, reference); + self.interner.add_struct_member_reference( + struct_type.borrow().id, + expected_index, + Location::new(field_name.span(), self.file), + ); } ret.push((field_name, resolved)); @@ -581,6 +594,7 @@ impl<'context> Elaborator<'context> { typ: operand_type.clone(), trait_id: trait_id.trait_id, trait_generics: Vec::new(), + span, }; self.push_trait_constraint(constraint, expr_id); self.type_check_operator_method(expr_id, trait_id, operand_type, span); @@ -703,10 +717,8 @@ impl<'context> Elaborator<'context> { let (block, _typ) = self.elaborate_block_expression(block); self.check_and_pop_function_context(); - let mut interpreter_errors = vec![]; - let mut interpreter = self.setup_interpreter(&mut interpreter_errors); + let mut interpreter = self.setup_interpreter(); let value = interpreter.evaluate_block(block); - self.include_interpreter_errors(interpreter_errors); let (id, typ) = self.inline_comptime_value(value, span); let location = self.interner.id_location(id); @@ -717,7 +729,7 @@ impl<'context> Elaborator<'context> { (id, typ) } - pub(super) fn inline_comptime_value( + pub fn inline_comptime_value( &mut self, value: Result, span: Span, @@ -745,19 +757,23 @@ impl<'context> Elaborator<'context> { &mut self, func: ExprId, location: Location, - ) -> Result { + ) -> Result, ResolverError> { match self.interner.expression(&func) { HirExpression::Ident(ident, _generics) => { - let definition = self.interner.definition(ident.id); - if let DefinitionKind::Function(function) = definition.kind { - let meta = self.interner.function_modifiers(&function); - if meta.is_comptime { - Ok(function) + if let Some(definition) = self.interner.try_definition(ident.id) { + if let DefinitionKind::Function(function) = definition.kind { + let meta = self.interner.function_modifiers(&function); + if meta.is_comptime { + Ok(Some(function)) + } else { + Err(ResolverError::MacroIsNotComptime { 
span: location.span }) + } } else { - Err(ResolverError::MacroIsNotComptime { span: location.span }) + Err(ResolverError::InvalidSyntaxInMacroCall { span: location.span }) } } else { - Err(ResolverError::InvalidSyntaxInMacroCall { span: location.span }) + // Assume a name resolution error has already been issued + Ok(None) } } _ => Err(ResolverError::InvalidSyntaxInMacroCall { span: location.span }), @@ -778,7 +794,7 @@ impl<'context> Elaborator<'context> { }); let function = match self.try_get_comptime_function(func, location) { - Ok(function) => function, + Ok(function) => function?, Err(error) => { self.push_err(error); return None; @@ -786,24 +802,22 @@ impl<'context> Elaborator<'context> { }; let file = self.file; - let mut interpreter_errors = vec![]; - let mut interpreter = self.setup_interpreter(&mut interpreter_errors); + let mut interpreter = self.setup_interpreter(); let mut comptime_args = Vec::new(); let mut errors = Vec::new(); for argument in arguments { match interpreter.evaluate(argument) { Ok(arg) => { - let location = interpreter.interner.expr_location(&argument); + let location = interpreter.elaborator.interner.expr_location(&argument); comptime_args.push((arg, location)); } Err(error) => errors.push((error.into(), file)), } } - let bindings = interpreter.interner.get_instantiation_bindings(func).clone(); + let bindings = interpreter.elaborator.interner.get_instantiation_bindings(func).clone(); let result = interpreter.call_function(function, comptime_args, bindings, location); - self.include_interpreter_errors(interpreter_errors); if !errors.is_empty() { self.errors.append(&mut errors); diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs index e1d104c4971..e0affad1fbf 100644 --- a/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/compiler/noirc_frontend/src/elaborator/mod.rs @@ -7,25 +7,25 @@ use std::{ use crate::{ ast::{FunctionKind, UnresolvedTraitConstraint}, hir::{ - comptime::{self, Interpreter, InterpreterError, Value}, + comptime::{Interpreter, InterpreterError, Value}, def_collector::{ dc_crate::{ filter_literal_globals, CompilationError, ImplMap, UnresolvedGlobal, UnresolvedStruct, UnresolvedTypeAlias, }, dc_mod, - errors::DuplicateType, }, - resolution::{errors::ResolverError, path_resolver::PathResolver, resolver::LambdaContext}, + resolution::{errors::ResolverError, path_resolver::PathResolver}, scope::ScopeForest as GenericScopeForest, - type_check::{check_trait_impl_method_matches_declaration, TypeCheckError}, + type_check::TypeCheckError, }, hir_def::{ - expr::HirIdent, + expr::{HirCapturedVar, HirIdent}, function::{FunctionBody, Parameters}, traits::TraitConstraint, types::{Generics, Kind, ResolvedGeneric}, }, + lexer::Lexer, macros_api::{ BlockExpression, Ident, NodeInterner, NoirFunction, NoirStruct, Pattern, SecondaryAttribute, StructId, @@ -35,6 +35,7 @@ use crate::{ TypeAliasId, }, parser::TopLevelStatement, + token::Tokens, Shared, Type, TypeBindings, TypeVariable, }; use crate::{ @@ -59,19 +60,23 @@ use crate::{ macros_api::ItemVisibility, }; +mod comptime; mod expressions; mod lints; mod patterns; mod scope; mod statements; +mod trait_impls; mod traits; -mod types; +pub mod types; mod unquote; use fm::FileId; use iter_extended::vecmap; use noirc_errors::{Location, Span}; -use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; +use rustc_hash::FxHashMap as HashMap; + +use self::traits::check_trait_impl_method_matches_declaration; /// ResolverMetas are tagged onto each definition to 
track how many times they are used #[derive(Debug, PartialEq, Eq)] @@ -83,12 +88,19 @@ pub struct ResolverMeta { type ScopeForest = GenericScopeForest; +pub struct LambdaContext { + pub captures: Vec, + /// the index in the scope tree + /// (sometimes being filled by ScopeTree's find method) + pub scope_index: usize, +} + pub struct Elaborator<'context> { scopes: ScopeForest, - errors: Vec<(CompilationError, FileId)>, + pub(crate) errors: Vec<(CompilationError, FileId)>, - interner: &'context mut NodeInterner, + pub(crate) interner: &'context mut NodeInterner, def_maps: &'context mut BTreeMap, @@ -156,7 +168,7 @@ pub struct Elaborator<'context> { /// Each value currently in scope in the comptime interpreter. /// Each element of the Vec represents a scope with every scope together making /// up all currently visible definitions. The first scope is always the global scope. - comptime_scopes: Vec>, + pub(crate) comptime_scopes: Vec>, /// The scope of --debug-comptime, or None if unset debug_comptime_in_file: Option, @@ -217,6 +229,15 @@ impl<'context> Elaborator<'context> { items: CollectedItems, debug_comptime_in_file: Option, ) -> Vec<(CompilationError, FileId)> { + Self::elaborate_and_return_self(context, crate_id, items, debug_comptime_in_file).errors + } + + pub fn elaborate_and_return_self( + context: &'context mut Context, + crate_id: CrateId, + items: CollectedItems, + debug_comptime_in_file: Option, + ) -> Self { let mut this = Self::new(context, crate_id, debug_comptime_in_file); // Filter out comptime items to execute their functions first if needed. @@ -227,7 +248,7 @@ impl<'context> Elaborator<'context> { let (comptime_items, runtime_items) = Self::filter_comptime_items(items); this.elaborate_items(comptime_items); this.elaborate_items(runtime_items); - this.errors + this } fn elaborate_items(&mut self, mut items: CollectedItems) { @@ -250,11 +271,11 @@ impl<'context> Elaborator<'context> { } // Must resolve structs before we resolve globals. - let generated_items = self.collect_struct_definitions(items.types); + let mut generated_items = self.collect_struct_definitions(items.types); self.define_function_metas(&mut items.functions, &mut items.impls, &mut items.trait_impls); - self.collect_traits(items.traits); + self.collect_traits(items.traits, &mut generated_items); // Before we resolve any function symbols we must go through our impls and // re-collect the methods within into their proper module. This cannot be @@ -276,6 +297,10 @@ impl<'context> Elaborator<'context> { self.elaborate_global(global); } + // We have to run any comptime attributes on functions before the function is elaborated + // since the generated items are checked beforehand as well. + self.run_attributes_on_functions(&items.functions, &mut generated_items); + // After everything is collected, we can elaborate our generated items. // It may be better to inline these within `items` entirely since elaborating them // all here means any globals will not see these. 
Inlining them completely within `items` @@ -324,6 +349,21 @@ impl<'context> Elaborator<'context> { self.trait_id = None; } + fn introduce_generics_into_scope(&mut self, all_generics: Vec) { + // Introduce all numeric generics into scope + for generic in &all_generics { + if let Kind::Numeric(typ) = &generic.kind { + let definition = DefinitionKind::GenericType(generic.type_var.clone()); + let ident = Ident::new(generic.name.to_string(), generic.span); + let hir_ident = + self.add_variable_decl_inner(ident, false, false, false, definition); + self.interner.push_definition_type(hir_ident.id, *typ.clone()); + } + } + + self.generics = all_generics; + } + fn elaborate_function(&mut self, id: FuncId) { let func_meta = self.interner.func_meta.get_mut(&id); let func_meta = @@ -345,16 +385,7 @@ impl<'context> Elaborator<'context> { self.trait_bounds = func_meta.trait_constraints.clone(); self.function_context.push(FunctionContext::default()); - // Introduce all numeric generics into scope - for generic in &func_meta.all_generics { - if let Kind::Numeric(typ) = &generic.kind { - let definition = DefinitionKind::GenericType(generic.type_var.clone()); - let ident = Ident::new(generic.name.to_string(), generic.span); - let hir_ident = - self.add_variable_decl_inner(ident, false, false, false, definition); - self.interner.push_definition_type(hir_ident.id, *typ.clone()); - } - } + self.introduce_generics_into_scope(func_meta.all_generics.clone()); // The DefinitionIds for each parameter were already created in define_function_meta // so we need to reintroduce the same IDs into scope here. @@ -363,8 +394,6 @@ impl<'context> Elaborator<'context> { self.add_existing_variable_to_scope(name, parameter.clone(), true); } - self.generics = func_meta.all_generics.clone(); - self.declare_numeric_generics(&func_meta.parameters, func_meta.return_type()); self.add_trait_constraints_to_scope(&func_meta); @@ -607,7 +636,7 @@ impl<'context> Elaborator<'context> { }); } - Some(TraitConstraint { typ, trait_id, trait_generics }) + Some(TraitConstraint { typ, trait_id, trait_generics, span }) } /// Extract metadata from a NoirFunction @@ -618,7 +647,7 @@ impl<'context> Elaborator<'context> { &mut self, func: &mut NoirFunction, func_id: FuncId, - is_trait_function: bool, + trait_id: Option, ) { let in_contract = if self.self_type.is_some() { // Without this, impl methods can accidentally be placed in contracts. 
@@ -718,6 +747,12 @@ impl<'context> Elaborator<'context> { let statements = std::mem::take(&mut func.def.body.statements); let body = BlockExpression { statements }; + let struct_id = if let Some(Type::Struct(struct_type, _)) = &self.self_type { + Some(struct_type.borrow().id) + } else { + None + }; + let meta = FuncMeta { name: name_ident, kind: func.kind, @@ -725,6 +760,8 @@ impl<'context> Elaborator<'context> { typ, direct_generics, all_generics: self.generics.clone(), + struct_id, + trait_id, trait_impl: self.current_trait_impl, parameters: parameters.into(), parameter_idents, @@ -733,9 +770,9 @@ impl<'context> Elaborator<'context> { has_body: !func.def.body.is_empty(), trait_constraints, is_entry_point, - is_trait_function, has_inline_attribute, source_crate: self.crate_id, + source_module: self.local_module, function_body: FunctionBody::Unresolved(func.kind, body, func.def.span), }; @@ -907,7 +944,14 @@ impl<'context> Elaborator<'context> { if let Some(trait_id) = trait_impl.trait_id { self.generics = trait_impl.resolved_generics.clone(); - self.collect_trait_impl_methods(trait_id, trait_impl); + + let where_clause = trait_impl + .where_clause + .iter() + .flat_map(|item| self.resolve_trait_constraint(item)) + .collect::>(); + + self.collect_trait_impl_methods(trait_id, trait_impl, &where_clause); let span = trait_impl.object_type.span.expect("All trait self types should have spans"); self.declare_methods_on_struct(true, &mut trait_impl.methods, span); @@ -917,12 +961,6 @@ impl<'context> Elaborator<'context> { self.interner.set_function_trait(*func_id, self_type.clone(), trait_id); } - let where_clause = trait_impl - .where_clause - .iter() - .flat_map(|item| self.resolve_trait_constraint(item)) - .collect(); - let trait_generics = trait_impl.resolved_trait_generics.clone(); let resolved_trait_impl = Shared::new(TraitImpl { @@ -1053,121 +1091,6 @@ impl<'context> Elaborator<'context> { } } - fn collect_trait_impl_methods( - &mut self, - trait_id: TraitId, - trait_impl: &mut UnresolvedTraitImpl, - ) { - self.local_module = trait_impl.module_id; - self.file = trait_impl.file_id; - - // In this Vec methods[i] corresponds to trait.methods[i]. If the impl has no implementation - // for a particular method, the default implementation will be added at that slot. 
- let mut ordered_methods = Vec::new(); - - // check whether the trait implementation is in the same crate as either the trait or the type - self.check_trait_impl_crate_coherence(trait_id, trait_impl); - - // set of function ids that have a corresponding method in the trait - let mut func_ids_in_trait = HashSet::default(); - - // Temporarily take ownership of the trait's methods so we can iterate over them - // while also mutating the interner - let the_trait = self.interner.get_trait_mut(trait_id); - let methods = std::mem::take(&mut the_trait.methods); - - for method in &methods { - let overrides: Vec<_> = trait_impl - .methods - .functions - .iter() - .filter(|(_, _, f)| f.name() == method.name.0.contents) - .collect(); - - if overrides.is_empty() { - if let Some(default_impl) = &method.default_impl { - // copy 'where' clause from unresolved trait impl - let mut default_impl_clone = default_impl.clone(); - default_impl_clone.def.where_clause.extend(trait_impl.where_clause.clone()); - - let func_id = self.interner.push_empty_fn(); - let module = self.module_id(); - let location = Location::new(default_impl.def.span, trait_impl.file_id); - self.interner.push_function(func_id, &default_impl.def, module, location); - self.define_function_meta(&mut default_impl_clone, func_id, false); - func_ids_in_trait.insert(func_id); - ordered_methods.push(( - method.default_impl_module_id, - func_id, - *default_impl_clone, - )); - } else { - self.push_err(DefCollectorErrorKind::TraitMissingMethod { - trait_name: self.interner.get_trait(trait_id).name.clone(), - method_name: method.name.clone(), - trait_impl_span: trait_impl - .object_type - .span - .expect("type must have a span"), - }); - } - } else { - for (_, func_id, _) in &overrides { - func_ids_in_trait.insert(*func_id); - } - - if overrides.len() > 1 { - self.push_err(DefCollectorErrorKind::Duplicate { - typ: DuplicateType::TraitAssociatedFunction, - first_def: overrides[0].2.name_ident().clone(), - second_def: overrides[1].2.name_ident().clone(), - }); - } - - ordered_methods.push(overrides[0].clone()); - } - } - - // Restore the methods that were taken before the for loop - let the_trait = self.interner.get_trait_mut(trait_id); - the_trait.set_methods(methods); - - // Emit MethodNotInTrait error for methods in the impl block that - // don't have a corresponding method signature defined in the trait - for (_, func_id, func) in &trait_impl.methods.functions { - if !func_ids_in_trait.contains(func_id) { - let trait_name = the_trait.name.clone(); - let impl_method = func.name_ident().clone(); - let error = DefCollectorErrorKind::MethodNotInTrait { trait_name, impl_method }; - self.errors.push((error.into(), self.file)); - } - } - - trait_impl.methods.functions = ordered_methods; - trait_impl.methods.trait_id = Some(trait_id); - } - - fn check_trait_impl_crate_coherence( - &mut self, - trait_id: TraitId, - trait_impl: &UnresolvedTraitImpl, - ) { - self.local_module = trait_impl.module_id; - self.file = trait_impl.file_id; - - let object_crate = match &trait_impl.resolved_object_type { - Some(Type::Struct(struct_type, _)) => struct_type.borrow().id.krate(), - _ => CrateId::Dummy, - }; - - let the_trait = self.interner.get_trait(trait_id); - if self.crate_id != the_trait.crate_id && self.crate_id != object_crate { - self.push_err(DefCollectorErrorKind::TraitImplOrphaned { - span: trait_impl.object_type.span.expect("object type must have a span"), - }); - } - } - fn define_type_alias(&mut self, alias_id: TypeAliasId, alias: UnresolvedTypeAlias) { 
self.file = alias.file_id; self.local_module = alias.module_id; @@ -1230,10 +1153,11 @@ impl<'context> Elaborator<'context> { for field_index in 0..fields_len { self.interner - .add_definition_location(ReferenceId::StructMember(type_id, field_index)); + .add_definition_location(ReferenceId::StructMember(type_id, field_index), None); } - self.run_comptime_attributes_on_struct(attributes, type_id, span, &mut generated_items); + let item = Value::StructDefinition(type_id); + self.run_comptime_attributes_on_item(&attributes, item, span, &mut generated_items); } // Check whether the struct fields have nested slices @@ -1259,17 +1183,17 @@ impl<'context> Elaborator<'context> { generated_items } - fn run_comptime_attributes_on_struct( + fn run_comptime_attributes_on_item( &mut self, - attributes: Vec, - struct_id: StructId, + attributes: &[SecondaryAttribute], + item: Value, span: Span, generated_items: &mut CollectedItems, ) { for attribute in attributes { if let SecondaryAttribute::Custom(name) = attribute { if let Err(error) = - self.run_comptime_attribute_on_struct(name, struct_id, span, generated_items) + self.run_comptime_attribute_on_item(name, item.clone(), span, generated_items) { self.errors.push(error); } @@ -1277,30 +1201,35 @@ impl<'context> Elaborator<'context> { } } - fn run_comptime_attribute_on_struct( + fn run_comptime_attribute_on_item( &mut self, - attribute: String, - struct_id: StructId, + attribute: &str, + item: Value, span: Span, generated_items: &mut CollectedItems, ) -> Result<(), (CompilationError, FileId)> { - let id = self - .lookup_global(Path::from_single(attribute, span)) - .map_err(|_| (ResolverError::UnknownAnnotation { span }.into(), self.file))?; + let location = Location::new(span, self.file); + let (function_name, mut arguments) = Self::parse_attribute(attribute, location) + .unwrap_or_else(|| (attribute.to_string(), Vec::new())); + + let Ok(id) = self.lookup_global(Path::from_single(function_name, span)) else { + // Do not issue an error if the attribute is unknown + return Ok(()); + }; let definition = self.interner.definition(id); let DefinitionKind::Function(function) = definition.kind else { return Err((ResolverError::NonFunctionInAnnotation { span }.into(), self.file)); }; - let location = Location::new(span, self.file); - let mut interpreter_errors = vec![]; - let mut interpreter = self.setup_interpreter(&mut interpreter_errors); - let arguments = vec![(Value::StructDefinition(struct_id), location)]; + + self.handle_varargs_attribute(function, &mut arguments, location); + arguments.insert(0, (item, location)); + + let mut interpreter = self.setup_interpreter(); let value = interpreter .call_function(function, arguments, TypeBindings::new(), location) .map_err(|error| error.into_compilation_error_pair())?; - self.include_interpreter_errors(interpreter_errors); if value != Value::Unit { let items = value @@ -1313,6 +1242,59 @@ impl<'context> Elaborator<'context> { Ok(()) } + /// Parses an attribute in the form of a function call (e.g. `#[foo(a b, c d)]`) into + /// the function and quoted arguments called (e.g. 
`("foo", vec![(a b, location), (c d, location)])`) + fn parse_attribute( + annotation: &str, + location: Location, + ) -> Option<(String, Vec<(Value, Location)>)> { + let (tokens, errors) = Lexer::lex(annotation); + if !errors.is_empty() { + return None; + } + + let mut tokens = tokens.0; + if tokens.len() >= 4 { + // Remove the outer `ident ( )` wrapping the function arguments + let first = tokens.remove(0).into_token(); + let second = tokens.remove(0).into_token(); + + // Last token is always an EndOfInput + let _ = tokens.pop().unwrap().into_token(); + let last = tokens.pop().unwrap().into_token(); + + use crate::lexer::token::Token::*; + if let (Ident(name), LeftParen, RightParen) = (first, second, last) { + let args = tokens.split(|token| *token.token() == Comma); + let args = + vecmap(args, |arg| (Value::Code(Rc::new(Tokens(arg.to_vec()))), location)); + return Some((name, args)); + } + } + + None + } + + /// Checks if the given attribute function is a varargs function. + /// If so, we should pass its arguments in one slice rather than as separate arguments. + fn handle_varargs_attribute( + &mut self, + function: FuncId, + arguments: &mut Vec<(Value, Location)>, + location: Location, + ) { + let meta = self.interner.function_meta(&function); + let parameters = &meta.parameters.0; + + // If the last parameter is a slice, this is a varargs function. + if parameters.last().map_or(false, |(_, typ, _)| matches!(typ, Type::Slice(_))) { + let typ = Type::Slice(Box::new(Type::Quoted(crate::QuotedType::Quoted))); + let slice_elements = arguments.drain(..).map(|(value, _)| value); + let slice = Value::Slice(slice_elements.collect(), typ); + arguments.push((slice, location)); + } + } + pub fn resolve_struct_fields( &mut self, unresolved: NoirStruct, @@ -1365,7 +1347,8 @@ impl<'context> Elaborator<'context> { self.elaborate_comptime_global(global_id); } - self.interner.add_definition_location(ReferenceId::Global(global_id)); + self.interner + .add_definition_location(ReferenceId::Global(global_id), Some(self.module_id())); self.local_module = old_module; self.file = old_file; @@ -1381,8 +1364,7 @@ impl<'context> Elaborator<'context> { let global = self.interner.get_global(global_id); let definition_id = global.definition_id; let location = global.location; - let mut interpreter_errors = vec![]; - let mut interpreter = self.setup_interpreter(&mut interpreter_errors); + let mut interpreter = self.setup_interpreter(); if let Err(error) = interpreter.evaluate_let(let_statement) { self.errors.push(error.into_compilation_error_pair()); @@ -1397,7 +1379,6 @@ impl<'context> Elaborator<'context> { self.interner.get_global_mut(global_id).value = Some(value); } - self.include_interpreter_errors(interpreter_errors); } fn define_function_metas( @@ -1437,22 +1418,13 @@ impl<'context> Elaborator<'context> { trait_impl.resolved_generics = self.generics.clone(); // Fetch trait constraints here - let trait_generics = if let Some(trait_id) = trait_impl.trait_id { - let trait_def = self.interner.get_trait(trait_id); - let resolved_generics = trait_def.generics.clone(); - assert_eq!(resolved_generics.len(), trait_impl.trait_generics.len()); - trait_impl - .trait_generics - .iter() - .enumerate() - .map(|(i, generic)| { - self.resolve_type_inner(generic.clone(), &resolved_generics[i].kind) - }) - .collect() - } else { - // We still resolve as to continue type checking - vecmap(&trait_impl.trait_generics, |generic| self.resolve_type(generic.clone())) - }; + let trait_generics = trait_impl + .trait_id + 
.and_then(|trait_id| self.resolve_trait_impl_generics(trait_impl, trait_id)) + .unwrap_or_else(|| { + // We still resolve as to continue type checking + vecmap(&trait_impl.trait_generics, |generic| self.resolve_type(generic.clone())) + }); trait_impl.resolved_trait_generics = trait_generics; @@ -1471,13 +1443,11 @@ impl<'context> Elaborator<'context> { if let Some(trait_id) = trait_id { let trait_name = trait_impl.trait_path.last_segment(); - - let referenced = ReferenceId::Trait(trait_id); - let reference = ReferenceId::Reference( + self.interner.add_trait_reference( + trait_id, Location::new(trait_name.span(), trait_impl.file_id), trait_name.is_self_type_name(), ); - self.interner.add_reference(referenced, reference); } } } @@ -1488,15 +1458,11 @@ impl<'context> Elaborator<'context> { for (local_module, id, func) in &mut function_set.functions { self.local_module = *local_module; self.recover_generics(|this| { - this.define_function_meta(func, *id, false); + this.define_function_meta(func, *id, None); }); } } - fn include_interpreter_errors(&mut self, errors: Vec) { - self.errors.extend(errors.into_iter().map(InterpreterError::into_compilation_error_pair)); - } - /// True if we're currently within a `comptime` block, function, or global fn in_comptime_context(&self) -> bool { // The first context is the global context, followed by the function-specific context. @@ -1555,17 +1521,25 @@ impl<'context> Elaborator<'context> { function_sets.push(UnresolvedFunctions { functions, file_id, trait_id, self_type }); } + let (comptime_trait_impls, trait_impls) = + items.trait_impls.into_iter().partition(|trait_impl| trait_impl.is_comptime); + + let (comptime_structs, structs) = + items.types.into_iter().partition(|typ| typ.1.struct_def.is_comptime); + let comptime = CollectedItems { functions: comptime_function_sets, - types: BTreeMap::new(), + types: comptime_structs, type_aliases: BTreeMap::new(), traits: BTreeMap::new(), - trait_impls: Vec::new(), + trait_impls: comptime_trait_impls, globals: Vec::new(), impls: rustc_hash::FxHashMap::default(), }; items.functions = function_sets; + items.trait_impls = trait_impls; + items.types = structs; (comptime, items) } @@ -1576,89 +1550,95 @@ impl<'context> Elaborator<'context> { location: Location, ) { for item in items { - match item { - TopLevelStatement::Function(function) => { - let id = self.interner.push_empty_fn(); - let module = self.module_id(); - self.interner.push_function(id, &function.def, module, location); - let functions = vec![(self.local_module, id, function)]; - generated_items.functions.push(UnresolvedFunctions { - file_id: self.file, - functions, - trait_id: None, - self_type: None, - }); - } - TopLevelStatement::TraitImpl(mut trait_impl) => { - let methods = dc_mod::collect_trait_impl_functions( - self.interner, - &mut trait_impl, - self.crate_id, - self.file, - self.local_module, - ); + self.add_item(item, generated_items, location); + } + } - generated_items.trait_impls.push(UnresolvedTraitImpl { - file_id: self.file, - module_id: self.local_module, - trait_generics: trait_impl.trait_generics, - trait_path: trait_impl.trait_name, - object_type: trait_impl.object_type, - methods, - generics: trait_impl.impl_generics, - where_clause: trait_impl.where_clause, - - // These last fields are filled in later - trait_id: None, - impl_id: None, - resolved_object_type: None, - resolved_generics: Vec::new(), - resolved_trait_generics: Vec::new(), - }); - } - TopLevelStatement::Global(global) => { - let (global, error) = dc_mod::collect_global( 
- self.interner, - self.def_maps.get_mut(&self.crate_id).unwrap(), - global, - self.file, - self.local_module, - ); + fn add_item( + &mut self, + item: TopLevelStatement, + generated_items: &mut CollectedItems, + location: Location, + ) { + match item { + TopLevelStatement::Function(function) => { + let id = self.interner.push_empty_fn(); + let module = self.module_id(); + self.interner.push_function(id, &function.def, module, location); + let functions = vec![(self.local_module, id, function)]; + generated_items.functions.push(UnresolvedFunctions { + file_id: self.file, + functions, + trait_id: None, + self_type: None, + }); + } + TopLevelStatement::TraitImpl(mut trait_impl) => { + let methods = dc_mod::collect_trait_impl_functions( + self.interner, + &mut trait_impl, + self.crate_id, + self.file, + self.local_module, + ); - generated_items.globals.push(global); - if let Some(error) = error { - self.errors.push(error); - } - } - // Assume that an error has already been issued - TopLevelStatement::Error => (), - - TopLevelStatement::Module(_) - | TopLevelStatement::Import(_) - | TopLevelStatement::Struct(_) - | TopLevelStatement::Trait(_) - | TopLevelStatement::Impl(_) - | TopLevelStatement::TypeAlias(_) - | TopLevelStatement::SubModule(_) => { - let item = item.to_string(); - let error = InterpreterError::UnsupportedTopLevelItemUnquote { item, location }; - self.errors.push(error.into_compilation_error_pair()); + generated_items.trait_impls.push(UnresolvedTraitImpl { + file_id: self.file, + module_id: self.local_module, + trait_generics: trait_impl.trait_generics, + trait_path: trait_impl.trait_name, + object_type: trait_impl.object_type, + methods, + generics: trait_impl.impl_generics, + where_clause: trait_impl.where_clause, + is_comptime: trait_impl.is_comptime, + + // These last fields are filled in later + trait_id: None, + impl_id: None, + resolved_object_type: None, + resolved_generics: Vec::new(), + resolved_trait_generics: Vec::new(), + }); + } + TopLevelStatement::Global(global) => { + let (global, error) = dc_mod::collect_global( + self.interner, + self.def_maps.get_mut(&self.crate_id).unwrap(), + global, + self.file, + self.local_module, + self.crate_id, + ); + + generated_items.globals.push(global); + if let Some(error) = error { + self.errors.push(error); } } + // Assume that an error has already been issued + TopLevelStatement::Error => (), + + TopLevelStatement::Module(_) + | TopLevelStatement::Import(_) + | TopLevelStatement::Struct(_) + | TopLevelStatement::Trait(_) + | TopLevelStatement::Impl(_) + | TopLevelStatement::TypeAlias(_) + | TopLevelStatement::SubModule(_) => { + let item = item.to_string(); + let error = InterpreterError::UnsupportedTopLevelItemUnquote { item, location }; + self.errors.push(error.into_compilation_error_pair()); + } } } - fn setup_interpreter<'a>( - &'a mut self, - interpreter_errors: &'a mut Vec, - ) -> Interpreter { - Interpreter::new( - self.interner, - &mut self.comptime_scopes, - self.crate_id, - self.debug_comptime_in_file, - interpreter_errors, - ) + pub fn setup_interpreter<'local>(&'local mut self) -> Interpreter<'local, 'context> { + let current_function = match self.current_item { + Some(DependencyId::Function(function)) => Some(function), + _ => None, + }; + Interpreter::new(self, self.crate_id, current_function) } fn debug_comptime T>( @@ -1674,4 +1654,23 @@ impl<'context> Elaborator<'context> { )); } } + + fn run_attributes_on_functions( + &mut self, + function_sets: &[UnresolvedFunctions], + generated_items: &mut CollectedItems, 
+ ) { + for function_set in function_sets { + self.file = function_set.file_id; + self.self_type = function_set.self_type.clone(); + + for (local_module, function_id, function) in &function_set.functions { + self.local_module = *local_module; + let attributes = function.secondary_attributes(); + let item = Value::FunctionDefinition(*function_id); + let span = function.span(); + self.run_comptime_attributes_on_item(attributes, item, span, generated_items); + } + } + } } diff --git a/compiler/noirc_frontend/src/elaborator/patterns.rs b/compiler/noirc_frontend/src/elaborator/patterns.rs index 7c920230b9d..e24b6a3a067 100644 --- a/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -5,7 +5,6 @@ use rustc_hash::FxHashSet as HashSet; use crate::{ ast::{UnresolvedType, ERROR_IDENT}, hir::{ - comptime::Interpreter, def_collector::dc_crate::CompilationError, resolution::errors::ResolverError, type_check::{Source, TypeCheckError}, @@ -15,9 +14,7 @@ use crate::{ stmt::HirPattern, }, macros_api::{HirExpression, Ident, Path, Pattern}, - node_interner::{ - DefinitionId, DefinitionKind, ExprId, FuncId, GlobalId, ReferenceId, TraitImplKind, - }, + node_interner::{DefinitionId, DefinitionKind, ExprId, FuncId, GlobalId, TraitImplKind}, Shared, StructType, Type, TypeBindings, }; @@ -204,14 +201,12 @@ impl<'context> Elaborator<'context> { let struct_id = struct_type.borrow().id; - let referenced = ReferenceId::Struct(struct_id); - let reference = ReferenceId::Reference(Location::new(name_span, self.file), is_self_type); - self.interner.add_reference(referenced, reference); + let reference_location = Location::new(name_span, self.file); + self.interner.add_struct_reference(struct_id, reference_location, is_self_type); for (field_index, field) in fields.iter().enumerate() { - let referenced = ReferenceId::StructMember(struct_id, field_index); - let reference = ReferenceId::Reference(Location::new(field.0.span(), self.file), false); - self.interner.add_reference(referenced, reference); + let reference_location = Location::new(field.0.span(), self.file); + self.interner.add_struct_member_reference(struct_id, field_index, reference_location); } HirPattern::Struct(expected_type, fields, location) @@ -464,16 +459,8 @@ impl<'context> Elaborator<'context> { // Comptime variables must be replaced with their values if let Some(definition) = self.interner.try_definition(definition_id) { if definition.comptime && !self.in_comptime_context() { - let mut interpreter_errors = vec![]; - let mut interpreter = Interpreter::new( - self.interner, - &mut self.comptime_scopes, - self.crate_id, - self.debug_comptime_in_file, - &mut interpreter_errors, - ); + let mut interpreter = self.setup_interpreter(); let value = interpreter.evaluate(id); - self.include_interpreter_errors(interpreter_errors); return self.inline_comptime_value(value, span); } } @@ -494,7 +481,6 @@ impl<'context> Elaborator<'context> { // This lookup allows support of such statements: let x = foo::bar::SOME_GLOBAL + 10; // If the expression is a singular indent, we search the resolver's current scope as normal. 
let span = path.span(); - let is_self_type_name = path.last_segment().is_self_type_name(); let (hir_ident, var_scope_index) = self.get_ident_from_path(path); if hir_ident.id != DefinitionId::dummy_id() { @@ -504,10 +490,7 @@ impl<'context> Elaborator<'context> { self.interner.add_function_dependency(current_item, func_id); } - let variable = - ReferenceId::Reference(hir_ident.location, is_self_type_name); - let function = ReferenceId::Function(func_id); - self.interner.add_reference(function, variable); + self.interner.add_function_reference(func_id, hir_ident.location); } DefinitionKind::Global(global_id) => { if let Some(global) = self.unresolved_globals.remove(&global_id) { @@ -517,10 +500,7 @@ impl<'context> Elaborator<'context> { self.interner.add_global_dependency(current_item, global_id); } - let variable = - ReferenceId::Reference(hir_ident.location, is_self_type_name); - let global = ReferenceId::Global(global_id); - self.interner.add_reference(global, variable); + self.interner.add_global_reference(global_id, hir_ident.location); } DefinitionKind::GenericType(_) => { // Initialize numeric generics to a polymorphic integer type in case @@ -536,10 +516,8 @@ impl<'context> Elaborator<'context> { // only local variables can be captured by closures. self.resolve_local_variable(hir_ident.clone(), var_scope_index); - let referenced = ReferenceId::Local(hir_ident.id); - let reference = - ReferenceId::Reference(Location::new(span, self.file), false); - self.interner.add_reference(referenced, reference); + let reference_location = Location::new(span, self.file); + self.interner.add_local_reference(hir_ident.id, reference_location); } } } @@ -609,7 +587,6 @@ impl<'context> Elaborator<'context> { if let Some(definition) = self.interner.try_definition(ident.id) { if let DefinitionKind::Function(function) = definition.kind { let function = self.interner.function_meta(&function); - for mut constraint in function.trait_constraints.clone() { constraint.apply_bindings(&bindings); self.push_trait_constraint(constraint, expr_id); diff --git a/compiler/noirc_frontend/src/elaborator/scope.rs b/compiler/noirc_frontend/src/elaborator/scope.rs index b7016280453..23638b03cf5 100644 --- a/compiler/noirc_frontend/src/elaborator/scope.rs +++ b/compiler/noirc_frontend/src/elaborator/scope.rs @@ -3,10 +3,8 @@ use noirc_errors::{Location, Spanned}; use crate::ast::ERROR_IDENT; use crate::hir::def_map::{LocalModuleId, ModuleId}; use crate::hir::resolution::path_resolver::{PathResolver, StandardPathResolver}; -use crate::hir::resolution::resolver::SELF_TYPE_NAME; use crate::hir::scope::{Scope as GenericScope, ScopeTree as GenericScopeTree}; use crate::macros_api::Ident; -use crate::node_interner::ReferenceId; use crate::{ hir::{ def_map::{ModuleDefId, TryFromModuleDefId}, @@ -22,6 +20,7 @@ use crate::{ }; use crate::{Type, TypeAlias}; +use super::types::SELF_TYPE_NAME; use super::{Elaborator, ResolverMeta}; type Scope = GenericScope; @@ -48,17 +47,30 @@ impl<'context> Elaborator<'context> { let path_resolution; if self.interner.track_references { - let mut references: Vec = Vec::new(); + let last_segment = path.last_segment(); + let location = Location::new(last_segment.span(), self.file); + let is_self_type_name = last_segment.is_self_type_name(); + + let mut references: Vec<_> = Vec::new(); path_resolution = resolver.resolve(self.def_maps, path.clone(), &mut Some(&mut references))?; for (referenced, ident) in references.iter().zip(path.segments) { - let reference = ReferenceId::Reference( + let Some(referenced) 
= referenced else { + continue; + }; + self.interner.add_reference( + *referenced, Location::new(ident.span(), self.file), ident.is_self_type_name(), ); - self.interner.add_reference(*referenced, reference); } + + self.interner.add_module_def_id_reference( + path_resolution.module_def_id, + location, + is_self_type_name, + ); } else { path_resolution = resolver.resolve(self.def_maps, path, &mut None)?; } diff --git a/compiler/noirc_frontend/src/elaborator/statements.rs b/compiler/noirc_frontend/src/elaborator/statements.rs index 13c59e3494e..48380383eb0 100644 --- a/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/compiler/noirc_frontend/src/elaborator/statements.rs @@ -15,7 +15,7 @@ use crate::{ macros_api::{ ForLoopStatement, ForRange, HirStatement, LetStatement, Path, Statement, StatementKind, }, - node_interner::{DefinitionId, DefinitionKind, GlobalId, ReferenceId, StmtId}, + node_interner::{DefinitionId, DefinitionKind, GlobalId, StmtId}, Type, }; @@ -255,9 +255,8 @@ impl<'context> Elaborator<'context> { typ.follow_bindings() }; - let referenced = ReferenceId::Local(ident.id); - let reference = ReferenceId::Reference(Location::new(span, self.file), false); - self.interner.add_reference(referenced, reference); + let reference_location = Location::new(span, self.file); + self.interner.add_local_reference(ident.id, reference_location); (HirLValue::Ident(ident.clone(), typ.clone()), typ, mutable) } @@ -380,9 +379,8 @@ impl<'context> Elaborator<'context> { Type::Struct(s, args) => { let s = s.borrow(); if let Some((field, index)) = s.get_field(field_name, args) { - let referenced = ReferenceId::StructMember(s.id, index); - let reference = ReferenceId::Reference(Location::new(span, self.file), false); - self.interner.add_reference(referenced, reference); + let reference_location = Location::new(span, self.file); + self.interner.add_struct_member_reference(s.id, index, reference_location); return Some((field, index)); } @@ -447,11 +445,9 @@ impl<'context> Elaborator<'context> { let span = statement.span; let (hir_statement, _typ) = self.elaborate_statement(statement); self.check_and_pop_function_context(); - let mut interpreter_errors = vec![]; - let mut interpreter = self.setup_interpreter(&mut interpreter_errors); + let mut interpreter = self.setup_interpreter(); let value = interpreter.evaluate_statement(hir_statement); let (expr, typ) = self.inline_comptime_value(value, span); - self.include_interpreter_errors(interpreter_errors); let location = self.interner.id_location(hir_statement); self.debug_comptime(location, |interner| expr.to_display_ast(interner).kind); diff --git a/compiler/noirc_frontend/src/elaborator/trait_impls.rs b/compiler/noirc_frontend/src/elaborator/trait_impls.rs new file mode 100644 index 00000000000..853ee6389fd --- /dev/null +++ b/compiler/noirc_frontend/src/elaborator/trait_impls.rs @@ -0,0 +1,228 @@ +use crate::{ + graph::CrateId, + hir::def_collector::{dc_crate::UnresolvedTraitImpl, errors::DefCollectorErrorKind}, + ResolvedGeneric, +}; +use crate::{ + hir::def_collector::errors::DuplicateType, + hir_def::{ + traits::{TraitConstraint, TraitFunction}, + types::Generics, + }, + node_interner::{FuncId, TraitId}, + Type, TypeBindings, +}; + +use noirc_errors::Location; +use rustc_hash::FxHashSet as HashSet; + +use super::Elaborator; + +impl<'context> Elaborator<'context> { + pub(super) fn collect_trait_impl_methods( + &mut self, + trait_id: TraitId, + trait_impl: &mut UnresolvedTraitImpl, + trait_impl_where_clause: &[TraitConstraint], + ) { + 
self.local_module = trait_impl.module_id; + self.file = trait_impl.file_id; + + // In this Vec methods[i] corresponds to trait.methods[i]. If the impl has no implementation + // for a particular method, the default implementation will be added at that slot. + let mut ordered_methods = Vec::new(); + + // check whether the trait implementation is in the same crate as either the trait or the type + self.check_trait_impl_crate_coherence(trait_id, trait_impl); + + // set of function ids that have a corresponding method in the trait + let mut func_ids_in_trait = HashSet::default(); + + let trait_generics = &self.interner.get_trait(trait_id).generics.clone(); + // Temporarily take ownership of the trait's methods so we can iterate over them + // while also mutating the interner + let the_trait = self.interner.get_trait_mut(trait_id); + let methods = std::mem::take(&mut the_trait.methods); + for method in &methods { + let overrides: Vec<_> = trait_impl + .methods + .functions + .iter() + .filter(|(_, _, f)| f.name() == method.name.0.contents) + .collect(); + + if overrides.is_empty() { + if let Some(default_impl) = &method.default_impl { + // copy 'where' clause from unresolved trait impl + let mut default_impl_clone = default_impl.clone(); + default_impl_clone.def.where_clause.extend(trait_impl.where_clause.clone()); + + let func_id = self.interner.push_empty_fn(); + let module = self.module_id(); + let location = Location::new(default_impl.def.span, trait_impl.file_id); + self.interner.push_function(func_id, &default_impl.def, module, location); + self.define_function_meta(&mut default_impl_clone, func_id, None); + func_ids_in_trait.insert(func_id); + ordered_methods.push(( + method.default_impl_module_id, + func_id, + *default_impl_clone, + )); + } else { + self.push_err(DefCollectorErrorKind::TraitMissingMethod { + trait_name: self.interner.get_trait(trait_id).name.clone(), + method_name: method.name.clone(), + trait_impl_span: trait_impl + .object_type + .span + .expect("type must have a span"), + }); + } + } else { + for (_, func_id, _) in &overrides { + self.check_where_clause_against_trait( + func_id, + method, + trait_impl_where_clause, + &trait_impl.resolved_trait_generics, + trait_generics, + ); + + func_ids_in_trait.insert(*func_id); + } + + if overrides.len() > 1 { + self.push_err(DefCollectorErrorKind::Duplicate { + typ: DuplicateType::TraitAssociatedFunction, + first_def: overrides[0].2.name_ident().clone(), + second_def: overrides[1].2.name_ident().clone(), + }); + } + + ordered_methods.push(overrides[0].clone()); + } + } + + // Restore the methods that were taken before the for loop + let the_trait = self.interner.get_trait_mut(trait_id); + the_trait.set_methods(methods); + + // Emit MethodNotInTrait error for methods in the impl block that + // don't have a corresponding method signature defined in the trait + for (_, func_id, func) in &trait_impl.methods.functions { + if !func_ids_in_trait.contains(func_id) { + let trait_name = the_trait.name.clone(); + let impl_method = func.name_ident().clone(); + let error = DefCollectorErrorKind::MethodNotInTrait { trait_name, impl_method }; + self.errors.push((error.into(), self.file)); + } + } + + trait_impl.methods.functions = ordered_methods; + trait_impl.methods.trait_id = Some(trait_id); + } + + /// Issue an error if the impl is stricter than the trait. 
+    ///
+    /// # Example
+    ///
+    /// ```compile_fail
+    /// trait MyTrait { }
+    /// trait Foo<T> {
+    ///     fn foo<U>();
+    /// }
+    /// impl<A> Foo<A> for () {
+    ///     // Error issued here as `foo` does not have the `MyTrait` constraint
+    ///     fn foo<B>() where B: MyTrait {}
+    /// }
+    /// ```
+    fn check_where_clause_against_trait(
+        &mut self,
+        func_id: &FuncId,
+        method: &TraitFunction,
+        trait_impl_where_clause: &[TraitConstraint],
+        impl_trait_generics: &[Type],
+        trait_generics: &Generics,
+    ) {
+        let mut bindings = TypeBindings::new();
+        for (trait_generic, impl_trait_generic) in trait_generics.iter().zip(impl_trait_generics) {
+            bindings.insert(
+                trait_generic.type_var.id(),
+                (trait_generic.type_var.clone(), impl_trait_generic.clone()),
+            );
+        }
+
+        let override_meta = self.interner.function_meta(func_id);
+        // Substitute each generic on the trait function with the corresponding generic on the impl function
+        for (
+            ResolvedGeneric { type_var: trait_fn_generic, .. },
+            ResolvedGeneric { name, type_var: impl_fn_generic, kind, .. },
+        ) in method.direct_generics.iter().zip(&override_meta.direct_generics)
+        {
+            let arg = Type::NamedGeneric(impl_fn_generic.clone(), name.clone(), kind.clone());
+            bindings.insert(trait_fn_generic.id(), (trait_fn_generic.clone(), arg));
+        }
+
+        let mut substituted_method_ids = HashSet::default();
+        for method_constraint in method.trait_constraints.iter() {
+            let substituted_constraint_type = method_constraint.typ.substitute(&bindings);
+            let substituted_trait_generics = method_constraint
+                .trait_generics
+                .iter()
+                .map(|generic| generic.substitute(&bindings))
+                .collect::<Vec<_>>();
+            substituted_method_ids.insert((
+                substituted_constraint_type,
+                method_constraint.trait_id,
+                substituted_trait_generics,
+            ));
+        }
+
+        for override_trait_constraint in override_meta.trait_constraints.clone() {
+            let override_constraint_is_from_impl =
+                trait_impl_where_clause.iter().any(|impl_constraint| {
+                    impl_constraint.trait_id == override_trait_constraint.trait_id
+                });
+            if override_constraint_is_from_impl {
+                continue;
+            }
+
+            if !substituted_method_ids.contains(&(
+                override_trait_constraint.typ.clone(),
+                override_trait_constraint.trait_id,
+                override_trait_constraint.trait_generics.clone(),
+            )) {
+                let the_trait = self.interner.get_trait(override_trait_constraint.trait_id);
+                self.push_err(DefCollectorErrorKind::ImplIsStricterThanTrait {
+                    constraint_typ: override_trait_constraint.typ,
+                    constraint_name: the_trait.name.0.contents.clone(),
+                    constraint_generics: override_trait_constraint.trait_generics,
+                    constraint_span: override_trait_constraint.span,
+                    trait_method_name: method.name.0.contents.clone(),
+                    trait_method_span: method.location.span,
+                });
+            }
+        }
+    }
+
+    fn check_trait_impl_crate_coherence(
+        &mut self,
+        trait_id: TraitId,
+        trait_impl: &UnresolvedTraitImpl,
+    ) {
+        self.local_module = trait_impl.module_id;
+        self.file = trait_impl.file_id;
+
+        let object_crate = match &trait_impl.resolved_object_type {
+            Some(Type::Struct(struct_type, _)) => struct_type.borrow().id.krate(),
+            _ => CrateId::Dummy,
+        };
+
+        let the_trait = self.interner.get_trait(trait_id);
+        if self.crate_id != the_trait.crate_id && self.crate_id != object_crate {
+            self.push_err(DefCollectorErrorKind::TraitImplOrphaned {
+                span: trait_impl.object_type.span.expect("object type must have a span"),
+            });
+        }
+    }
+}
diff --git a/compiler/noirc_frontend/src/elaborator/traits.rs b/compiler/noirc_frontend/src/elaborator/traits.rs
index 4cd20820c56..a00e770218e 100644
---
a/compiler/noirc_frontend/src/elaborator/traits.rs +++ b/compiler/noirc_frontend/src/elaborator/traits.rs @@ -1,27 +1,39 @@ use std::{collections::BTreeMap, rc::Rc}; use iter_extended::vecmap; -use noirc_errors::Location; +use noirc_errors::{Location, Span}; use crate::{ ast::{ FunctionKind, TraitItem, UnresolvedGeneric, UnresolvedGenerics, UnresolvedTraitConstraint, }, - hir::def_collector::dc_crate::UnresolvedTrait, - hir_def::traits::{TraitConstant, TraitFunction, TraitType}, + hir::{ + def_collector::dc_crate::{ + CollectedItems, CompilationError, UnresolvedTrait, UnresolvedTraitImpl, + }, + type_check::TypeCheckError, + }, + hir_def::{ + function::Parameters, + traits::{TraitConstant, TraitFunction, TraitType}, + }, macros_api::{ BlockExpression, FunctionDefinition, FunctionReturnType, Ident, ItemVisibility, - NoirFunction, Param, Pattern, UnresolvedType, Visibility, + NodeInterner, NoirFunction, Param, Pattern, UnresolvedType, Visibility, }, node_interner::{FuncId, TraitId}, token::Attributes, - Kind, ResolvedGeneric, Type, TypeVariableKind, + Kind, ResolvedGeneric, Type, TypeBindings, TypeVariableKind, }; use super::Elaborator; impl<'context> Elaborator<'context> { - pub fn collect_traits(&mut self, traits: BTreeMap) { + pub fn collect_traits( + &mut self, + traits: BTreeMap, + generated_items: &mut CollectedItems, + ) { for (trait_id, unresolved_trait) in traits { self.recover_generics(|this| { let resolved_generics = this.interner.get_trait(trait_id).generics.clone(); @@ -41,6 +53,11 @@ impl<'context> Elaborator<'context> { this.interner.update_trait(trait_id, |trait_def| { trait_def.set_methods(methods); }); + + let attributes = &unresolved_trait.trait_def.attributes; + let item = crate::hir::comptime::Value::TraitDefinition(trait_id); + let span = unresolved_trait.trait_def.span; + this.run_comptime_attributes_on_item(attributes, item, span, generated_items); }); // This check needs to be after the trait's methods are set since @@ -108,6 +125,7 @@ impl<'context> Elaborator<'context> { let func_id = unresolved_trait.method_ids[&name.0.contents]; this.resolve_trait_function( + trait_id, name, generics, parameters, @@ -121,7 +139,8 @@ impl<'context> Elaborator<'context> { let arguments = vecmap(&func_meta.parameters.0, |(_, typ, _)| typ.clone()); let return_type = func_meta.return_type().clone(); - let generics = vecmap(&this.generics, |generic| generic.type_var.clone()); + let generics = + vecmap(&this.generics.clone(), |generic| generic.type_var.clone()); let default_impl_list: Vec<_> = unresolved_trait .fns_with_default_impl @@ -146,6 +165,8 @@ impl<'context> Elaborator<'context> { location: Location::new(name.span(), unresolved_trait.file_id), default_impl, default_impl_module_id: unresolved_trait.module_id, + trait_constraints: func_meta.trait_constraints.clone(), + direct_generics: func_meta.direct_generics.clone(), }); }); } @@ -153,8 +174,10 @@ impl<'context> Elaborator<'context> { functions } + #[allow(clippy::too_many_arguments)] pub fn resolve_trait_function( &mut self, + trait_id: TraitId, name: &Ident, generics: &UnresolvedGenerics, parameters: &[(Ident, UnresolvedType)], @@ -188,10 +211,191 @@ impl<'context> Elaborator<'context> { }; let mut function = NoirFunction { kind, def }; - self.define_function_meta(&mut function, func_id, true); + self.define_function_meta(&mut function, func_id, Some(trait_id)); self.elaborate_function(func_id); let _ = self.scopes.end_function(); // Don't check the scope tree for unused variables, they can't be used in a declaration anyway. 
         self.generics.truncate(old_generic_count);
     }
+
+    pub fn resolve_trait_impl_generics(
+        &mut self,
+        trait_impl: &UnresolvedTraitImpl,
+        trait_id: TraitId,
+    ) -> Option<Vec<Type>> {
+        let trait_def = self.interner.get_trait(trait_id);
+        let resolved_generics = trait_def.generics.clone();
+        if resolved_generics.len() != trait_impl.trait_generics.len() {
+            self.push_err(CompilationError::TypeError(TypeCheckError::GenericCountMismatch {
+                item: trait_def.name.to_string(),
+                expected: resolved_generics.len(),
+                found: trait_impl.trait_generics.len(),
+                span: trait_impl.trait_path.span(),
+            }));
+
+            return None;
+        }
+
+        let generics = trait_impl.trait_generics.iter().zip(resolved_generics.iter());
+        let mapped = generics.map(|(generic, resolved_generic)| {
+            self.resolve_type_inner(generic.clone(), &resolved_generic.kind)
+        });
+        Some(mapped.collect())
+    }
+}
+
+/// Checks that the type of a function in a trait impl matches the type
+/// of the corresponding function declaration in the trait itself.
+///
+/// To do this, given a trait such as:
+/// `trait Foo<A> { fn foo<B>(...); }`
+///
+/// And an impl such as:
+/// `impl<C> Foo<D> for Bar<E> { fn foo<F>(...); } `
+///
+/// We have to substitute:
+/// - Self for Bar<E>
+/// - A for D
+/// - B for F
+///
+/// Before we can type check. Finally, we must also check that the unification
+/// result does not introduce any new bindings. This can happen if the impl
+/// function's type is more general than that of the trait function. E.g.
+/// `fn baz<A, B>(a: A, b: B)` when the impl required `fn baz<A>(a: A, b: A)`.
+///
+/// This does not type check the body of the impl function.
+pub(crate) fn check_trait_impl_method_matches_declaration(
+    interner: &mut NodeInterner,
+    function: FuncId,
+) -> Vec<TypeCheckError> {
+    let meta = interner.function_meta(&function);
+    let method_name = interner.function_name(&function);
+    let mut errors = Vec::new();
+
+    let definition_type = meta.typ.as_monotype();
+
+    let impl_ =
+        meta.trait_impl.expect("Trait impl function should have a corresponding trait impl");
+
+    // If the trait implementation is not defined in the interner then there was a previous
+    // error in resolving the trait path and there is likely no trait for this impl.
+    let Some(impl_) = interner.try_get_trait_implementation(impl_) else {
+        return errors;
+    };
+
+    let impl_ = impl_.borrow();
+    let trait_info = interner.get_trait(impl_.trait_id);
+
+    let mut bindings = TypeBindings::new();
+    bindings.insert(
+        trait_info.self_type_typevar_id,
+        (trait_info.self_type_typevar.clone(), impl_.typ.clone()),
+    );
+
+    if trait_info.generics.len() != impl_.trait_generics.len() {
+        let expected = trait_info.generics.len();
+        let found = impl_.trait_generics.len();
+        let span = impl_.ident.span();
+        let item = trait_info.name.to_string();
+        errors.push(TypeCheckError::GenericCountMismatch { item, expected, found, span });
+    }
+
+    // Substitute each generic on the trait with the corresponding generic on the impl
+    for (generic, arg) in trait_info.generics.iter().zip(&impl_.trait_generics) {
+        bindings.insert(generic.type_var.id(), (generic.type_var.clone(), arg.clone()));
+    }
+
+    // If this is None, the trait does not have the corresponding function.
+    // This error should have been caught in name resolution already so we don't
+    // issue an error for it here.
+ if let Some(trait_fn_id) = trait_info.method_ids.get(method_name) { + let trait_fn_meta = interner.function_meta(trait_fn_id); + + if trait_fn_meta.direct_generics.len() != meta.direct_generics.len() { + let expected = trait_fn_meta.direct_generics.len(); + let found = meta.direct_generics.len(); + let span = meta.name.location.span; + let item = method_name.to_string(); + errors.push(TypeCheckError::GenericCountMismatch { item, expected, found, span }); + } + + // Substitute each generic on the trait function with the corresponding generic on the impl function + for ( + ResolvedGeneric { type_var: trait_fn_generic, .. }, + ResolvedGeneric { name, type_var: impl_fn_generic, .. }, + ) in trait_fn_meta.direct_generics.iter().zip(&meta.direct_generics) + { + let arg = Type::NamedGeneric(impl_fn_generic.clone(), name.clone(), Kind::Normal); + bindings.insert(trait_fn_generic.id(), (trait_fn_generic.clone(), arg)); + } + + let (declaration_type, _) = trait_fn_meta.typ.instantiate_with_bindings(bindings, interner); + + check_function_type_matches_expected_type( + &declaration_type, + definition_type, + method_name, + &meta.parameters, + meta.name.location.span, + &trait_info.name.0.contents, + &mut errors, + ); + } + + errors +} + +fn check_function_type_matches_expected_type( + expected: &Type, + actual: &Type, + method_name: &str, + actual_parameters: &Parameters, + span: Span, + trait_name: &str, + errors: &mut Vec, +) { + let mut bindings = TypeBindings::new(); + // Shouldn't need to unify envs, they should always be equal since they're both free functions + if let (Type::Function(params_a, ret_a, _env_a), Type::Function(params_b, ret_b, _env_b)) = + (expected, actual) + { + if params_a.len() == params_b.len() { + for (i, (a, b)) in params_a.iter().zip(params_b.iter()).enumerate() { + if a.try_unify(b, &mut bindings).is_err() { + errors.push(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name: method_name.to_string(), + expected_typ: a.to_string(), + actual_typ: b.to_string(), + parameter_span: actual_parameters.0[i].0.span(), + parameter_index: i + 1, + }); + } + } + + if ret_b.try_unify(ret_a, &mut bindings).is_err() { + errors.push(TypeCheckError::TypeMismatch { + expected_typ: ret_a.to_string(), + expr_typ: ret_b.to_string(), + expr_span: span, + }); + } + } else { + errors.push(TypeCheckError::MismatchTraitImplNumParameters { + actual_num_parameters: params_b.len(), + expected_num_parameters: params_a.len(), + trait_name: trait_name.to_string(), + method_name: method_name.to_string(), + span, + }); + } + } + + // If result bindings is not empty, a type variable was bound which means the two + // signatures were not a perfect match. Note that this relies on us already binding + // all the expected generics to each other prior to this check. 
+ if !bindings.is_empty() { + let expected_typ = expected.to_string(); + let expr_typ = actual.to_string(); + errors.push(TypeCheckError::TypeMismatch { expected_typ, expr_typ, expr_span: span }); + } } diff --git a/compiler/noirc_frontend/src/elaborator/types.rs b/compiler/noirc_frontend/src/elaborator/types.rs index 4e9b3620760..d5dbb170843 100644 --- a/compiler/noirc_frontend/src/elaborator/types.rs +++ b/compiler/noirc_frontend/src/elaborator/types.rs @@ -12,10 +12,7 @@ use crate::{ hir::{ comptime::{Interpreter, Value}, def_map::ModuleDefId, - resolution::{ - errors::ResolverError, - resolver::{verify_mutable_reference, SELF_TYPE_NAME, WILDCARD_TYPE}, - }, + resolution::errors::ResolverError, type_check::{NoMatchingImplFoundError, Source, TypeCheckError}, }, hir_def::{ @@ -27,11 +24,11 @@ use crate::{ traits::TraitConstraint, }, macros_api::{ - HirExpression, HirLiteral, HirStatement, Path, PathKind, SecondaryAttribute, Signedness, - UnaryOp, UnresolvedType, UnresolvedTypeData, + HirExpression, HirLiteral, HirStatement, NodeInterner, Path, PathKind, SecondaryAttribute, + Signedness, UnaryOp, UnresolvedType, UnresolvedTypeData, }, node_interner::{ - DefinitionKind, DependencyId, ExprId, GlobalId, ReferenceId, TraitId, TraitImplKind, + DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, TraitId, TraitImplKind, TraitMethodId, }, Generics, Kind, ResolvedGeneric, Type, TypeBinding, TypeVariable, TypeVariableKind, @@ -39,6 +36,9 @@ use crate::{ use super::{lints, Elaborator}; +pub const SELF_TYPE_NAME: &str = "Self"; +pub const WILDCARD_TYPE: &str = "_"; + impl<'context> Elaborator<'context> { /// Translates an UnresolvedType to a Type with a `TypeKind::Normal` pub(super) fn resolve_type(&mut self, typ: UnresolvedType) -> Type { @@ -58,12 +58,16 @@ impl<'context> Elaborator<'context> { use crate::ast::UnresolvedTypeData::*; let span = typ.span; - let (is_self_type_name, is_synthetic) = if let Named(ref named_path, _, synthetic) = typ.typ - { - (named_path.last_segment().is_self_type_name(), synthetic) - } else { - (false, false) - }; + let (named_path_span, is_self_type_name, is_synthetic) = + if let Named(ref named_path, _, synthetic) = typ.typ { + ( + Some(named_path.last_segment().span()), + named_path.last_segment().is_self_type_name(), + synthetic, + ) + } else { + (None, false, false) + }; let resolved_type = match typ.typ { FieldElement => Type::FieldElement, @@ -154,30 +158,23 @@ impl<'context> Elaborator<'context> { }; if let Some(unresolved_span) = typ.span { + let location = Location::new(named_path_span.unwrap_or(unresolved_span), self.file); + match resolved_type { Type::Struct(ref struct_type, _) => { // Record the location of the type reference - self.interner.push_type_ref_location( - resolved_type.clone(), - Location::new(unresolved_span, self.file), - ); + self.interner.push_type_ref_location(resolved_type.clone(), location); if !is_synthetic { - let referenced = ReferenceId::Struct(struct_type.borrow().id); - let reference = ReferenceId::Reference( - Location::new(unresolved_span, self.file), + self.interner.add_struct_reference( + struct_type.borrow().id, + location, is_self_type_name, ); - self.interner.add_reference(referenced, reference); } } Type::Alias(ref alias_type, _) => { - let referenced = ReferenceId::Alias(alias_type.borrow().id); - let reference = ReferenceId::Reference( - Location::new(unresolved_span, self.file), - is_self_type_name, - ); - self.interner.add_reference(referenced, reference); + self.interner.add_alias_reference(alias_type.borrow().id, 
location); } _ => (), } @@ -369,10 +366,8 @@ impl<'context> Elaborator<'context> { self.interner.add_global_dependency(current_item, id); } - let referenced = ReferenceId::Global(id); - let reference = - ReferenceId::Reference(Location::new(path.span(), self.file), false); - self.interner.add_reference(referenced, reference); + let reference_location = Location::new(path.span(), self.file); + self.interner.add_global_reference(id, reference_location); Some(Type::Constant(self.eval_global_as_array_length(id, path))) } @@ -433,6 +428,7 @@ impl<'context> Elaborator<'context> { generic.type_var.clone() })), trait_id, + span: path.span(), }; return Some((method, constraint, false)); @@ -449,28 +445,20 @@ impl<'context> Elaborator<'context> { &mut self, path: &Path, ) -> Option<(TraitMethodId, TraitConstraint, bool)> { - if path.kind == PathKind::Plain && path.segments.len() == 2 { - let method = &path.segments[1]; - - let mut trait_path = path.clone(); - trait_path.pop(); - let trait_id = self.lookup(trait_path).ok()?; - let the_trait = self.interner.get_trait(trait_id); - - let method = the_trait.find_method(method.0.contents.as_str())?; - let constraint = TraitConstraint { - typ: Type::TypeVariable( - the_trait.self_type_typevar.clone(), - TypeVariableKind::Normal, - ), - trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { - generic.type_var.clone() - })), - trait_id, - }; - return Some((method, constraint, false)); - } - None + let func_id: FuncId = self.lookup(path.clone()).ok()?; + let meta = self.interner.function_meta(&func_id); + let trait_id = meta.trait_id?; + let the_trait = self.interner.get_trait(trait_id); + let method = the_trait.find_method(&path.last_segment().0.contents)?; + let constraint = TraitConstraint { + typ: Type::TypeVariable(the_trait.self_type_typevar.clone(), TypeVariableKind::Normal), + trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { + generic.type_var.clone() + })), + trait_id, + span: path.span(), + }; + Some((method, constraint, false)) } // This resolves a static trait method T::trait_method by iterating over the where clause @@ -1623,3 +1611,29 @@ impl<'context> Elaborator<'context> { context.trait_constraints.push((constraint, expr_id)); } } + +/// Gives an error if a user tries to create a mutable reference +/// to an immutable variable. 
+fn verify_mutable_reference(interner: &NodeInterner, rhs: ExprId) -> Result<(), ResolverError> { + match interner.expression(&rhs) { + HirExpression::MemberAccess(member_access) => { + verify_mutable_reference(interner, member_access.lhs) + } + HirExpression::Index(_) => { + let span = interner.expr_span(&rhs); + Err(ResolverError::MutableReferenceToArrayElement { span }) + } + HirExpression::Ident(ident, _) => { + if let Some(definition) = interner.try_definition(ident.id) { + if !definition.mutable { + return Err(ResolverError::MutableReferenceToImmutableVariable { + span: interner.expr_span(&rhs), + variable: definition.name.clone(), + }); + } + } + Ok(()) + } + _ => Ok(()), + } +} diff --git a/compiler/noirc_frontend/src/hir/comptime/errors.rs b/compiler/noirc_frontend/src/hir/comptime/errors.rs index 0472b0040e5..b52201146dd 100644 --- a/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -7,7 +7,7 @@ use crate::{ token::Tokens, Type, }; -use acvm::{acir::AcirField, FieldElement}; +use acvm::{acir::AcirField, BlackBoxResolutionError, FieldElement}; use fm::FileId; use iter_extended::vecmap; use noirc_errors::{CustomDiagnostic, Location}; @@ -53,13 +53,11 @@ pub enum InterpreterError { NoImpl { location: Location }, NoMatchingImplFound { error: NoMatchingImplFoundError, file: FileId }, ImplMethodTypeMismatch { expected: Type, actual: Type, location: Location }, - - Unimplemented { item: String, location: Location }, - - // Perhaps this should be unreachable! due to type checking also preventing this error? - // Currently it and the Continue variant are the only interpreter errors without a Location field BreakNotInLoop { location: Location }, ContinueNotInLoop { location: Location }, + BlackBoxError(BlackBoxResolutionError, Location), + + Unimplemented { item: String, location: Location }, // These cases are not errors, they are just used to prevent us from running more code // until the loop can be resumed properly. These cases will never be displayed to users. @@ -118,9 +116,11 @@ impl InterpreterError { | InterpreterError::Unimplemented { location, .. } | InterpreterError::NoImpl { location, .. } | InterpreterError::ImplMethodTypeMismatch { location, .. } + | InterpreterError::DebugEvaluateComptime { location, .. } + | InterpreterError::BlackBoxError(_, location) | InterpreterError::BreakNotInLoop { location, .. } - | InterpreterError::DebugEvaluateComptime { location, .. } => *location, - InterpreterError::ContinueNotInLoop { location, .. } => *location, + | InterpreterError::ContinueNotInLoop { location, .. } => *location, + InterpreterError::FailedToParseMacro { error, file, .. } => { Location::new(error.span(), *file) } @@ -370,6 +370,9 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { ); CustomDiagnostic::simple_error(msg, String::new(), location.span) } + InterpreterError::BlackBoxError(error, location) => { + CustomDiagnostic::simple_error(error.to_string(), String::new(), location.span) + } InterpreterError::NoMatchingImplFound { error, .. 
} => error.into(), InterpreterError::Break => unreachable!("Uncaught InterpreterError::Break"), InterpreterError::Continue => unreachable!("Uncaught InterpreterError::Continue"), diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 02714f77605..2090310585c 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -1,13 +1,13 @@ use std::{collections::hash_map::Entry, rc::Rc}; use acvm::{acir::AcirField, FieldElement}; -use fm::FileId; use im::Vector; use iter_extended::try_vecmap; use noirc_errors::Location; use rustc_hash::FxHashMap as HashMap; use crate::ast::{BinaryOpKind, FunctionKind, IntegerBitSize, Signedness}; +use crate::elaborator::Elaborator; use crate::graph::CrateId; use crate::hir_def::expr::ImplKind; use crate::macros_api::UnaryOp; @@ -38,44 +38,29 @@ use super::errors::{IResult, InterpreterError}; use super::value::{unwrap_rc, Value}; mod builtin; +mod foreign; mod unquote; #[allow(unused)] -pub struct Interpreter<'interner> { - /// To expand macros the Interpreter may mutate hir nodes within the NodeInterner - pub interner: &'interner mut NodeInterner, - - /// Each value currently in scope in the interpreter. - /// Each element of the Vec represents a scope with every scope together making - /// up all currently visible definitions. - scopes: &'interner mut Vec>, +pub struct Interpreter<'local, 'interner> { + /// To expand macros the Interpreter needs access to the Elaborator + pub elaborator: &'local mut Elaborator<'interner>, crate_id: CrateId, - /// The scope of --debug-comptime, or None if unset - pub(super) debug_comptime_in_file: Option, - pub(super) debug_comptime_evaluations: &'interner mut Vec, - in_loop: bool, + + current_function: Option, } #[allow(unused)] -impl<'a> Interpreter<'a> { +impl<'local, 'interner> Interpreter<'local, 'interner> { pub(crate) fn new( - interner: &'a mut NodeInterner, - scopes: &'a mut Vec>, + elaborator: &'local mut Elaborator<'interner>, crate_id: CrateId, - debug_comptime_in_file: Option, - debug_comptime_evaluations: &'a mut Vec, + current_function: Option, ) -> Self { - Self { - interner, - scopes, - crate_id, - debug_comptime_in_file, - debug_comptime_evaluations, - in_loop: false, - } + Self { elaborator, crate_id, current_function, in_loop: false } } pub(crate) fn call_function( @@ -85,11 +70,16 @@ impl<'a> Interpreter<'a> { instantiation_bindings: TypeBindings, location: Location, ) -> IResult { - let trait_method = self.interner.get_trait_method_id(function); + let trait_method = self.elaborator.interner.get_trait_method_id(function); perform_instantiation_bindings(&instantiation_bindings); - let impl_bindings = perform_impl_bindings(self.interner, trait_method, function, location)?; + let impl_bindings = + perform_impl_bindings(self.elaborator.interner, trait_method, function, location)?; + let old_function = self.current_function.replace(function); + let result = self.call_function_inner(function, arguments, location); + + self.current_function = old_function; undo_instantiation_bindings(impl_bindings); undo_instantiation_bindings(instantiation_bindings); result @@ -101,7 +91,7 @@ impl<'a> Interpreter<'a> { arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - let meta = self.interner.function_meta(&function); + let meta = self.elaborator.interner.function_meta(&function); if meta.parameters.len() != arguments.len() { return 
Err(InterpreterError::ArgumentCountMismatch { expected: meta.parameters.len(), @@ -110,16 +100,17 @@ impl<'a> Interpreter<'a> { }); } - let is_comptime = self.interner.function_modifiers(&function).is_comptime; + let is_comptime = self.elaborator.interner.function_modifiers(&function).is_comptime; if !is_comptime && meta.source_crate == self.crate_id { // Calling non-comptime functions from within the current crate is restricted // as non-comptime items will have not been elaborated yet. - let function = self.interner.function_name(&function).to_owned(); + let function = self.elaborator.interner.function_name(&function).to_owned(); return Err(InterpreterError::NonComptimeFnCallInSameCrate { function, location }); } if meta.kind != FunctionKind::Normal { - return self.call_builtin(function, arguments, location); + let return_type = meta.return_type().follow_bindings(); + return self.call_builtin(function, arguments, return_type, location); } let parameters = meta.parameters.0.clone(); @@ -129,7 +120,12 @@ impl<'a> Interpreter<'a> { self.define_pattern(parameter, typ, argument, arg_location)?; } - let function_body = self.interner.function(&function).as_expr(); + let function_body = + self.elaborator.interner.function(&function).try_as_expr().ok_or_else(|| { + let function = self.elaborator.interner.function_name(&function).to_owned(); + InterpreterError::NonComptimeFnCallInSameCrate { function, location } + })?; + let result = self.evaluate(function_body)?; self.exit_function(previous_state); @@ -140,18 +136,25 @@ impl<'a> Interpreter<'a> { &mut self, function: FuncId, arguments: Vec<(Value, Location)>, + return_type: Type, location: Location, ) -> IResult { - let attributes = self.interner.function_attributes(&function); + let attributes = self.elaborator.interner.function_attributes(&function); let func_attrs = attributes.function.as_ref() .expect("all builtin functions must contain a function attribute which contains the opcode which it links to"); if let Some(builtin) = func_attrs.builtin() { let builtin = builtin.clone(); - builtin::call_builtin(self.interner, &builtin, arguments, location) + builtin::call_builtin( + self.elaborator.interner, + &builtin, + arguments, + return_type, + location, + ) } else if let Some(foreign) = func_attrs.foreign() { - let item = format!("Comptime evaluation for foreign functions like {foreign}"); - Err(InterpreterError::Unimplemented { item, location }) + let foreign = foreign.clone(); + foreign::call_foreign(self.elaborator.interner, &foreign, arguments, location) } else if let Some(oracle) = func_attrs.oracle() { if oracle == "print" { self.print_oracle(arguments) @@ -160,7 +163,7 @@ impl<'a> Interpreter<'a> { Err(InterpreterError::Unimplemented { item, location }) } } else { - let name = self.interner.function_name(&function); + let name = self.elaborator.interner.function_name(&function); unreachable!("Non-builtin, lowlevel or oracle builtin fn '{name}'") } } @@ -200,8 +203,8 @@ impl<'a> Interpreter<'a> { pub(super) fn enter_function(&mut self) -> (bool, Vec>) { // Drain every scope except the global scope let mut scope = Vec::new(); - if self.scopes.len() > 1 { - scope = self.scopes.drain(1..).collect(); + if self.elaborator.comptime_scopes.len() > 1 { + scope = self.elaborator.comptime_scopes.drain(1..).collect(); } self.push_scope(); (std::mem::take(&mut self.in_loop), scope) @@ -211,21 +214,21 @@ impl<'a> Interpreter<'a> { self.in_loop = state.0; // Keep only the global scope - self.scopes.truncate(1); - self.scopes.append(&mut state.1); + 
self.elaborator.comptime_scopes.truncate(1); + self.elaborator.comptime_scopes.append(&mut state.1); } pub(super) fn push_scope(&mut self) { - self.scopes.push(HashMap::default()); + self.elaborator.comptime_scopes.push(HashMap::default()); } pub(super) fn pop_scope(&mut self) { - self.scopes.pop(); + self.elaborator.comptime_scopes.pop(); } fn current_scope_mut(&mut self) -> &mut HashMap { // the global scope is always at index zero, so this is always Some - self.scopes.last_mut().unwrap() + self.elaborator.comptime_scopes.last_mut().unwrap() } pub(super) fn define_pattern( @@ -241,6 +244,8 @@ impl<'a> Interpreter<'a> { Ok(()) } HirPattern::Mutable(pattern, _) => { + // Create a mutable reference to store to + let argument = Value::Pointer(Shared::new(argument), true); self.define_pattern(pattern, typ, argument, location) } HirPattern::Tuple(pattern_fields, _) => match (argument, typ) { @@ -306,7 +311,7 @@ impl<'a> Interpreter<'a> { return Ok(()); } - for scope in self.scopes.iter_mut().rev() { + for scope in self.elaborator.comptime_scopes.iter_mut().rev() { if let Entry::Occupied(mut entry) = scope.entry(id) { entry.insert(argument); return Ok(()); @@ -320,7 +325,7 @@ impl<'a> Interpreter<'a> { } pub fn lookup_id(&self, id: DefinitionId, location: Location) -> IResult { - for scope in self.scopes.iter().rev() { + for scope in self.elaborator.comptime_scopes.iter().rev() { if let Some(value) = scope.get(&id) { return Ok(value.clone()); } @@ -329,14 +334,25 @@ impl<'a> Interpreter<'a> { if id == DefinitionId::dummy_id() { Err(InterpreterError::VariableNotInScope { location }) } else { - let name = self.interner.definition_name(id).to_string(); + let name = self.elaborator.interner.definition_name(id).to_string(); Err(InterpreterError::NonComptimeVarReferenced { name, location }) } } - /// Evaluate an expression and return the result + /// Evaluate an expression and return the result. + /// This will automatically dereference a mutable variable if used. pub fn evaluate(&mut self, id: ExprId) -> IResult { - match self.interner.expression(&id) { + match self.evaluate_no_dereference(id)? { + Value::Pointer(elem, true) => Ok(elem.borrow().clone()), + other => Ok(other), + } + } + + /// Evaluating a mutable variable will dereference it automatically. + /// This function should be used when that is not desired - e.g. when + /// compiling a `&mut var` expression to grab the original reference. + fn evaluate_no_dereference(&mut self, id: ExprId) -> IResult { + match self.elaborator.interner.expression(&id) { HirExpression::Ident(ident, _) => self.evaluate_ident(ident, id), HirExpression::Literal(literal) => self.evaluate_literal(literal, id), HirExpression::Block(block) => self.evaluate_block(block), @@ -356,33 +372,34 @@ impl<'a> Interpreter<'a> { HirExpression::Unquote(tokens) => { // An Unquote expression being found is indicative of a macro being // expanded within another comptime fn which we don't currently support. 
- let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InterpreterError::UnquoteFoundDuringEvaluation { location }) } HirExpression::Error => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InterpreterError::ErrorNodeEncountered { location }) } } } pub(super) fn evaluate_ident(&mut self, ident: HirIdent, id: ExprId) -> IResult { - let definition = self.interner.try_definition(ident.id).ok_or_else(|| { - let location = self.interner.expr_location(&id); + let definition = self.elaborator.interner.try_definition(ident.id).ok_or_else(|| { + let location = self.elaborator.interner.expr_location(&id); InterpreterError::VariableNotInScope { location } })?; if let ImplKind::TraitMethod(method, _, _) = ident.impl_kind { - let method_id = resolve_trait_method(self.interner, method, id)?; - let typ = self.interner.id_type(id).follow_bindings(); - let bindings = self.interner.get_instantiation_bindings(id).clone(); + let method_id = resolve_trait_method(self.elaborator.interner, method, id)?; + let typ = self.elaborator.interner.id_type(id).follow_bindings(); + let bindings = self.elaborator.interner.get_instantiation_bindings(id).clone(); return Ok(Value::Function(method_id, typ, Rc::new(bindings))); } match &definition.kind { DefinitionKind::Function(function_id) => { - let typ = self.interner.id_type(id).follow_bindings(); - let bindings = Rc::new(self.interner.get_instantiation_bindings(id).clone()); + let typ = self.elaborator.interner.id_type(id).follow_bindings(); + let bindings = + Rc::new(self.elaborator.interner.get_instantiation_bindings(id).clone()); Ok(Value::Function(*function_id, typ, bindings)) } DefinitionKind::Local(_) => self.lookup(&ident), @@ -391,13 +408,16 @@ impl<'a> Interpreter<'a> { if let Ok(value) = self.lookup(&ident) { Ok(value) } else { + let crate_of_global = self.elaborator.interner.get_global(*global_id).crate_id; let let_ = - self.interner.get_global_let_statement(*global_id).ok_or_else(|| { - let location = self.interner.expr_location(&id); - InterpreterError::VariableNotInScope { location } - })?; + self.elaborator.interner.get_global_let_statement(*global_id).ok_or_else( + || { + let location = self.elaborator.interner.expr_location(&id); + InterpreterError::VariableNotInScope { location } + }, + )?; - if let_.comptime { + if let_.comptime || crate_of_global != self.crate_id { self.evaluate_let(let_.clone())?; } self.lookup(&ident) @@ -410,10 +430,10 @@ impl<'a> Interpreter<'a> { }; if let Some(value) = value { - let typ = self.interner.id_type(id); + let typ = self.elaborator.interner.id_type(id); self.evaluate_integer((value as u128).into(), false, id) } else { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); let typ = Type::TypeVariable(type_variable.clone(), TypeVariableKind::Normal); Err(InterpreterError::NonIntegerArrayLength { typ, location }) } @@ -431,7 +451,7 @@ impl<'a> Interpreter<'a> { HirLiteral::Str(string) => Ok(Value::String(Rc::new(string))), HirLiteral::FmtStr(_, _) => { let item = "format strings in a comptime context".into(); - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InterpreterError::Unimplemented { item, location }) } HirLiteral::Array(array) => self.evaluate_array(array, id), @@ -445,8 +465,8 @@ impl<'a> Interpreter<'a> { is_negative: bool, id: ExprId, ) -> IResult { - let 
typ = self.interner.id_type(id).follow_bindings(); - let location = self.interner.expr_location(&id); + let typ = self.elaborator.interner.id_type(id).follow_bindings(); + let location = self.elaborator.interner.expr_location(&id); if let Type::FieldElement = &typ { Ok(Value::Field(value)) @@ -559,7 +579,7 @@ impl<'a> Interpreter<'a> { } fn evaluate_array(&mut self, array: HirArrayLiteral, id: ExprId) -> IResult { - let typ = self.interner.id_type(id).follow_bindings(); + let typ = self.elaborator.interner.id_type(id).follow_bindings(); match array { HirArrayLiteral::Standard(elements) => { @@ -577,7 +597,7 @@ impl<'a> Interpreter<'a> { let elements = (0..length).map(|_| element.clone()).collect(); Ok(Value::Array(elements, typ)) } else { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InterpreterError::NonIntegerArrayLength { typ: length, location }) } } @@ -592,8 +612,16 @@ impl<'a> Interpreter<'a> { } fn evaluate_prefix(&mut self, prefix: HirPrefixExpression, id: ExprId) -> IResult { - let rhs = self.evaluate(prefix.rhs)?; - self.evaluate_prefix_with_value(rhs, prefix.operator, id) + let rhs = match prefix.operator { + UnaryOp::MutableReference => self.evaluate_no_dereference(prefix.rhs)?, + _ => self.evaluate(prefix.rhs)?, + }; + + if self.elaborator.interner.get_selected_impl_for_expression(id).is_some() { + self.evaluate_overloaded_prefix(prefix, rhs, id) + } else { + self.evaluate_prefix_with_value(rhs, prefix.operator, id) + } } fn evaluate_prefix_with_value( @@ -614,7 +642,7 @@ impl<'a> Interpreter<'a> { Value::U32(value) => Ok(Value::U32(0 - value)), Value::U64(value) => Ok(Value::U64(0 - value)), value => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); let operator = "minus"; Err(InterpreterError::InvalidValueForUnary { value, location, operator }) } @@ -630,15 +658,23 @@ impl<'a> Interpreter<'a> { Value::U32(value) => Ok(Value::U32(!value)), Value::U64(value) => Ok(Value::U64(!value)), value => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InterpreterError::InvalidValueForUnary { value, location, operator: "not" }) } }, - UnaryOp::MutableReference => Ok(Value::Pointer(Shared::new(rhs))), + UnaryOp::MutableReference => { + // If this is a mutable variable (auto_deref = true), turn this into an explicit + // mutable reference just by switching the value of `auto_deref`. Otherwise, wrap + // the value in a fresh reference. 
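+                // For example, `let mut x = 3` stores `x` as `Pointer(3, true)`, so taking
+                // `&mut x` here just yields `Pointer(3, false)` over the same shared element.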
+ match rhs { + Value::Pointer(elem, true) => Ok(Value::Pointer(elem, false)), + other => Ok(Value::Pointer(Shared::new(other), false)), + } + } UnaryOp::Dereference { implicitly_added: _ } => match rhs { - Value::Pointer(element) => Ok(element.borrow().clone()), + Value::Pointer(element, _) => Ok(element.borrow().clone()), value => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InterpreterError::NonPointerDereferenced { value, location }) } }, @@ -649,7 +685,7 @@ impl<'a> Interpreter<'a> { let lhs = self.evaluate(infix.lhs)?; let rhs = self.evaluate(infix.rhs)?; - if self.interner.get_selected_impl_for_expression(id).is_some() { + if self.elaborator.interner.get_selected_impl_for_expression(id).is_some() { return self.evaluate_overloaded_infix(infix, lhs, rhs, id); } @@ -666,7 +702,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs + rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs + rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "+" }) } }, @@ -681,7 +717,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs - rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs - rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "-" }) } }, @@ -696,7 +732,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs * rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs * rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "*" }) } }, @@ -711,7 +747,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs / rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs / rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "/" }) } }, @@ -726,7 +762,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs == rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "==" }) } }, @@ -741,7 +777,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs != rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "!=" }) } }, @@ -756,7 +792,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs < rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "<" }) } }, @@ -771,7 +807,7 @@ impl<'a> Interpreter<'a> { 
(Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs <= rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "<=" }) } }, @@ -786,7 +822,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs > rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: ">" }) } }, @@ -801,7 +837,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs >= rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: ">=" }) } }, @@ -816,7 +852,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs & rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs & rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "&" }) } }, @@ -831,7 +867,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs | rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs | rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "|" }) } }, @@ -846,7 +882,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs ^ rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs ^ rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "^" }) } }, @@ -860,7 +896,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs >> rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs >> rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: ">>" }) } }, @@ -874,7 +910,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs << rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs << rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "<<" }) } }, @@ -888,7 +924,7 @@ impl<'a> Interpreter<'a> { (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs % rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs % rhs)), (lhs, rhs) => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); Err(InvalidValuesForBinary { lhs, rhs, location, operator: "%" }) } }, @@ -905,13 +941,13 @@ impl<'a> Interpreter<'a> { let method = infix.trait_method_id; let operator = infix.operator.kind; - let method_id = resolve_trait_method(self.interner, method, id)?; - 
let type_bindings = self.interner.get_instantiation_bindings(id).clone(); + let method_id = resolve_trait_method(self.elaborator.interner, method, id)?; + let type_bindings = self.elaborator.interner.get_instantiation_bindings(id).clone(); - let lhs = (lhs, self.interner.expr_location(&infix.lhs)); - let rhs = (rhs, self.interner.expr_location(&infix.rhs)); + let lhs = (lhs, self.elaborator.interner.expr_location(&infix.lhs)); + let rhs = (rhs, self.elaborator.interner.expr_location(&infix.rhs)); - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); let value = self.call_function(method_id, vec![lhs, rhs], type_bindings, location)?; // Certain operators add additional operations after the trait call: @@ -925,6 +961,25 @@ impl<'a> Interpreter<'a> { } } + fn evaluate_overloaded_prefix( + &mut self, + prefix: HirPrefixExpression, + rhs: Value, + id: ExprId, + ) -> IResult { + let method = + prefix.trait_method_id.expect("ice: expected prefix operator trait at this point"); + let operator = prefix.operator; + + let method_id = resolve_trait_method(self.elaborator.interner, method, id)?; + let type_bindings = self.elaborator.interner.get_instantiation_bindings(id).clone(); + + let rhs = (rhs, self.elaborator.interner.expr_location(&prefix.rhs)); + + let location = self.elaborator.interner.expr_location(&id); + self.call_function(method_id, vec![rhs], type_bindings, location) + } + /// Given the result of a `cmp` operation, convert it into the boolean result of the given operator. /// - `<`: `ordering == Ordering::Less` /// - `<=`: `ordering != Ordering::Greater` @@ -957,7 +1012,7 @@ impl<'a> Interpreter<'a> { let array = self.evaluate(index.collection)?; let index = self.evaluate(index.index)?; - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); let (array, index) = self.bounds_check(array, index, location)?; Ok(array[index].clone()) @@ -1021,7 +1076,7 @@ impl<'a> Interpreter<'a> { }) .collect::>()?; - let typ = self.interner.id_type(id).follow_bindings(); + let typ = self.elaborator.interner.id_type(id).follow_bindings(); Ok(Value::Struct(fields, typ)) } @@ -1041,13 +1096,13 @@ impl<'a> Interpreter<'a> { (fields, Type::Tuple(field_types)) } value => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); return Err(InterpreterError::NonTupleOrStructInMemberAccess { value, location }); } }; fields.get(&access.rhs.0.contents).cloned().ok_or_else(|| { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); let value = Value::Struct(fields, struct_type); let field_name = access.rhs.0.contents; InterpreterError::ExpectedStructToHaveField { value, field_name, location } @@ -1057,14 +1112,22 @@ impl<'a> Interpreter<'a> { fn evaluate_call(&mut self, call: HirCallExpression, id: ExprId) -> IResult { let function = self.evaluate(call.func)?; let arguments = try_vecmap(call.arguments, |arg| { - Ok((self.evaluate(arg)?, self.interner.expr_location(&arg))) + Ok((self.evaluate(arg)?, self.elaborator.interner.expr_location(&arg))) })?; - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); match function { Value::Function(function_id, _, bindings) => { let bindings = unwrap_rc(bindings); - self.call_function(function_id, arguments, bindings, location) + let mut result = self.call_function(function_id, arguments, 
bindings, location)?; + if call.is_macro_call { + let expr = result.into_expression(self.elaborator.interner, location)?; + let expr = self + .elaborator + .elaborate_expression_from_comptime(expr, self.current_function); + result = self.evaluate(expr)?; + } + Ok(result) } Value::Closure(closure, env, _) => self.call_closure(closure, env, arguments, location), value => Err(InterpreterError::NonFunctionCalled { value, location }), @@ -1078,19 +1141,22 @@ impl<'a> Interpreter<'a> { ) -> IResult { let object = self.evaluate(call.object)?; let arguments = try_vecmap(call.arguments, |arg| { - Ok((self.evaluate(arg)?, self.interner.expr_location(&arg))) + Ok((self.evaluate(arg)?, self.elaborator.interner.expr_location(&arg))) })?; - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); let typ = object.get_type().follow_bindings(); let method_name = &call.method.0.contents; // TODO: Traits let method = match &typ { - Type::Struct(struct_def, _) => { - self.interner.lookup_method(&typ, struct_def.borrow().id, method_name, false) - } - _ => self.interner.lookup_primitive_method(&typ, method_name), + Type::Struct(struct_def, _) => self.elaborator.interner.lookup_method( + &typ, + struct_def.borrow().id, + method_name, + false, + ), + _ => self.elaborator.interner.lookup_primitive_method(&typ, method_name), }; if let Some(method) = method { @@ -1102,7 +1168,7 @@ impl<'a> Interpreter<'a> { fn evaluate_cast(&mut self, cast: &HirCastExpression, id: ExprId) -> IResult { let evaluated_lhs = self.evaluate(cast.lhs)?; - Self::evaluate_cast_one_step(cast, id, evaluated_lhs, self.interner) + Self::evaluate_cast_one_step(cast, id, evaluated_lhs, self.elaborator.interner) } /// evaluate_cast without recursion @@ -1204,7 +1270,7 @@ impl<'a> Interpreter<'a> { let condition = match self.evaluate(if_.condition)? 
{ Value::Bool(value) => value, value => { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); return Err(InterpreterError::NonBoolUsedInIf { value, location }); } }; @@ -1235,22 +1301,22 @@ impl<'a> Interpreter<'a> { } fn evaluate_lambda(&mut self, lambda: HirLambda, id: ExprId) -> IResult { - let location = self.interner.expr_location(&id); + let location = self.elaborator.interner.expr_location(&id); let environment = try_vecmap(&lambda.captures, |capture| self.lookup_id(capture.ident.id, location))?; - let typ = self.interner.id_type(id).follow_bindings(); + let typ = self.elaborator.interner.id_type(id).follow_bindings(); Ok(Value::Closure(lambda, environment, typ)) } fn evaluate_quote(&mut self, mut tokens: Tokens, expr_id: ExprId) -> IResult { - let location = self.interner.expr_location(&expr_id); + let location = self.elaborator.interner.expr_location(&expr_id); tokens = self.substitute_unquoted_values_into_tokens(tokens, location)?; Ok(Value::Code(Rc::new(tokens))) } pub fn evaluate_statement(&mut self, statement: StmtId) -> IResult { - match self.interner.statement(&statement) { + match self.elaborator.interner.statement(&statement) { HirStatement::Let(let_) => self.evaluate_let(let_), HirStatement::Constrain(constrain) => self.evaluate_constrain(constrain), HirStatement::Assign(assign) => self.evaluate_assign(assign), @@ -1264,7 +1330,7 @@ impl<'a> Interpreter<'a> { Ok(Value::Unit) } HirStatement::Error => { - let location = self.interner.id_location(statement); + let location = self.elaborator.interner.id_location(statement); Err(InterpreterError::ErrorNodeEncountered { location }) } } @@ -1272,7 +1338,7 @@ impl<'a> Interpreter<'a> { pub fn evaluate_let(&mut self, let_: HirLetStatement) -> IResult { let rhs = self.evaluate(let_.expression)?; - let location = self.interner.expr_location(&let_.expression); + let location = self.elaborator.interner.expr_location(&let_.expression); self.define_pattern(&let_.pattern, &let_.r#type, rhs, location)?; Ok(Value::Unit) } @@ -1281,12 +1347,12 @@ impl<'a> Interpreter<'a> { match self.evaluate(constrain.0)? { Value::Bool(true) => Ok(Value::Unit), Value::Bool(false) => { - let location = self.interner.expr_location(&constrain.0); + let location = self.elaborator.interner.expr_location(&constrain.0); let message = constrain.2.and_then(|expr| self.evaluate(expr).ok()); Err(InterpreterError::FailingConstraint { location, message }) } value => { - let location = self.interner.expr_location(&constrain.0); + let location = self.elaborator.interner.expr_location(&constrain.0); Err(InterpreterError::NonBoolUsedInConstrain { value, location }) } } @@ -1303,7 +1369,7 @@ impl<'a> Interpreter<'a> { HirLValue::Ident(ident, typ) => self.mutate(ident.id, rhs, ident.location), HirLValue::Dereference { lvalue, element_type: _, location } => { match self.evaluate_lvalue(&lvalue)? { - Value::Pointer(value) => { + Value::Pointer(value, _) => { *value.borrow_mut() = rhs; Ok(()) } @@ -1353,10 +1419,13 @@ impl<'a> Interpreter<'a> { fn evaluate_lvalue(&mut self, lvalue: &HirLValue) -> IResult { match lvalue { - HirLValue::Ident(ident, _) => self.lookup(ident), + HirLValue::Ident(ident, _) => match self.lookup(ident)? { + Value::Pointer(elem, true) => Ok(elem.borrow().clone()), + other => Ok(other), + }, HirLValue::Dereference { lvalue, element_type: _, location } => { match self.evaluate_lvalue(lvalue)? 
{ - Value::Pointer(value) => Ok(value.borrow().clone()), + Value::Pointer(value, _) => Ok(value.borrow().clone()), value => { Err(InterpreterError::NonPointerDereferenced { value, location: *location }) } @@ -1403,7 +1472,7 @@ impl<'a> Interpreter<'a> { Value::U32(value) => Ok((value as i128, |i| Value::U32(i as u32))), Value::U64(value) => Ok((value as i128, |i| Value::U64(i as u64))), value => { - let location = this.interner.expr_location(&expr); + let location = this.elaborator.interner.expr_location(&expr); Err(InterpreterError::NonIntegerUsedInLoop { value, location }) } } @@ -1435,7 +1504,7 @@ impl<'a> Interpreter<'a> { if self.in_loop { Err(InterpreterError::Break) } else { - let location = self.interner.statement_location(id); + let location = self.elaborator.interner.statement_location(id); Err(InterpreterError::BreakNotInLoop { location }) } } @@ -1444,7 +1513,7 @@ impl<'a> Interpreter<'a> { if self.in_loop { Err(InterpreterError::Continue) } else { - let location = self.interner.statement_location(id); + let location = self.elaborator.interner.statement_location(id); Err(InterpreterError::ContinueNotInLoop { location }) } } diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 399d9905269..02c45165ee3 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -1,25 +1,40 @@ -use std::rc::Rc; +use std::{ + hash::{Hash, Hasher}, + rc::Rc, +}; -use noirc_errors::Location; +use acvm::{AcirField, FieldElement}; +use chumsky::Parser; +use iter_extended::{try_vecmap, vecmap}; +use noirc_errors::{Location, Span}; +use rustc_hash::FxHashMap as HashMap; use crate::{ - ast::IntegerBitSize, + ast::{IntegerBitSize, TraitBound}, hir::comptime::{errors::IResult, InterpreterError, Value}, - macros_api::{NodeInterner, Signedness}, + macros_api::{NodeInterner, Path, Signedness, UnresolvedTypeData}, + node_interner::TraitId, + parser, token::{SpannedToken, Token, Tokens}, - QuotedType, Type, + QuotedType, Shared, Type, }; pub(super) fn call_builtin( interner: &mut NodeInterner, name: &str, arguments: Vec<(Value, Location)>, + return_type: Type, location: Location, ) -> IResult { match name { "array_len" => array_len(interner, arguments, location), "as_slice" => as_slice(interner, arguments, location), "is_unconstrained" => Ok(Value::Bool(true)), + "modulus_be_bits" => modulus_be_bits(interner, arguments, location), + "modulus_be_bytes" => modulus_be_bytes(interner, arguments, location), + "modulus_le_bits" => modulus_le_bits(interner, arguments, location), + "modulus_le_bytes" => modulus_le_bytes(interner, arguments, location), + "modulus_num_bits" => modulus_num_bits(interner, arguments, location), "slice_insert" => slice_insert(interner, arguments, location), "slice_pop_back" => slice_pop_back(interner, arguments, location), "slice_pop_front" => slice_pop_front(interner, arguments, location), @@ -29,6 +44,13 @@ pub(super) fn call_builtin( "struct_def_as_type" => struct_def_as_type(interner, arguments, location), "struct_def_fields" => struct_def_fields(interner, arguments, location), "struct_def_generics" => struct_def_generics(interner, arguments, location), + "trait_constraint_eq" => trait_constraint_eq(interner, arguments, location), + "trait_constraint_hash" => trait_constraint_hash(interner, arguments, location), + "trait_def_as_trait_constraint" => { + trait_def_as_trait_constraint(interner, arguments, 
location) + } + "quoted_as_trait_constraint" => quoted_as_trait_constraint(interner, arguments, location), + "zeroed" => zeroed(return_type, location), _ => { let item = format!("Comptime evaluation for builtin function {name}"); Err(InterpreterError::Unimplemented { item, location }) @@ -36,7 +58,7 @@ pub(super) fn call_builtin( } } -fn check_argument_count( +pub(super) fn check_argument_count( expected: usize, arguments: &[(Value, Location)], location: Location, @@ -54,6 +76,21 @@ fn failing_constraint(message: impl Into, location: Location) -> IRes Err(InterpreterError::FailingConstraint { message, location }) } +pub(super) fn get_array( + interner: &NodeInterner, + value: Value, + location: Location, +) -> IResult<(im::Vector, Type)> { + match value { + Value::Array(values, typ) => Ok((values, typ)), + value => { + let type_var = Box::new(interner.next_type_variable()); + let expected = Type::Array(type_var.clone(), type_var); + Err(InterpreterError::TypeMismatch { expected, value, location }) + } + } +} + fn get_slice( interner: &NodeInterner, value: Value, @@ -69,7 +106,16 @@ fn get_slice( } } -fn get_u32(value: Value, location: Location) -> IResult { +pub(super) fn get_field(value: Value, location: Location) -> IResult { + match value { + Value::Field(value) => Ok(value), + value => { + Err(InterpreterError::TypeMismatch { expected: Type::FieldElement, value, location }) + } + } +} + +pub(super) fn get_u32(value: Value, location: Location) -> IResult { match value { Value::U32(value) => Ok(value), value => { @@ -79,6 +125,36 @@ fn get_u32(value: Value, location: Location) -> IResult { } } +fn get_trait_constraint(value: Value, location: Location) -> IResult { + match value { + Value::TraitConstraint(bound) => Ok(bound), + value => { + let expected = Type::Quoted(QuotedType::TraitConstraint); + Err(InterpreterError::TypeMismatch { expected, value, location }) + } + } +} + +fn get_trait_def(value: Value, location: Location) -> IResult { + match value { + Value::TraitDefinition(id) => Ok(id), + value => { + let expected = Type::Quoted(QuotedType::TraitDefinition); + Err(InterpreterError::TypeMismatch { expected, value, location }) + } + } +} + +fn get_quoted(value: Value, location: Location) -> IResult> { + match value { + Value::Code(tokens) => Ok(tokens), + value => { + let expected = Type::Quoted(QuotedType::Quoted); + Err(InterpreterError::TypeMismatch { expected, value, location }) + } + } +} + fn array_len( interner: &NodeInterner, mut arguments: Vec<(Value, Location)>, @@ -231,7 +307,7 @@ fn slice_remove( interner: &mut NodeInterner, mut arguments: Vec<(Value, Location)>, location: Location, -) -> Result { +) -> IResult { check_argument_count(2, &arguments, location)?; let index = get_u32(arguments.pop().unwrap().0, location)? 
as usize; @@ -257,7 +333,7 @@ fn slice_push_front( interner: &mut NodeInterner, mut arguments: Vec<(Value, Location)>, location: Location, -) -> Result { +) -> IResult { check_argument_count(2, &arguments, location)?; let (element, _) = arguments.pop().unwrap(); @@ -270,7 +346,7 @@ fn slice_pop_front( interner: &mut NodeInterner, mut arguments: Vec<(Value, Location)>, location: Location, -) -> Result { +) -> IResult { check_argument_count(1, &arguments, location)?; let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; @@ -284,7 +360,7 @@ fn slice_pop_back( interner: &mut NodeInterner, mut arguments: Vec<(Value, Location)>, location: Location, -) -> Result { +) -> IResult { check_argument_count(1, &arguments, location)?; let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; @@ -298,7 +374,7 @@ fn slice_insert( interner: &mut NodeInterner, mut arguments: Vec<(Value, Location)>, location: Location, -) -> Result { +) -> IResult { check_argument_count(3, &arguments, location)?; let (element, _) = arguments.pop().unwrap(); @@ -307,3 +383,220 @@ fn slice_insert( values.insert(index as usize, element); Ok(Value::Slice(values, typ)) } + +// fn as_trait_constraint(quoted: Quoted) -> TraitConstraint +fn quoted_as_trait_constraint( + _interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + check_argument_count(1, &arguments, location)?; + + let tokens = get_quoted(arguments.pop().unwrap().0, location)?; + let quoted = tokens.as_ref().clone(); + + let trait_bound = parser::trait_bound().parse(quoted).map_err(|mut errors| { + let error = errors.swap_remove(0); + let rule = "a trait constraint"; + InterpreterError::FailedToParseMacro { error, tokens, rule, file: location.file } + })?; + + Ok(Value::TraitConstraint(trait_bound)) +} + +// fn constraint_hash(constraint: TraitConstraint) -> Field +fn trait_constraint_hash( + _interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + check_argument_count(1, &arguments, location)?; + + let bound = get_trait_constraint(arguments.pop().unwrap().0, location)?; + + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + bound.hash(&mut hasher); + let hash = hasher.finish(); + + Ok(Value::Field((hash as u128).into())) +} + +// fn constraint_eq(constraint_a: TraitConstraint, constraint_b: TraitConstraint) -> bool +fn trait_constraint_eq( + _interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + check_argument_count(2, &arguments, location)?; + + let constraint_b = get_trait_constraint(arguments.pop().unwrap().0, location)?; + let constraint_a = get_trait_constraint(arguments.pop().unwrap().0, location)?; + + Ok(Value::Bool(constraint_a == constraint_b)) +} + +// fn zeroed() -> T +fn zeroed(return_type: Type, location: Location) -> IResult { + match return_type { + Type::FieldElement => Ok(Value::Field(0u128.into())), + Type::Array(length_type, elem) => { + if let Some(length) = length_type.evaluate_to_u32() { + let element = zeroed(elem.as_ref().clone(), location)?; + let array = std::iter::repeat(element).take(length as usize).collect(); + Ok(Value::Array(array, Type::Array(length_type, elem))) + } else { + // Assume we can resolve the length later + Ok(Value::Zeroed(Type::Array(length_type, elem))) + } + } + Type::Slice(_) => Ok(Value::Slice(im::Vector::new(), return_type)), + Type::Integer(sign, bits) => match (sign, bits) { + 
(Signedness::Unsigned, IntegerBitSize::One) => Ok(Value::U8(0)), + (Signedness::Unsigned, IntegerBitSize::Eight) => Ok(Value::U8(0)), + (Signedness::Unsigned, IntegerBitSize::Sixteen) => Ok(Value::U16(0)), + (Signedness::Unsigned, IntegerBitSize::ThirtyTwo) => Ok(Value::U32(0)), + (Signedness::Unsigned, IntegerBitSize::SixtyFour) => Ok(Value::U64(0)), + (Signedness::Signed, IntegerBitSize::One) => Ok(Value::I8(0)), + (Signedness::Signed, IntegerBitSize::Eight) => Ok(Value::I8(0)), + (Signedness::Signed, IntegerBitSize::Sixteen) => Ok(Value::I16(0)), + (Signedness::Signed, IntegerBitSize::ThirtyTwo) => Ok(Value::I32(0)), + (Signedness::Signed, IntegerBitSize::SixtyFour) => Ok(Value::I64(0)), + }, + Type::Bool => Ok(Value::Bool(false)), + Type::String(length_type) => { + if let Some(length) = length_type.evaluate_to_u32() { + Ok(Value::String(Rc::new("\0".repeat(length as usize)))) + } else { + // Assume we can resolve the length later + Ok(Value::Zeroed(Type::String(length_type))) + } + } + Type::FmtString(_, _) => { + let item = "format strings in a comptime context".into(); + Err(InterpreterError::Unimplemented { item, location }) + } + Type::Unit => Ok(Value::Unit), + Type::Tuple(fields) => { + Ok(Value::Tuple(try_vecmap(fields, |field| zeroed(field, location))?)) + } + Type::Struct(struct_type, generics) => { + let fields = struct_type.borrow().get_fields(&generics); + let mut values = HashMap::default(); + + for (field_name, field_type) in fields { + let field_value = zeroed(field_type, location)?; + values.insert(Rc::new(field_name), field_value); + } + + let typ = Type::Struct(struct_type, generics); + Ok(Value::Struct(values, typ)) + } + Type::Alias(alias, generics) => zeroed(alias.borrow().get_type(&generics), location), + typ @ Type::Function(..) 
=> { + // Using Value::Zeroed here is probably safer than using FuncId::dummy_id() or similar + Ok(Value::Zeroed(typ)) + } + Type::MutableReference(element) => { + let element = zeroed(*element, location)?; + Ok(Value::Pointer(Shared::new(element), false)) + } + Type::Quoted(QuotedType::TraitConstraint) => Ok(Value::TraitConstraint(TraitBound { + trait_path: Path::from_single(String::new(), Span::default()), + trait_id: None, + trait_generics: Vec::new(), + })), + // Optimistically assume we can resolve this type later or that the value is unused + Type::TypeVariable(_, _) + | Type::Forall(_, _) + | Type::Constant(_) + | Type::Quoted(_) + | Type::Error + | Type::TraitAsType(_, _, _) + | Type::NamedGeneric(_, _, _) => Ok(Value::Zeroed(return_type)), + } +} + +fn modulus_be_bits( + _interner: &mut NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + check_argument_count(0, &arguments, location)?; + + let bits = FieldElement::modulus().to_radix_be(2); + let bits_vector = bits.into_iter().map(|bit| Value::U1(bit != 0)).collect(); + + let int_type = Type::Integer(crate::ast::Signedness::Unsigned, IntegerBitSize::One); + let typ = Type::Slice(Box::new(int_type)); + Ok(Value::Slice(bits_vector, typ)) +} + +fn modulus_be_bytes( + _interner: &mut NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + check_argument_count(0, &arguments, location)?; + + let bytes = FieldElement::modulus().to_bytes_be(); + let bytes_vector = bytes.into_iter().map(Value::U8).collect(); + + let int_type = Type::Integer(crate::ast::Signedness::Unsigned, IntegerBitSize::Eight); + let typ = Type::Slice(Box::new(int_type)); + Ok(Value::Slice(bytes_vector, typ)) +} + +fn modulus_le_bits( + interner: &mut NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + let Value::Slice(bits, typ) = modulus_be_bits(interner, arguments, location)? else { + unreachable!("modulus_be_bits must return slice") + }; + let reversed_bits = bits.into_iter().rev().collect(); + Ok(Value::Slice(reversed_bits, typ)) +} + +fn modulus_le_bytes( + interner: &mut NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + let Value::Slice(bytes, typ) = modulus_be_bytes(interner, arguments, location)? 
else { + unreachable!("modulus_be_bytes must return slice") + }; + let reversed_bytes = bytes.into_iter().rev().collect(); + Ok(Value::Slice(reversed_bytes, typ)) +} + +fn modulus_num_bits( + _interner: &mut NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + check_argument_count(0, &arguments, location)?; + let bits = FieldElement::max_num_bits().into(); + Ok(Value::U64(bits)) +} + +fn trait_def_as_trait_constraint( + interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> Result { + check_argument_count(1, &arguments, location)?; + + let trait_id = get_trait_def(arguments.pop().unwrap().0, location)?; + let the_trait = interner.get_trait(trait_id); + + let trait_path = Path::from_ident(the_trait.name.clone()); + + let trait_generics = vecmap(&the_trait.generics, |generic| { + let name = Path::from_single(generic.name.as_ref().clone(), generic.span); + UnresolvedTypeData::Named(name, Vec::new(), false).with_span(generic.span) + }); + + let trait_id = Some(trait_id); + Ok(Value::TraitConstraint(TraitBound { trait_path, trait_id, trait_generics })) +} diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs new file mode 100644 index 00000000000..fc8c57ab634 --- /dev/null +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs @@ -0,0 +1,48 @@ +use acvm::BlackBoxFunctionSolver; +use bn254_blackbox_solver::Bn254BlackBoxSolver; +use iter_extended::try_vecmap; +use noirc_errors::Location; + +use crate::{ + hir::comptime::{errors::IResult, interpreter::builtin::get_field, InterpreterError, Value}, + macros_api::NodeInterner, +}; + +use super::builtin::{check_argument_count, get_array, get_u32}; + +pub(super) fn call_foreign( + interner: &mut NodeInterner, + name: &str, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + match name { + "poseidon2_permutation" => poseidon2_permutation(interner, arguments, location), + _ => { + let item = format!("Comptime evaluation for builtin function {name}"); + Err(InterpreterError::Unimplemented { item, location }) + } + } +} + +// poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] +fn poseidon2_permutation( + interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + check_argument_count(2, &arguments, location)?; + + let state_length = get_u32(arguments.pop().unwrap().0, location)?; + let (input, typ) = get_array(interner, arguments.pop().unwrap().0, location)?; + + let input = try_vecmap(input, |integer| get_field(integer, location))?; + + // Currently locked to only bn254! 
+ let fields = Bn254BlackBoxSolver + .poseidon2_permutation(&input, state_length) + .map_err(|error| InterpreterError::BlackBoxError(error, location))?; + + let array = fields.into_iter().map(Value::Field).collect(); + Ok(Value::Array(array, typ)) +} diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs index a1ceb27afb2..94a848b891d 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs @@ -7,7 +7,7 @@ use crate::{ use super::Interpreter; -impl<'a> Interpreter<'a> { +impl<'local, 'interner> Interpreter<'local, 'interner> { /// Evaluates any expressions within UnquoteMarkers in the given token list /// and replaces the expression held by the marker with the evaluated value /// in expression form. @@ -27,7 +27,8 @@ impl<'a> Interpreter<'a> { // turning it into a Quoted block (which would add `quote`, `{`, and `}` tokens). Value::Code(stream) => new_tokens.extend(unwrap_rc(stream).0), value => { - let new_id = value.into_hir_expression(self.interner, location)?; + let new_id = + value.into_hir_expression(self.elaborator.interner, location)?; let new_token = Token::UnquoteMarker(new_id); new_tokens.push(SpannedToken::new(new_token, span)); } diff --git a/compiler/noirc_frontend/src/hir/comptime/mod.rs b/compiler/noirc_frontend/src/hir/comptime/mod.rs index 3cc7b5f7e98..16090c64174 100644 --- a/compiler/noirc_frontend/src/hir/comptime/mod.rs +++ b/compiler/noirc_frontend/src/hir/comptime/mod.rs @@ -1,7 +1,6 @@ mod errors; mod hir_to_display_ast; mod interpreter; -mod scan; mod tests; mod value; diff --git a/compiler/noirc_frontend/src/hir/comptime/scan.rs b/compiler/noirc_frontend/src/hir/comptime/scan.rs deleted file mode 100644 index f9cc54ef9e4..00000000000 --- a/compiler/noirc_frontend/src/hir/comptime/scan.rs +++ /dev/null @@ -1,271 +0,0 @@ -//! This module is for the scanning of the Hir by the interpreter. -//! In this initial step, the Hir is scanned for `Comptime` nodes -//! without actually executing anything until such a node is found. -//! Once such a node is found, the interpreter will call the relevant -//! evaluate method on that node type, insert the result into the Ast, -//! and continue scanning the rest of the program. -//! -//! Since it mostly just needs to recur on the Hir looking for Comptime -//! nodes, this pass is fairly simple. The only thing it really needs to -//! ensure to do is to push and pop scopes on the interpreter as needed -//! so that any variables defined within e.g. an `if` statement containing -//! a `Comptime` block aren't accessible outside of the `if`. -use crate::{ - hir_def::{ - expr::{ - HirArrayLiteral, HirBlockExpression, HirCallExpression, HirConstructorExpression, - HirIdent, HirIfExpression, HirIndexExpression, HirInfixExpression, HirLambda, - HirMethodCallExpression, - }, - stmt::HirForStatement, - }, - macros_api::{HirExpression, HirLiteral, HirStatement}, - node_interner::{DefinitionKind, ExprId, FuncId, GlobalId, StmtId}, -}; - -use super::{ - errors::{IResult, InterpreterError}, - interpreter::Interpreter, - Value, -}; - -use noirc_errors::Location; - -#[allow(dead_code)] -impl<'interner> Interpreter<'interner> { - /// Scan through a function, evaluating any Comptime nodes found. - /// These nodes will be modified in place, replaced with the - /// result of their evaluation. 
- pub fn scan_function(&mut self, function: FuncId) -> IResult<()> { - // Don't scan through functions that are already comptime. They may use comptime-only - // features (most likely HirExpression::Quote) that we'd otherwise error for. - if self.interner.function_modifiers(&function).is_comptime { - return Ok(()); - } - - let function = self.interner.function(&function); - - let state = self.enter_function(); - self.scan_expression(function.as_expr())?; - self.exit_function(state); - Ok(()) - } - - /// Evaluate this global if it is a comptime global. - /// Otherwise, scan through its expression for any comptime blocks to evaluate. - pub fn scan_global(&mut self, global: GlobalId) -> IResult<()> { - if let Some(let_) = self.interner.get_global_let_statement(global) { - if let_.comptime { - self.evaluate_let(let_)?; - } else { - self.scan_expression(let_.expression)?; - } - } - Ok(()) - } - - fn scan_expression(&mut self, expr: ExprId) -> IResult<()> { - match self.interner.expression(&expr) { - HirExpression::Ident(ident, _) => self.scan_ident(ident, expr), - HirExpression::Literal(literal) => self.scan_literal(literal), - HirExpression::Block(block) => self.scan_block(block), - HirExpression::Prefix(prefix) => self.scan_expression(prefix.rhs), - HirExpression::Infix(infix) => self.scan_infix(infix), - HirExpression::Index(index) => self.scan_index(index), - HirExpression::Constructor(constructor) => self.scan_constructor(constructor), - HirExpression::MemberAccess(member_access) => self.scan_expression(member_access.lhs), - HirExpression::Call(call) => self.scan_call(call), - HirExpression::MethodCall(method_call) => self.scan_method_call(method_call), - HirExpression::Cast(cast) => self.scan_expression(cast.lhs), - HirExpression::If(if_) => self.scan_if(if_), - HirExpression::Tuple(tuple) => self.scan_tuple(tuple), - HirExpression::Lambda(lambda) => self.scan_lambda(lambda), - HirExpression::Comptime(block) => { - let location = self.interner.expr_location(&expr); - let new_expr_id = - self.evaluate_block(block)?.into_hir_expression(self.interner, location)?; - let new_expr = self.interner.expression(&new_expr_id); - self.debug_comptime(new_expr_id, location); - self.interner.replace_expr(&expr, new_expr); - Ok(()) - } - HirExpression::Quote(_) => { - // This error could be detected much earlier in the compiler pipeline but - // it just makes sense for the comptime code to handle comptime things. - let location = self.interner.expr_location(&expr); - Err(InterpreterError::QuoteInRuntimeCode { location }) - } - HirExpression::Error => Ok(()), - - // Unquote should only be inserted by the comptime interpreter while expanding macros - // and is removed by the Hir -> Ast conversion pass which converts it into a normal block. - // If we find one now during scanning it most likely means the Hir -> Ast conversion - // missed it somehow. In the future we may allow users to manually write unquote - // expressions in their code but for now this is unreachable. - HirExpression::Unquote(block) => { - unreachable!("Found unquote block while scanning: {block:?}") - } - } - } - - // Identifiers have no code to execute but we may need to inline any values - // of comptime variables into runtime code. - fn scan_ident(&mut self, ident: HirIdent, id: ExprId) -> IResult<()> { - let definition = self.interner.definition(ident.id); - - match &definition.kind { - DefinitionKind::Function(_) => Ok(()), - _ => { - // Opportunistically evaluate this identifier to see if it is compile-time known. 
- // If so, inline its value. - if let Ok(value) = self.evaluate_ident(ident, id) { - // TODO(#4922): Inlining closures is currently unimplemented - if !matches!(value, Value::Closure(..)) { - let new_expr = self.inline_expression(value, id)?; - let location = self.interner.id_location(id); - self.debug_comptime(new_expr, location); - } - } - Ok(()) - } - } - } - - fn scan_literal(&mut self, literal: HirLiteral) -> IResult<()> { - match literal { - HirLiteral::Array(elements) | HirLiteral::Slice(elements) => match elements { - HirArrayLiteral::Standard(elements) => { - for element in elements { - self.scan_expression(element)?; - } - Ok(()) - } - HirArrayLiteral::Repeated { repeated_element, length: _ } => { - self.scan_expression(repeated_element) - } - }, - HirLiteral::Bool(_) - | HirLiteral::Integer(_, _) - | HirLiteral::Str(_) - | HirLiteral::FmtStr(_, _) - | HirLiteral::Unit => Ok(()), - } - } - - fn scan_block(&mut self, block: HirBlockExpression) -> IResult<()> { - self.push_scope(); - for statement in &block.statements { - self.scan_statement(*statement)?; - } - self.pop_scope(); - Ok(()) - } - - fn scan_infix(&mut self, infix: HirInfixExpression) -> IResult<()> { - self.scan_expression(infix.lhs)?; - self.scan_expression(infix.rhs) - } - - fn scan_index(&mut self, index: HirIndexExpression) -> IResult<()> { - self.scan_expression(index.collection)?; - self.scan_expression(index.index) - } - - fn scan_constructor(&mut self, constructor: HirConstructorExpression) -> IResult<()> { - for (_, field) in constructor.fields { - self.scan_expression(field)?; - } - Ok(()) - } - - fn scan_call(&mut self, call: HirCallExpression) -> IResult<()> { - self.scan_expression(call.func)?; - for arg in call.arguments { - self.scan_expression(arg)?; - } - Ok(()) - } - - fn scan_method_call(&mut self, method_call: HirMethodCallExpression) -> IResult<()> { - self.scan_expression(method_call.object)?; - for arg in method_call.arguments { - self.scan_expression(arg)?; - } - Ok(()) - } - - fn scan_if(&mut self, if_: HirIfExpression) -> IResult<()> { - self.scan_expression(if_.condition)?; - - self.push_scope(); - self.scan_expression(if_.consequence)?; - self.pop_scope(); - - if let Some(alternative) = if_.alternative { - self.push_scope(); - self.scan_expression(alternative)?; - self.pop_scope(); - } - Ok(()) - } - - fn scan_tuple(&mut self, tuple: Vec) -> IResult<()> { - for field in tuple { - self.scan_expression(field)?; - } - Ok(()) - } - - fn scan_lambda(&mut self, lambda: HirLambda) -> IResult<()> { - self.scan_expression(lambda.body) - } - - fn scan_statement(&mut self, statement: StmtId) -> IResult<()> { - match self.interner.statement(&statement) { - HirStatement::Let(let_) => self.scan_expression(let_.expression), - HirStatement::Constrain(constrain) => self.scan_expression(constrain.0), - HirStatement::Assign(assign) => self.scan_expression(assign.expression), - HirStatement::For(for_) => self.scan_for(for_), - HirStatement::Break => Ok(()), - HirStatement::Continue => Ok(()), - HirStatement::Expression(expression) => self.scan_expression(expression), - HirStatement::Semi(semi) => self.scan_expression(semi), - HirStatement::Error => Ok(()), - HirStatement::Comptime(comptime) => { - let location = self.interner.statement_location(comptime); - let new_expr = self - .evaluate_comptime(comptime)? 
- .into_hir_expression(self.interner, location)?; - self.debug_comptime(new_expr, location); - self.interner.replace_statement(statement, HirStatement::Expression(new_expr)); - Ok(()) - } - } - } - - fn scan_for(&mut self, for_: HirForStatement) -> IResult<()> { - // We don't need to set self.in_loop since we're not actually evaluating this loop. - // We just need to push a scope so that if there's a `comptime { .. }` expr inside this - // loop, any variables it defines aren't accessible outside of it. - self.push_scope(); - self.scan_expression(for_.block)?; - self.pop_scope(); - Ok(()) - } - - fn inline_expression(&mut self, value: Value, expr: ExprId) -> IResult { - let location = self.interner.expr_location(&expr); - let new_expr_id = value.into_hir_expression(self.interner, location)?; - let new_expr = self.interner.expression(&new_expr_id); - self.interner.replace_expr(&expr, new_expr); - Ok(new_expr_id) - } - - fn debug_comptime(&mut self, expr: ExprId, location: Location) { - if Some(location.file) == self.debug_comptime_in_file { - let expr = expr.to_display_ast(self.interner); - self.debug_comptime_evaluations - .push(InterpreterError::debug_evaluate_comptime(expr, location)); - } - } -} diff --git a/compiler/noirc_frontend/src/hir/comptime/tests.rs b/compiler/noirc_frontend/src/hir/comptime/tests.rs index e8e05506c94..b4ffa1bd01d 100644 --- a/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -1,46 +1,73 @@ #![cfg(test)] -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; +use std::path::PathBuf; +use fm::{FileId, FileManager}; +use noirc_arena::Index; use noirc_errors::Location; use super::errors::InterpreterError; -use super::interpreter::Interpreter; use super::value::Value; -use crate::graph::CrateId; -use crate::hir::type_check::test::type_check_src_code; - -fn interpret_helper(src: &str, func_namespace: Vec) -> Result { - let (mut interner, main_id) = type_check_src_code(src, func_namespace); - let mut scopes = vec![HashMap::default()]; - let no_debug_evaluate_comptime = None; - let mut interpreter_errors = vec![]; - let mut interpreter = Interpreter::new( - &mut interner, - &mut scopes, - CrateId::Root(0), - no_debug_evaluate_comptime, - &mut interpreter_errors, - ); +use crate::elaborator::Elaborator; +use crate::hir::def_collector::dc_crate::DefCollector; +use crate::hir::def_collector::dc_mod::collect_defs; +use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleData}; +use crate::hir::{Context, ParsedFiles}; +use crate::parser::parse_program; + +fn interpret_helper(src: &str) -> Result { + let file = FileId::default(); + + // Can't use Index::test_new here for some reason, even with #[cfg(test)]. 
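// Hypothetical extra test in the new style set up by the single-argument helper in this
// hunk: the source is handed straight to `interpret`, which elaborates the whole program
// and locates `main` itself, so no function-namespace vector is needed any more.
#[test]
fn interpreter_addition_works() {
    let program = "comptime fn main() -> pub u32 {
        let x = 40;
        x + 2
    }";
    let result = interpret(program);
    assert_eq!(result, Value::U32(42));
}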
+ let module_id = LocalModuleId(Index::unsafe_zeroed()); + let mut modules = noirc_arena::Arena::default(); + let location = Location::new(Default::default(), file); + let root = LocalModuleId(modules.insert(ModuleData::new(None, location, false))); + assert_eq!(root, module_id); + + let file_manager = FileManager::new(&PathBuf::new()); + let parsed_files = ParsedFiles::new(); + let mut context = Context::new(file_manager, parsed_files); + context.def_interner.populate_dummy_operator_traits(); + + let krate = context.crate_graph.add_crate_root(FileId::dummy()); + + let (module, errors) = parse_program(src); + assert_eq!(errors.len(), 0); + let ast = module.into_sorted(); + + let def_map = CrateDefMap { root: module_id, modules, krate, extern_prelude: BTreeMap::new() }; + let mut collector = DefCollector::new(def_map); + + collect_defs(&mut collector, ast, FileId::dummy(), module_id, krate, &mut context, &[]); + context.def_maps.insert(krate, collector.def_map); + + let main = context.get_main_function(&krate).expect("Expected 'main' function"); + let mut elaborator = + Elaborator::elaborate_and_return_self(&mut context, krate, collector.items, None); + assert_eq!(elaborator.errors.len(), 0); + + let mut interpreter = elaborator.setup_interpreter(); let no_location = Location::dummy(); - interpreter.call_function(main_id, Vec::new(), HashMap::new(), no_location) + interpreter.call_function(main, Vec::new(), HashMap::new(), no_location) } -fn interpret(src: &str, func_namespace: Vec) -> Value { - interpret_helper(src, func_namespace).unwrap_or_else(|error| { +fn interpret(src: &str) -> Value { + interpret_helper(src).unwrap_or_else(|error| { panic!("Expected interpreter to exit successfully, but found {error:?}") }) } -fn interpret_expect_error(src: &str, func_namespace: Vec) -> InterpreterError { - interpret_helper(src, func_namespace).expect_err("Expected interpreter to error") +fn interpret_expect_error(src: &str) -> InterpreterError { + interpret_helper(src).expect_err("Expected interpreter to error") } #[test] fn interpreter_works() { let program = "comptime fn main() -> pub Field { 3 }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::Field(3u128.into())); } @@ -51,7 +78,7 @@ fn mutation_works() { x = 4; x }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::I8(4)); } @@ -62,7 +89,7 @@ fn mutating_references() { *x = 4; *x }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::I32(4)); } @@ -73,10 +100,22 @@ fn mutating_mutable_references() { *x = 4; *x }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::I64(4)); } +#[test] +fn mutation_leaks() { + let program = "comptime fn main() -> pub i8 { + let mut x = 3; + let y = &mut x; + *y = 5; + x + }"; + let result = interpret(program); + assert_eq!(result, Value::I8(5)); +} + #[test] fn mutating_arrays() { let program = "comptime fn main() -> pub u8 { @@ -84,7 +123,7 @@ fn mutating_arrays() { a1[1] = 22; a1[1] }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::U8(22)); } @@ -98,7 +137,7 @@ fn mutate_in_new_scope() { } x }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::U8(2)); } @@ -111,7 +150,7 @@ fn for_loop() { } x }"; - let result 
= interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::U8(15)); } @@ -124,7 +163,7 @@ fn for_loop_u16() { } x }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::U16(15)); } @@ -140,7 +179,7 @@ fn for_loop_with_break() { } x }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::U32(6)); } @@ -156,7 +195,7 @@ fn for_loop_with_continue() { } x }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::U64(11)); } @@ -165,7 +204,7 @@ fn assert() { let program = "comptime fn main() { assert(1 == 1); }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert_eq!(result, Value::Unit); } @@ -174,7 +213,7 @@ fn assert_fail() { let program = "comptime fn main() { assert(1 == 2); }"; - let result = interpret_expect_error(program, vec!["main".into()]); + let result = interpret_expect_error(program); assert!(matches!(result, InterpreterError::FailingConstraint { .. })); } @@ -184,7 +223,7 @@ fn lambda() { let f = |x: u8| x + 1; f(1) }"; - let result = interpret(program, vec!["main".into()]); + let result = interpret(program); assert!(matches!(result, Value::U8(2))); } @@ -202,21 +241,21 @@ fn non_deterministic_recursion() { fib(x - 1) + fib(x - 2) } }"; - let result = interpret(program, vec!["main".into(), "fib".into()]); + let result = interpret(program); assert_eq!(result, Value::U64(55)); } #[test] fn generic_functions() { let program = " - fn main() -> pub u8 { + comptime fn main() -> pub u8 { apply(1, |x| x + 1) } - fn apply(x: T, f: fn[Env](T) -> U) -> U { + comptime fn apply(x: T, f: fn[Env](T) -> U) -> U { f(x) } "; - let result = interpret(program, vec!["main".into(), "apply".into()]); + let result = interpret(program); assert!(matches!(result, Value::U8(2))); } diff --git a/compiler/noirc_frontend/src/hir/comptime/value.rs b/compiler/noirc_frontend/src/hir/comptime/value.rs index 9e15b73324f..f29b67bfc4e 100644 --- a/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -7,13 +7,14 @@ use iter_extended::{try_vecmap, vecmap}; use noirc_errors::Location; use crate::{ - ast::{ArrayLiteral, ConstructorExpression, Ident, IntegerBitSize, Signedness}, + ast::{ArrayLiteral, ConstructorExpression, Ident, IntegerBitSize, Signedness, TraitBound}, + hir::def_map::ModuleId, hir_def::expr::{HirArrayLiteral, HirConstructorExpression, HirIdent, HirLambda, ImplKind}, macros_api::{ Expression, ExpressionKind, HirExpression, HirLiteral, Literal, NodeInterner, Path, StructId, }, - node_interner::{ExprId, FuncId}, + node_interner::{ExprId, FuncId, TraitId}, parser::{self, NoirParser, TopLevelStatement}, token::{SpannedToken, Token, Tokens}, QuotedType, Shared, Type, TypeBindings, @@ -31,6 +32,7 @@ pub enum Value { I16(i16), I32(i32), I64(i64), + U1(bool), U8(u8), U16(u16), U32(u32), @@ -40,11 +42,16 @@ pub enum Value { Closure(HirLambda, Vec, Type), Tuple(Vec), Struct(HashMap, Value>, Type), - Pointer(Shared), + Pointer(Shared, /* auto_deref */ bool), Array(Vector, Type), Slice(Vector, Type), Code(Rc), StructDefinition(StructId), + TraitConstraint(TraitBound), + TraitDefinition(TraitId), + FunctionDefinition(FuncId), + ModuleDefinition(ModuleId), + Zeroed(Type), } impl Value { @@ -57,6 +64,7 @@ impl Value { Value::I16(_) => Type::Integer(Signedness::Signed, 
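// Sketch of how the new Value::U1 variant introduced in this hunk lowers back into an
// expression (the actual conversions follow below): the 1-bit value is stored as a bool
// but becomes an ordinary 0-or-1 integer literal. Simplified stand-in, not the real
// HirLiteral type.
fn u1_to_integer_literal(value: bool) -> (u128, bool) {
    // (magnitude, negative) pair, mirroring the shape of Literal::Integer
    (value as u128, false)
}

fn main() {
    assert_eq!(u1_to_integer_literal(false), (0, false));
    assert_eq!(u1_to_integer_literal(true), (1, false));
}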
IntegerBitSize::Sixteen), Value::I32(_) => Type::Integer(Signedness::Signed, IntegerBitSize::ThirtyTwo), Value::I64(_) => Type::Integer(Signedness::Signed, IntegerBitSize::SixtyFour), + Value::U1(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::One), Value::U8(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight), Value::U16(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::Sixteen), Value::U32(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::ThirtyTwo), @@ -75,10 +83,19 @@ impl Value { Value::Slice(_, typ) => return Cow::Borrowed(typ), Value::Code(_) => Type::Quoted(QuotedType::Quoted), Value::StructDefinition(_) => Type::Quoted(QuotedType::StructDefinition), - Value::Pointer(element) => { - let element = element.borrow().get_type().into_owned(); - Type::MutableReference(Box::new(element)) + Value::Pointer(element, auto_deref) => { + if *auto_deref { + element.borrow().get_type().into_owned() + } else { + let element = element.borrow().get_type().into_owned(); + Type::MutableReference(Box::new(element)) + } } + Value::TraitConstraint { .. } => Type::Quoted(QuotedType::TraitConstraint), + Value::TraitDefinition(_) => Type::Quoted(QuotedType::TraitDefinition), + Value::FunctionDefinition(_) => Type::Quoted(QuotedType::FunctionDefinition), + Value::ModuleDefinition(_) => Type::Quoted(QuotedType::Module), + Value::Zeroed(typ) => return Cow::Borrowed(typ), }) } @@ -115,6 +132,9 @@ impl Value { let value = (value as u128).into(); ExpressionKind::Literal(Literal::Integer(value, negative)) } + Value::U1(value) => { + ExpressionKind::Literal(Literal::Integer((value as u128).into(), false)) + } Value::U8(value) => { ExpressionKind::Literal(Literal::Integer((value as u128).into(), false)) } @@ -192,7 +212,13 @@ impl Value { } }; } - Value::Pointer(_) | Value::StructDefinition(_) => { + Value::Pointer(..) + | Value::StructDefinition(_) + | Value::TraitConstraint(_) + | Value::TraitDefinition(_) + | Value::FunctionDefinition(_) + | Value::Zeroed(_) + | Value::ModuleDefinition(_) => { return Err(InterpreterError::CannotInlineMacro { value: self, location }) } }; @@ -235,6 +261,9 @@ impl Value { let value = (value as u128).into(); HirExpression::Literal(HirLiteral::Integer(value, negative)) } + Value::U1(value) => { + HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) + } Value::U8(value) => { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } @@ -298,7 +327,13 @@ impl Value { HirExpression::Literal(HirLiteral::Slice(HirArrayLiteral::Standard(elements))) } Value::Code(block) => HirExpression::Unquote(unwrap_rc(block)), - Value::Pointer(_) | Value::StructDefinition(_) => { + Value::Pointer(..) 
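// Sketch of the effect of the new `auto_deref` flag on Value::Pointer, mirroring the
// `get_type` logic just above with simplified stand-in types: a transparent
// (auto-dereferenced) pointer reports its element type, while an explicit one reports
// `&mut T`.
#[derive(Debug, PartialEq)]
enum SimpleType {
    U8,
    MutableReference(Box<SimpleType>),
}

fn pointer_type(element: SimpleType, auto_deref: bool) -> SimpleType {
    if auto_deref {
        element
    } else {
        SimpleType::MutableReference(Box::new(element))
    }
}

fn main() {
    assert_eq!(pointer_type(SimpleType::U8, true), SimpleType::U8);
    assert_eq!(
        pointer_type(SimpleType::U8, false),
        SimpleType::MutableReference(Box::new(SimpleType::U8))
    );
}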
+ | Value::StructDefinition(_) + | Value::TraitConstraint(_) + | Value::TraitDefinition(_) + | Value::FunctionDefinition(_) + | Value::Zeroed(_) + | Value::ModuleDefinition(_) => { return Err(InterpreterError::CannotInlineMacro { value: self, location }) } }; @@ -366,6 +401,7 @@ impl Display for Value { Value::I16(value) => write!(f, "{value}"), Value::I32(value) => write!(f, "{value}"), Value::I64(value) => write!(f, "{value}"), + Value::U1(value) => write!(f, "{value}"), Value::U8(value) => write!(f, "{value}"), Value::U16(value) => write!(f, "{value}"), Value::U32(value) => write!(f, "{value}"), @@ -385,7 +421,7 @@ impl Display for Value { let fields = vecmap(fields, |(name, value)| format!("{}: {}", name, value)); write!(f, "{typename} {{ {} }}", fields.join(", ")) } - Value::Pointer(value) => write!(f, "&mut {}", value.borrow()), + Value::Pointer(value, _) => write!(f, "&mut {}", value.borrow()), Value::Array(values, _) => { let values = vecmap(values, ToString::to_string); write!(f, "[{}]", values.join(", ")) @@ -402,6 +438,11 @@ impl Display for Value { write!(f, " }}") } Value::StructDefinition(_) => write!(f, "(struct definition)"), + Value::TraitConstraint { .. } => write!(f, "(trait constraint)"), + Value::TraitDefinition(_) => write!(f, "(trait definition)"), + Value::FunctionDefinition(_) => write!(f, "(function definition)"), + Value::ModuleDefinition(_) => write!(f, "(module)"), + Value::Zeroed(typ) => write!(f, "(zeroed {typ})"), } } } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index b474ccff0cc..80186c19c76 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -2,20 +2,14 @@ use super::dc_mod::collect_defs; use super::errors::{DefCollectorErrorKind, DuplicateType}; use crate::elaborator::Elaborator; use crate::graph::CrateId; -use crate::hir::comptime::{Interpreter, InterpreterError}; +use crate::hir::comptime::InterpreterError; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; -use crate::{ResolvedGeneric, Type}; +use crate::hir::resolution::path_resolver; +use crate::hir::type_check::TypeCheckError; +use crate::{Generics, Type}; use crate::hir::resolution::import::{resolve_import, ImportDirective, PathResolution}; -use crate::hir::resolution::{ - collect_impls, collect_trait_impls, path_resolver, resolve_free_functions, resolve_globals, - resolve_impls, resolve_structs, resolve_trait_by_path, resolve_trait_impls, resolve_traits, - resolve_type_aliases, -}; -use crate::hir::type_check::{ - check_trait_impl_method_matches_declaration, type_check_func, TypeCheckError, TypeChecker, -}; use crate::hir::Context; use crate::macros_api::{MacroError, MacroProcessor}; @@ -39,16 +33,6 @@ use std::fmt::Write; use std::path::PathBuf; use std::vec; -#[derive(Default)] -pub struct ResolvedModule { - pub globals: Vec<(FileId, GlobalId)>, - pub functions: Vec<(FileId, FuncId)>, - pub trait_impl_functions: Vec<(FileId, FuncId)>, - pub debug_comptime_in_file: Option, - - pub errors: Vec<(CompilationError, FileId)>, -} - /// Stores all of the unresolved functions in a particular file/mod #[derive(Clone)] pub struct UnresolvedFunctions { @@ -68,35 +52,6 @@ impl UnresolvedFunctions { pub fn function_ids(&self) -> Vec { vecmap(&self.functions, |(_, id, _)| *id) } - - pub fn resolve_trait_bounds_trait_ids( - &mut self, - def_maps: &BTreeMap, - crate_id: 
CrateId, - ) -> Vec { - let mut errors = Vec::new(); - - for (local_id, _, func) in &mut self.functions { - let module = ModuleId { krate: crate_id, local_id: *local_id }; - - for bound in &mut func.def.where_clause { - match resolve_trait_by_path(def_maps, module, bound.trait_bound.trait_path.clone()) - { - Ok((trait_id, warning)) => { - bound.trait_bound.trait_id = Some(trait_id); - if let Some(warning) = warning { - errors.push(DefCollectorErrorKind::PathResolutionError(warning)); - } - } - Err(err) => { - errors.push(err); - } - } - } - } - - errors - } } pub struct UnresolvedStruct { @@ -124,12 +79,13 @@ pub struct UnresolvedTraitImpl { pub methods: UnresolvedFunctions, pub generics: UnresolvedGenerics, pub where_clause: Vec, + pub is_comptime: bool, // Every field after this line is filled in later in the elaborator pub trait_id: Option, pub impl_id: Option, pub resolved_object_type: Option, - pub resolved_generics: Vec, + pub resolved_generics: Generics, // The resolved generic on the trait itself. E.g. it is the `` in // `impl Foo for Bar { ... }` @@ -160,11 +116,11 @@ pub struct DefCollector { #[derive(Default)] pub struct CollectedItems { - pub(crate) functions: Vec, + pub functions: Vec, pub(crate) types: BTreeMap, pub(crate) type_aliases: BTreeMap, pub(crate) traits: BTreeMap, - pub(crate) globals: Vec, + pub globals: Vec, pub(crate) impls: ImplMap, pub(crate) trait_impls: Vec, } @@ -201,10 +157,16 @@ pub enum CompilationError { DebugComptimeScopeNotFound(Vec), } -impl CompilationError { - fn is_error(&self) -> bool { - let diagnostic = CustomDiagnostic::from(self); - diagnostic.is_error() +impl std::fmt::Display for CompilationError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + CompilationError::ParseError(error) => write!(f, "{}", error), + CompilationError::DefinitionError(error) => write!(f, "{}", error), + CompilationError::ResolverError(error) => write!(f, "{}", error), + CompilationError::TypeError(error) => write!(f, "{}", error), + CompilationError::InterpreterError(error) => write!(f, "{:?}", error), + CompilationError::DebugComptimeScopeNotFound(error) => write!(f, "{:?}", error), + } } } @@ -260,7 +222,7 @@ impl From for CompilationError { } impl DefCollector { - fn new(def_map: CrateDefMap) -> DefCollector { + pub fn new(def_map: CrateDefMap) -> DefCollector { DefCollector { def_map, imports: vec![], @@ -279,12 +241,11 @@ impl DefCollector { /// Collect all of the definitions in a given crate into a CrateDefMap /// Modules which are not a part of the module hierarchy starting with /// the root module, will be ignored. 
- pub fn collect( + pub fn collect_crate_and_dependencies( mut def_map: CrateDefMap, context: &mut Context, ast: SortedModule, root_file_id: FileId, - use_legacy: bool, debug_comptime_in_file: Option<&str>, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { @@ -302,7 +263,6 @@ impl DefCollector { errors.extend(CrateDefMap::collect_defs( dep.crate_id, context, - use_legacy, debug_comptime_in_file, macro_processors, )); @@ -347,7 +307,7 @@ impl DefCollector { for collected_import in std::mem::take(&mut def_collector.imports) { let module_id = collected_import.module_id; let resolved_import = if context.def_interner.track_references { - let mut references: Vec = Vec::new(); + let mut references: Vec> = Vec::new(); let resolved_import = resolve_import( crate_id, &collected_import, @@ -359,9 +319,14 @@ impl DefCollector { let file_id = current_def_map.file_id(module_id); for (referenced, ident) in references.iter().zip(&collected_import.path.segments) { - let reference = - ReferenceId::Reference(Location::new(ident.span(), file_id), false); - context.def_interner.add_reference(*referenced, reference); + let Some(referenced) = referenced else { + continue; + }; + context.def_interner.add_reference( + *referenced, + Location::new(ident.span(), file_id), + false, + ); } resolved_import @@ -412,125 +377,27 @@ impl DefCollector { } } - let handle_missing_file = |err| { - errors.push((CompilationError::DebugComptimeScopeNotFound(err), root_file_id)); - None - }; - let debug_comptime_in_file: Option = - debug_comptime_in_file.and_then(|debug_comptime_in_file| { - context - .file_manager - .find_by_path_suffix(debug_comptime_in_file) - .unwrap_or_else(handle_missing_file) - }); - - if !use_legacy { - let mut more_errors = Elaborator::elaborate( - context, - crate_id, - def_collector.items, - debug_comptime_in_file, - ); - errors.append(&mut more_errors); - return errors; - } - - let mut resolved_module = - ResolvedModule { errors, debug_comptime_in_file, ..Default::default() }; - - // We must first resolve and intern the globals before we can resolve any stmts inside each function. - // Each function uses its own resolver with a newly created ScopeForest, and must be resolved again to be within a function's scope - // - // Additionally, we must resolve integer globals before structs since structs may refer to - // the values of integer globals as numeric generics. - let (literal_globals, other_globals) = filter_literal_globals(def_collector.items.globals); - - resolved_module.resolve_globals(context, literal_globals, crate_id); - - resolved_module.errors.extend(resolve_type_aliases( - context, - def_collector.items.type_aliases, - crate_id, - )); - - resolved_module.errors.extend(resolve_traits( - context, - def_collector.items.traits, - crate_id, - )); - - // Must resolve structs before we resolve globals. - resolved_module.errors.extend(resolve_structs( - context, - def_collector.items.types, - crate_id, - )); - - // Bind trait impls to their trait. Collect trait functions, that have a - // default implementation, which hasn't been overridden. - resolved_module.errors.extend(collect_trait_impls( - context, - crate_id, - &mut def_collector.items.trait_impls, - )); - - // Before we resolve any function symbols we must go through our impls and - // re-collect the methods within into their proper module. This cannot be - // done before resolution since we need to be able to resolve the type of the - // impl since that determines the module we should collect into. 
- // - // These are resolved after trait impls so that struct methods are chosen - // over trait methods if there are name conflicts. - resolved_module.errors.extend(collect_impls(context, crate_id, &def_collector.items.impls)); - - // We must wait to resolve non-integer globals until after we resolve structs since struct - // globals will need to reference the struct type they're initialized to ensure they are valid. - resolved_module.resolve_globals(context, other_globals, crate_id); - - // Resolve each function in the crate. This is now possible since imports have been resolved - resolved_module.functions = resolve_free_functions( - &mut context.def_interner, - crate_id, - &context.def_maps, - def_collector.items.functions, - None, - &mut resolved_module.errors, - ); - - resolved_module.functions.extend(resolve_impls( - &mut context.def_interner, - crate_id, - &context.def_maps, - def_collector.items.impls, - &mut resolved_module.errors, - )); + let debug_comptime_in_file = debug_comptime_in_file.and_then(|debug_comptime_in_file| { + let file = context.file_manager.find_by_path_suffix(debug_comptime_in_file); + file.unwrap_or_else(|error| { + errors.push((CompilationError::DebugComptimeScopeNotFound(error), root_file_id)); + None + }) + }); - resolved_module.trait_impl_functions = resolve_trait_impls( - context, - def_collector.items.trait_impls, - crate_id, - &mut resolved_module.errors, - ); + let mut more_errors = + Elaborator::elaborate(context, crate_id, def_collector.items, debug_comptime_in_file); + errors.append(&mut more_errors); for macro_processor in macro_processors { macro_processor.process_typed_ast(&crate_id, context).unwrap_or_else( |(macro_err, file_id)| { - resolved_module.errors.push((macro_err.into(), file_id)); + errors.push((macro_err.into(), file_id)); }, ); } - let cycle_errors = context.def_interner.check_for_dependency_cycles(); - let cycles_present = !cycle_errors.is_empty(); - resolved_module.errors.extend(cycle_errors); - - resolved_module.type_check(context); - - if !cycles_present { - resolved_module.evaluate_comptime(&mut context.def_interner, crate_id); - } - - resolved_module.errors + errors } } @@ -545,18 +412,8 @@ fn add_import_reference( return; } - let referenced = match def_id { - crate::macros_api::ModuleDefId::ModuleId(module_id) => ReferenceId::Module(module_id), - crate::macros_api::ModuleDefId::FunctionId(func_id) => ReferenceId::Function(func_id), - crate::macros_api::ModuleDefId::TypeId(struct_id) => ReferenceId::Struct(struct_id), - crate::macros_api::ModuleDefId::TraitId(trait_id) => ReferenceId::Trait(trait_id), - crate::macros_api::ModuleDefId::TypeAliasId(type_alias_id) => { - ReferenceId::Alias(type_alias_id) - } - crate::macros_api::ModuleDefId::GlobalId(global_id) => ReferenceId::Global(global_id), - }; - let reference = ReferenceId::Reference(Location::new(name.span(), file_id), false); - interner.add_reference(referenced, reference); + let location = Location::new(name.span(), file_id); + interner.add_module_def_id_reference(def_id, location, false); } fn inject_prelude( @@ -616,86 +473,3 @@ pub fn filter_literal_globals( _ => false, }) } - -impl ResolvedModule { - fn type_check(&mut self, context: &mut Context) { - self.type_check_globals(&mut context.def_interner); - self.type_check_functions(&mut context.def_interner); - self.type_check_trait_impl_function(&mut context.def_interner); - } - - fn type_check_globals(&mut self, interner: &mut NodeInterner) { - for (file_id, global_id) in self.globals.iter() { - for error in 
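// Sketch of the `--debug-comptime-in-file` handling above, with a hypothetical stand-in
// for FileManager::find_by_path_suffix: a missing file becomes a recorded compilation
// error (DebugComptimeScopeNotFound in the real code) and collection continues with None
// instead of aborting.
fn find_by_path_suffix(files: &[&str], suffix: &str) -> Result<Option<usize>, String> {
    match files.iter().position(|file| file.ends_with(suffix)) {
        Some(index) => Ok(Some(index)),
        None => Err(format!("no file ending in `{suffix}`")),
    }
}

fn main() {
    let mut errors: Vec<String> = Vec::new();
    let files = ["src/main.nr", "src/lib.nr"];

    let debug_file: Option<usize> = Some("lib.nr").and_then(|suffix| {
        find_by_path_suffix(&files, suffix).unwrap_or_else(|error| {
            errors.push(error);
            None
        })
    });

    assert_eq!(debug_file, Some(1));
    assert!(errors.is_empty());
}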
TypeChecker::check_global(*global_id, interner) { - self.errors.push((error.into(), *file_id)); - } - } - } - - fn type_check_functions(&mut self, interner: &mut NodeInterner) { - for (file, func) in self.functions.iter() { - for error in type_check_func(interner, *func) { - self.errors.push((error.into(), *file)); - } - } - } - - fn type_check_trait_impl_function(&mut self, interner: &mut NodeInterner) { - for (file, func) in self.trait_impl_functions.iter() { - for error in check_trait_impl_method_matches_declaration(interner, *func) { - self.errors.push((error.into(), *file)); - } - for error in type_check_func(interner, *func) { - self.errors.push((error.into(), *file)); - } - } - } - - /// Evaluate all `comptime` expressions in this module - fn evaluate_comptime(&mut self, interner: &mut NodeInterner, crate_id: CrateId) { - if self.count_errors() == 0 { - let mut scopes = vec![HashMap::default()]; - let mut interpreter_errors = vec![]; - let mut interpreter = Interpreter::new( - interner, - &mut scopes, - crate_id, - self.debug_comptime_in_file, - &mut interpreter_errors, - ); - - for (_file, global) in &self.globals { - if let Err(error) = interpreter.scan_global(*global) { - self.errors.push(error.into_compilation_error_pair()); - } - } - - for (_file, function) in &self.functions { - // The file returned by the error may be different than the file the - // function is in so only use the error's file id. - if let Err(error) = interpreter.scan_function(*function) { - self.errors.push(error.into_compilation_error_pair()); - } - } - self.errors.extend( - interpreter_errors.into_iter().map(InterpreterError::into_compilation_error_pair), - ); - } - } - - fn resolve_globals( - &mut self, - context: &mut Context, - literal_globals: Vec, - crate_id: CrateId, - ) { - let globals = resolve_globals(context, literal_globals, crate_id); - self.globals.extend(globals.globals); - self.errors.extend(globals.errors); - } - - /// Counts the number of errors (minus warnings) this program currently has - fn count_errors(&self) -> usize { - self.errors.iter().filter(|(error, _)| error.is_error()).count() - } -} diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 48985116f4f..e5893dc43d5 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -14,7 +14,7 @@ use crate::ast::{ TypeImpl, }; use crate::macros_api::NodeInterner; -use crate::node_interner::ReferenceId; +use crate::node_interner::{ModuleAttributes, ReferenceId}; use crate::{ graph::CrateId, hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait}, @@ -84,13 +84,13 @@ pub fn collect_defs( }); } - errors.extend(collector.collect_globals(context, ast.globals)); + errors.extend(collector.collect_globals(context, ast.globals, crate_id)); errors.extend(collector.collect_traits(context, ast.traits, crate_id)); errors.extend(collector.collect_structs(context, ast.types, crate_id)); - errors.extend(collector.collect_type_aliases(context, ast.type_aliases)); + errors.extend(collector.collect_type_aliases(context, ast.type_aliases, crate_id)); errors.extend(collector.collect_functions(context, ast.functions, crate_id)); @@ -106,6 +106,7 @@ impl<'a> ModCollector<'a> { &mut self, context: &mut Context, globals: Vec, + crate_id: CrateId, ) -> Vec<(CompilationError, fm::FileId)> { let mut errors = vec![]; for global in globals { @@ -115,6 +116,7 @@ impl<'a> ModCollector<'a> { global, 
self.file_id, self.module_id, + crate_id, ); if let Some(error) = error { @@ -171,6 +173,7 @@ impl<'a> ModCollector<'a> { let module = ModuleId { krate, local_id: self.module_id }; for (_, func_id, noir_function) in &mut unresolved_functions.functions { + // Attach any trait constraints on the impl to the function noir_function.def.where_clause.append(&mut trait_impl.where_clause.clone()); let location = Location::new(noir_function.def.span, self.file_id); context.def_interner.push_function(*func_id, &noir_function.def, module, location); @@ -185,6 +188,7 @@ impl<'a> ModCollector<'a> { generics: trait_impl.impl_generics, where_clause: trait_impl.where_clause, trait_generics: trait_impl.trait_generics, + is_comptime: trait_impl.is_comptime, // These last fields are filled later on trait_id: None, @@ -318,7 +322,10 @@ impl<'a> ModCollector<'a> { // And store the TypeId -> StructType mapping somewhere it is reachable self.def_collector.items.types.insert(id, unresolved); - context.def_interner.add_definition_location(ReferenceId::Struct(id)); + context.def_interner.add_definition_location( + ReferenceId::Struct(id), + Some(ModuleId { krate, local_id: self.module_id }), + ); } definition_errors } @@ -329,6 +336,7 @@ impl<'a> ModCollector<'a> { &mut self, context: &mut Context, type_aliases: Vec, + krate: CrateId, ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; for type_alias in type_aliases { @@ -365,7 +373,10 @@ impl<'a> ModCollector<'a> { self.def_collector.items.type_aliases.insert(type_alias_id, unresolved); - context.def_interner.add_definition_location(ReferenceId::Alias(type_alias_id)); + context.def_interner.add_definition_location( + ReferenceId::Alias(type_alias_id), + Some(ModuleId { krate, local_id: self.module_id }), + ); } errors } @@ -483,6 +494,7 @@ impl<'a> ModCollector<'a> { let global_id = context.def_interner.push_empty_global( name.clone(), trait_id.0.local_id, + krate, self.file_id, vec![], false, @@ -532,7 +544,10 @@ impl<'a> ModCollector<'a> { }; context.def_interner.push_empty_trait(trait_id, &unresolved, resolved_generics); - context.def_interner.add_definition_location(ReferenceId::Trait(trait_id)); + context.def_interner.add_definition_location( + ReferenceId::Trait(trait_id), + Some(ModuleId { krate, local_id: self.module_id }), + ); self.def_collector.items.traits.insert(trait_id, unresolved); } @@ -649,9 +664,7 @@ impl<'a> ModCollector<'a> { ) { Ok(child_mod_id) => { // Track that the "foo" in `mod foo;` points to the module "foo" - let referenced = ReferenceId::Module(child_mod_id); - let reference = ReferenceId::Reference(location, false); - context.def_interner.add_reference(referenced, reference); + context.def_interner.add_module_reference(child_mod_id, location); errors.extend(collect_defs( self.def_collector, @@ -722,7 +735,14 @@ impl<'a> ModCollector<'a> { return Err(err); } - context.def_interner.add_module_location(mod_id, mod_location); + context.def_interner.add_module_attributes( + mod_id, + ModuleAttributes { + name: mod_name.0.contents.clone(), + location: mod_location, + parent: self.module_id, + }, + ); } Ok(mod_id) @@ -845,12 +865,14 @@ pub(crate) fn collect_global( global: LetStatement, file_id: FileId, module_id: LocalModuleId, + crate_id: CrateId, ) -> (UnresolvedGlobal, Option<(CompilationError, FileId)>) { let name = global.pattern.name_ident().clone(); let global_id = interner.push_empty_global( name.clone(), module_id, + crate_id, file_id, global.attributes.clone(), matches!(global.pattern, 
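// Sketch connecting the ModuleAttributes recorded just above to `super` path resolution:
// storing each module's parent is what lets a later pass walk one level up when it sees a
// `super::` segment. Simplified stand-in types, not the interner or def-map API.
#[derive(Debug)]
struct ModuleAttrs {
    name: String,
    parent: Option<usize>, // index of the parent module; None for the crate root
}

fn resolve_super(modules: &[ModuleAttrs], current: usize) -> Result<usize, String> {
    modules[current]
        .parent
        .ok_or_else(|| format!("`{}` has no super module", modules[current].name))
}

fn main() {
    let modules = vec![
        ModuleAttrs { name: "crate root".into(), parent: None },
        ModuleAttrs { name: "foo".into(), parent: Some(0) },
    ];
    assert_eq!(resolve_super(&modules, 1), Ok(0));
    assert!(resolve_super(&modules, 0).is_err());
}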
Pattern::Mutable { .. }), diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs index 37c5a460667..1ccf8dd4792 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -70,6 +70,15 @@ pub enum DefCollectorErrorKind { MacroError(MacroError), #[error("The only supported types of numeric generics are integers, fields, and booleans")] UnsupportedNumericGenericType { ident: Ident, typ: UnresolvedTypeData }, + #[error("impl has stricter requirements than trait")] + ImplIsStricterThanTrait { + constraint_typ: crate::Type, + constraint_name: String, + constraint_generics: Vec, + constraint_span: Span, + trait_method_name: String, + trait_method_span: Span, + }, } /// An error struct that macro processors can return. @@ -251,6 +260,24 @@ impl<'a> From<&'a DefCollectorErrorKind> for Diagnostic { ident.0.span(), ) } + DefCollectorErrorKind::ImplIsStricterThanTrait { constraint_typ, constraint_name, constraint_generics, constraint_span, trait_method_name, trait_method_span } => { + let mut constraint_name_with_generics = constraint_name.to_owned(); + if !constraint_generics.is_empty() { + constraint_name_with_generics.push('<'); + for generic in constraint_generics.iter() { + constraint_name_with_generics.push_str(generic.to_string().as_str()); + } + constraint_name_with_generics.push('>'); + } + + let mut diag = Diagnostic::simple_error( + "impl has stricter requirements than trait".to_string(), + format!("impl has extra requirement `{constraint_typ}: {constraint_name_with_generics}`"), + *constraint_span, + ); + diag.add_secondary(format!("definition of `{trait_method_name}` from trait"), *trait_method_span); + diag + } } } } diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 43d1548dc29..9de96ab06e8 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -73,7 +73,6 @@ impl CrateDefMap { pub fn collect_defs( crate_id: CrateId, context: &mut Context, - use_legacy: bool, debug_comptime_in_file: Option<&str>, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { @@ -118,12 +117,11 @@ impl CrateDefMap { }; // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect( + errors.extend(DefCollector::collect_crate_and_dependencies( def_map, context, ast, root_file_id, - use_legacy, debug_comptime_in_file, macro_processors, )); diff --git a/compiler/noirc_frontend/src/hir/resolution/functions.rs b/compiler/noirc_frontend/src/hir/resolution/functions.rs deleted file mode 100644 index fe46796ed24..00000000000 --- a/compiler/noirc_frontend/src/hir/resolution/functions.rs +++ /dev/null @@ -1,84 +0,0 @@ -use std::collections::BTreeMap; - -use fm::FileId; -use iter_extended::vecmap; - -use crate::{ - graph::CrateId, - hir::{ - def_collector::dc_crate::{CompilationError, UnresolvedFunctions}, - def_map::{CrateDefMap, ModuleId}, - }, - node_interner::{FuncId, NodeInterner, TraitImplId}, - ResolvedGeneric, Type, -}; - -use super::{path_resolver::StandardPathResolver, Resolver}; - -#[allow(clippy::too_many_arguments)] -pub(crate) fn resolve_function_set( - interner: &mut NodeInterner, - crate_id: CrateId, - def_maps: &BTreeMap, - mut unresolved_functions: UnresolvedFunctions, - self_type: Option, - trait_impl_id: Option, - impl_generics: Vec, - errors: &mut Vec<(CompilationError, 
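// Illustrative Noir source (kept as a plain Rust string) for the ImplIsStricterThanTrait
// error defined above: the trait places no bound on implementors of `greet`, but the impl
// adds `where T: Default2`, so the impl has stricter requirements than the trait. The
// names and exact shape here are assumptions for illustration only.
const STRICTER_IMPL_EXAMPLE: &str = r#"
    trait Default2 {
        fn default2() -> Self;
    }

    trait Greet {
        fn greet(self);
    }

    struct Wrapper<T> {
        inner: T,
    }

    // error: impl has stricter requirements than trait
    impl<T> Greet for Wrapper<T> where T: Default2 {
        fn greet(self) {}
    }
"#;

fn main() {
    assert!(STRICTER_IMPL_EXAMPLE.contains("where T: Default2"));
}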
FileId)>, -) -> Vec<(FileId, FuncId)> { - let file_id = unresolved_functions.file_id; - - let where_clause_errors = - unresolved_functions.resolve_trait_bounds_trait_ids(def_maps, crate_id); - errors.extend(where_clause_errors.iter().cloned().map(|e| (e.into(), file_id))); - - vecmap(unresolved_functions.functions, |(mod_id, func_id, func)| { - let module_id = ModuleId { krate: crate_id, local_id: mod_id }; - let path_resolver = StandardPathResolver::new(module_id); - - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file_id); - // Must use set_generics here to ensure we re-use the same generics from when - // the impl was originally collected. Otherwise the function will be using different - // TypeVariables for the same generic, causing it to instantiate incorrectly. - resolver.set_generics(impl_generics.clone()); - resolver.set_self_type(self_type.clone()); - resolver.set_trait_id(unresolved_functions.trait_id); - resolver.set_trait_impl_id(trait_impl_id); - - // Without this, impl methods can accidentally be placed in contracts. See #3254 - if self_type.is_some() { - resolver.set_in_contract(false); - } - - let (hir_func, func_meta, errs) = resolver.resolve_function(func, func_id); - interner.push_fn_meta(func_meta, func_id); - interner.update_fn(func_id, hir_func); - errors.extend(errs.iter().cloned().map(|e| (e.into(), file_id))); - (file_id, func_id) - }) -} - -pub(crate) fn resolve_free_functions( - interner: &mut NodeInterner, - crate_id: CrateId, - def_maps: &BTreeMap, - collected_functions: Vec, - self_type: Option, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - collected_functions - .into_iter() - .flat_map(|unresolved_functions| { - resolve_function_set( - interner, - crate_id, - def_maps, - unresolved_functions, - self_type.clone(), - None, - vec![], // no impl generics - errors, - ) - }) - .collect() -} diff --git a/compiler/noirc_frontend/src/hir/resolution/globals.rs b/compiler/noirc_frontend/src/hir/resolution/globals.rs deleted file mode 100644 index bcda4e75d3d..00000000000 --- a/compiler/noirc_frontend/src/hir/resolution/globals.rs +++ /dev/null @@ -1,46 +0,0 @@ -use super::{path_resolver::StandardPathResolver, resolver::Resolver, take_errors}; -use crate::{ - graph::CrateId, - hir::{ - def_collector::dc_crate::{CompilationError, UnresolvedGlobal}, - def_map::ModuleId, - Context, - }, - node_interner::GlobalId, -}; -use fm::FileId; -use iter_extended::vecmap; - -#[derive(Default)] -pub(crate) struct ResolvedGlobals { - pub(crate) globals: Vec<(FileId, GlobalId)>, - pub(crate) errors: Vec<(CompilationError, FileId)>, -} - -pub(crate) fn resolve_globals( - context: &mut Context, - globals: Vec, - crate_id: CrateId, -) -> ResolvedGlobals { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - let globals = vecmap(globals, |global| { - let module_id = ModuleId { local_id: global.module_id, krate: crate_id }; - let path_resolver = StandardPathResolver::new(module_id); - - let mut resolver = Resolver::new( - &mut context.def_interner, - &path_resolver, - &context.def_maps, - global.file_id, - ); - - let hir_stmt = resolver.resolve_global_let(global.stmt_def, global.global_id); - errors.extend(take_errors(global.file_id, resolver)); - - let statement_id = context.def_interner.get_global(global.global_id).let_statement; - context.def_interner.replace_statement(statement_id, hir_stmt); - - (global.file_id, global.global_id) - }); - ResolvedGlobals { globals, errors } -} diff --git 
a/compiler/noirc_frontend/src/hir/resolution/impls.rs b/compiler/noirc_frontend/src/hir/resolution/impls.rs deleted file mode 100644 index 7efd1eed86e..00000000000 --- a/compiler/noirc_frontend/src/hir/resolution/impls.rs +++ /dev/null @@ -1,145 +0,0 @@ -use std::collections::BTreeMap; - -use fm::FileId; - -use crate::ast::ItemVisibility; -use crate::{ - graph::CrateId, - hir::{ - def_collector::{ - dc_crate::{CompilationError, ImplMap}, - errors::DefCollectorErrorKind, - }, - def_map::{CrateDefMap, ModuleId}, - Context, - }, - node_interner::{FuncId, NodeInterner}, - Type, -}; - -use super::{ - errors::ResolverError, functions, get_module_mut, get_struct_type, - path_resolver::StandardPathResolver, resolver::Resolver, take_errors, -}; - -/// Go through the list of impls and add each function within to the scope -/// of the module defined by its type. -pub(crate) fn collect_impls( - context: &mut Context, - crate_id: CrateId, - collected_impls: &ImplMap, -) -> Vec<(CompilationError, FileId)> { - let interner = &mut context.def_interner; - let def_maps = &mut context.def_maps; - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - - for ((unresolved_type, module_id), methods) in collected_impls { - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: *module_id, krate: crate_id }); - - let file = def_maps[&crate_id].file_id(*module_id); - - for (generics, span, unresolved) in methods { - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(generics); - let typ = resolver.resolve_type(unresolved_type.clone()); - - errors.extend(take_errors(unresolved.file_id, resolver)); - - if let Some(struct_type) = get_struct_type(&typ) { - let struct_type = struct_type.borrow(); - - // `impl`s are only allowed on types defined within the current crate - if struct_type.id.krate() != crate_id { - let span = *span; - let type_name = struct_type.name.to_string(); - let error = DefCollectorErrorKind::ForeignImpl { span, type_name }; - errors.push((error.into(), unresolved.file_id)); - continue; - } - - // Grab the module defined by the struct type. Note that impls are a case - // where the module the methods are added to is not the same as the module - // they are resolved in. - let module = get_module_mut(def_maps, struct_type.id.module_id()); - - for (_, method_id, method) in &unresolved.functions { - // If this method was already declared, remove it from the module so it cannot - // be accessed with the `TypeName::method` syntax. We'll check later whether the - // object types in each method overlap or not. If they do, we issue an error. - // If not, that is specialization which is allowed. 
- if module - .declare_function( - method.name_ident().clone(), - ItemVisibility::Public, - *method_id, - ) - .is_err() - { - module.remove_function(method.name_ident()); - } - } - // Prohibit defining impls for primitive types if we're not in the stdlib - } else if typ != Type::Error && !crate_id.is_stdlib() { - let span = *span; - let error = DefCollectorErrorKind::NonStructTypeInImpl { span }; - errors.push((error.into(), unresolved.file_id)); - } - } - } - errors -} - -pub(crate) fn resolve_impls( - interner: &mut NodeInterner, - crate_id: CrateId, - def_maps: &BTreeMap, - collected_impls: ImplMap, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - let mut file_method_ids = Vec::new(); - - for ((unresolved_type, module_id), methods) in collected_impls { - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: module_id, krate: crate_id }); - - let file = def_maps[&crate_id].file_id(module_id); - - for (generics, _, functions) in methods { - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(&generics); - let generics = resolver.get_generics().to_vec(); - let self_type = resolver.resolve_type(unresolved_type.clone()); - - let mut file_func_ids = functions::resolve_function_set( - interner, - crate_id, - def_maps, - functions, - Some(self_type.clone()), - None, - generics, - errors, - ); - if self_type != Type::Error { - for (file_id, method_id) in &file_func_ids { - let method_name = interner.function_name(method_id).to_owned(); - - if let Some(first_fn) = - interner.add_method(&self_type, method_name.clone(), *method_id, false) - { - let error = ResolverError::DuplicateDefinition { - name: method_name, - first_span: interner.function_ident(&first_fn).span(), - second_span: interner.function_ident(method_id).span(), - }; - errors.push((error.into(), *file_id)); - } - } - } - file_method_ids.append(&mut file_func_ids); - } - } - - file_method_ids -} diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index 710c12a91bf..10e18248dec 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -41,6 +41,8 @@ pub enum PathResolutionError { Unresolved(Ident), #[error("{0} is private and not visible from the current module")] Private(Ident), + #[error("There is no super module")] + NoSuper(Span), } #[derive(Debug)] @@ -73,6 +75,9 @@ impl<'a> From<&'a PathResolutionError> for CustomDiagnostic { format!("{ident} is private"), ident.span(), ), + PathResolutionError::NoSuper(span) => { + CustomDiagnostic::simple_error(error.to_string(), String::new(), *span) + } } } } @@ -81,7 +86,7 @@ pub fn resolve_import( crate_id: CrateId, import_directive: &ImportDirective, def_maps: &BTreeMap, - path_references: &mut Option<&mut Vec>, + path_references: &mut Option<&mut Vec>>, ) -> Result { let module_scope = import_directive.module_id; let NamespaceResolution { @@ -126,7 +131,7 @@ fn resolve_path_to_ns( crate_id: CrateId, importing_crate: CrateId, def_maps: &BTreeMap, - path_references: &mut Option<&mut Vec>, + path_references: &mut Option<&mut Vec>>, ) -> NamespaceResolutionResult { let import_path = &import_directive.path.segments; let def_map = &def_maps[&crate_id]; @@ -187,6 +192,25 @@ fn resolve_path_to_ns( path_references, importing_crate, ), + + crate::ast::PathKind::Super => { + if let Some(parent_module_id) = + def_maps[&crate_id].modules[import_directive.module_id.0].parent 
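// Sketch of the declare-or-remove rule in the deleted collect_impls just above, with a
// plain HashMap standing in for the module scope: a method name that was already declared
// is removed so `TypeName::method` can no longer name it unambiguously; whether the
// duplicate impls actually overlap is checked later.
use std::collections::HashMap;

fn declare_or_remove(scope: &mut HashMap<String, u32>, name: &str, func_id: u32) {
    if scope.contains_key(name) {
        // Mirrors `module.remove_function(..)` after a failed `declare_function(..)`.
        scope.remove(name);
    } else {
        scope.insert(name.to_string(), func_id);
    }
}

fn main() {
    let mut scope = HashMap::new();
    declare_or_remove(&mut scope, "eq", 1);
    declare_or_remove(&mut scope, "eq", 2);
    // The ambiguous name is no longer directly addressable via `TypeName::eq`.
    assert!(!scope.contains_key("eq"));
}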
+ { + resolve_name_in_module( + crate_id, + importing_crate, + import_path, + parent_module_id, + def_maps, + path_references, + ) + } else { + let span_start = import_directive.path.span().start(); + let span = Span::from(span_start..span_start + 5); // 5 == "super".len() + Err(PathResolutionError::NoSuper(span)) + } + } } } @@ -196,7 +220,7 @@ fn resolve_path_from_crate_root( import_path: &[Ident], def_maps: &BTreeMap, - path_references: &mut Option<&mut Vec>, + path_references: &mut Option<&mut Vec>>, ) -> NamespaceResolutionResult { resolve_name_in_module( crate_id, @@ -214,7 +238,7 @@ fn resolve_name_in_module( import_path: &[Ident], starting_mod: LocalModuleId, def_maps: &BTreeMap, - path_references: &mut Option<&mut Vec>, + path_references: &mut Option<&mut Vec>>, ) -> NamespaceResolutionResult { let def_map = &def_maps[&krate]; let mut current_mod_id = ModuleId { krate, local_id: starting_mod }; @@ -247,7 +271,7 @@ fn resolve_name_in_module( current_mod_id = match typ { ModuleDefId::ModuleId(id) => { if let Some(path_references) = path_references { - path_references.push(ReferenceId::Module(id)); + path_references.push(Some(ReferenceId::Module(id))); } id } @@ -255,14 +279,14 @@ fn resolve_name_in_module( // TODO: If impls are ever implemented, types can be used in a path ModuleDefId::TypeId(id) => { if let Some(path_references) = path_references { - path_references.push(ReferenceId::Struct(id)); + path_references.push(Some(ReferenceId::Struct(id))); } id.module_id() } ModuleDefId::TypeAliasId(_) => panic!("type aliases cannot be used in type namespace"), ModuleDefId::TraitId(id) => { if let Some(path_references) = path_references { - path_references.push(ReferenceId::Trait(id)); + path_references.push(Some(ReferenceId::Trait(id))); } id.0 } @@ -309,7 +333,7 @@ fn resolve_external_dep( current_def_map: &CrateDefMap, directive: &ImportDirective, def_maps: &BTreeMap, - path_references: &mut Option<&mut Vec>, + path_references: &mut Option<&mut Vec>>, importing_crate: CrateId, ) -> NamespaceResolutionResult { // Use extern_prelude to get the dep @@ -327,6 +351,11 @@ fn resolve_external_dep( // See `singleton_import.nr` test case for a check that such cases are handled elsewhere. let path_without_crate_name = &path[1..]; + // Given that we skipped the first segment, record that it doesn't refer to any module or type. 
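// Illustrative Noir source (as a plain Rust string) for the `super` paths enabled by the
// PathKind::Super arm above: `bar::baz` reaches its sibling `foo` through the parent
// module. The module and function names are assumptions for illustration.
const SUPER_IMPORT_EXAMPLE: &str = r#"
    pub fn foo() {}

    mod bar {
        use super::foo;

        fn baz() {
            foo();
        }
    }
"#;

fn main() {
    assert!(SUPER_IMPORT_EXAMPLE.contains("use super::foo"));
}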
+ if let Some(path_references) = path_references { + path_references.push(None); + } + let path = Path { segments: path_without_crate_name.to_vec(), kind: PathKind::Plain, diff --git a/compiler/noirc_frontend/src/hir/resolution/mod.rs b/compiler/noirc_frontend/src/hir/resolution/mod.rs index 8c16a9cca80..01a3fe856e5 100644 --- a/compiler/noirc_frontend/src/hir/resolution/mod.rs +++ b/compiler/noirc_frontend/src/hir/resolution/mod.rs @@ -8,51 +8,3 @@ pub mod errors; pub mod import; pub mod path_resolver; -pub mod resolver; - -mod functions; -mod globals; -mod impls; -mod structs; -mod traits; -mod type_aliases; - -pub(crate) use functions::resolve_free_functions; -pub(crate) use globals::resolve_globals; -pub(crate) use impls::{collect_impls, resolve_impls}; -pub(crate) use structs::resolve_structs; -pub(crate) use traits::{ - collect_trait_impls, resolve_trait_by_path, resolve_trait_impls, resolve_traits, -}; -pub(crate) use type_aliases::resolve_type_aliases; - -use crate::{ - graph::CrateId, - hir::{ - def_collector::dc_crate::CompilationError, - def_map::{CrateDefMap, ModuleData, ModuleId}, - }, - Shared, StructType, Type, -}; -use fm::FileId; -use iter_extended::vecmap; -use resolver::Resolver; -use std::collections::BTreeMap; - -fn take_errors(file_id: FileId, resolver: Resolver<'_>) -> Vec<(CompilationError, FileId)> { - vecmap(resolver.take_errors(), |e| (e.into(), file_id)) -} - -fn get_module_mut( - def_maps: &mut BTreeMap, - module: ModuleId, -) -> &mut ModuleData { - &mut def_maps.get_mut(&module.krate).unwrap().modules[module.local_id.0] -} - -fn get_struct_type(typ: &Type) -> Option<&Shared> { - match typ { - Type::Struct(definition, _) => Some(definition), - _ => None, - } -} diff --git a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs index c3dc76b635f..7cd44a84018 100644 --- a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs @@ -9,12 +9,13 @@ use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; pub trait PathResolver { /// Resolve the given path returning the resolved ModuleDefId. /// If `path_references` is `Some`, a `ReferenceId` for each segment in `path` - /// will be resolved and pushed. + /// will be resolved and pushed (some entries will be None if they don't refer to + /// a module or type). 
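// Sketch of why the collected references became Option<ReferenceId>: the entries are
// zipped with the path segments later, so a segment that names nothing (for example the
// crate name of an external dependency, skipped above) still needs a placeholder entry to
// keep the two lists aligned. Simplified types, not the real interner API.
fn main() {
    let segments = ["dep_crate", "foo", "Bar"];
    // One entry per segment; None marks a segment with nothing to reference.
    let references: Vec<Option<&str>> = vec![None, Some("module foo"), Some("struct Bar")];

    for (reference, segment) in references.iter().zip(&segments) {
        let Some(reference) = reference else {
            continue; // same `let .. else` skip used when adding references for imports
        };
        println!("{segment} -> {reference}");
    }
}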
fn resolve( &self, def_maps: &BTreeMap, path: Path, - path_references: &mut Option<&mut Vec>, + path_references: &mut Option<&mut Vec>>, ) -> PathResolutionResult; fn local_module_id(&self) -> LocalModuleId; @@ -38,7 +39,7 @@ impl PathResolver for StandardPathResolver { &self, def_maps: &BTreeMap, path: Path, - path_references: &mut Option<&mut Vec>, + path_references: &mut Option<&mut Vec>>, ) -> PathResolutionResult { resolve_path(def_maps, self.module_id, path, path_references) } @@ -58,7 +59,7 @@ pub fn resolve_path( def_maps: &BTreeMap, module_id: ModuleId, path: Path, - path_references: &mut Option<&mut Vec>, + path_references: &mut Option<&mut Vec>>, ) -> PathResolutionResult { // lets package up the path into an ImportDirective and resolve it using that let import = diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs deleted file mode 100644 index 856a769c9dd..00000000000 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ /dev/null @@ -1,2204 +0,0 @@ -// Fix usage of intern and resolve -// In some places, we do intern, however in others we are resolving and interning -// Ideally, I want to separate the interning and resolving abstractly -// so separate functions, but combine them naturally -// This could be possible, if lowering, is given a mutable map/scope as a parameter. -// So that it can match Idents to Ids. This is close to what the Scope map looks like -// Except for the num_times_used parameter. -// We can instead have a map from Ident to Into and implement that trait on ResolverMeta -// -// -// XXX: Change mentions of intern to resolve. In regards to the above comment -// -// XXX: Resolver does not check for unused functions -use acvm::acir::AcirField; - -use crate::hir_def::expr::{ - HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCapturedVar, - HirCastExpression, HirConstructorExpression, HirExpression, HirIdent, HirIfExpression, - HirIndexExpression, HirInfixExpression, HirLambda, HirLiteral, HirMemberAccess, - HirMethodCallExpression, HirPrefixExpression, ImplKind, -}; - -use crate::hir_def::function::FunctionBody; -use crate::hir_def::traits::{Trait, TraitConstraint}; -use crate::macros_api::SecondaryAttribute; -use crate::token::Attributes; -use regex::Regex; -use std::collections::{BTreeMap, BTreeSet, HashSet}; -use std::rc::Rc; - -use crate::ast::{ - ArrayLiteral, BinaryOpKind, BlockExpression, Expression, ExpressionKind, ForRange, - FunctionDefinition, FunctionKind, FunctionReturnType, Ident, ItemVisibility, LValue, - LetStatement, Literal, NoirFunction, NoirStruct, NoirTypeAlias, Param, Path, PathKind, Pattern, - Statement, StatementKind, TraitBound, UnaryOp, UnresolvedGeneric, UnresolvedGenerics, - UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, - Visibility, ERROR_IDENT, -}; -use crate::graph::CrateId; -use crate::hir::def_map::{ModuleDefId, TryFromModuleDefId, MAIN_FUNCTION}; -use crate::hir::{ - comptime::{Interpreter, Value}, - def_map::CrateDefMap, - resolution::path_resolver::PathResolver, -}; -use crate::hir_def::stmt::{HirAssignStatement, HirForStatement, HirLValue, HirPattern}; -use crate::node_interner::{ - DefinitionId, DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, NodeInterner, StmtId, - StructId, TraitId, TraitImplId, TraitMethodId, TypeAliasId, -}; -use crate::{ - GenericTypeVars, Generics, Kind, ResolvedGeneric, Shared, StructType, Type, TypeAlias, - TypeVariable, TypeVariableKind, -}; 
-use fm::FileId; -use iter_extended::vecmap; -use noirc_errors::{Location, Span, Spanned}; - -use crate::hir::scope::{ - Scope as GenericScope, ScopeForest as GenericScopeForest, ScopeTree as GenericScopeTree, -}; -use crate::hir_def::{ - function::{FuncMeta, HirFunction}, - stmt::{HirConstrainStatement, HirLetStatement, HirStatement}, -}; - -use super::errors::{PubPosition, ResolverError}; -use super::import::PathResolution; - -pub const SELF_TYPE_NAME: &str = "Self"; -pub const WILDCARD_TYPE: &str = "_"; - -type Scope = GenericScope; -type ScopeTree = GenericScopeTree; -type ScopeForest = GenericScopeForest; - -pub struct LambdaContext { - pub captures: Vec, - /// the index in the scope tree - /// (sometimes being filled by ScopeTree's find method) - pub scope_index: usize, -} - -/// The primary jobs of the Resolver are to validate that every variable found refers to exactly 1 -/// definition in scope, and to convert the AST into the HIR. -/// -/// A Resolver is a short-lived struct created to resolve a top-level definition. -/// One of these is created for each function definition and struct definition. -/// This isn't strictly necessary to its function, it could be refactored out in the future. -pub struct Resolver<'a> { - scopes: ScopeForest, - path_resolver: &'a dyn PathResolver, - def_maps: &'a BTreeMap, - trait_id: Option, - trait_bounds: Vec, - pub interner: &'a mut NodeInterner, - errors: Vec, - file: FileId, - - /// Set to the current type if we're resolving an impl - self_type: Option, - - /// If we're currently resolving methods within a trait impl, this will be set - /// to the corresponding trait impl ID. - current_trait_impl: Option, - - /// The current dependency item we're resolving. - /// Used to link items to their dependencies in the dependency graph - current_item: Option, - - /// In-resolution names - /// - /// This needs to be a set because we can have multiple in-resolution - /// names when resolving structs that are declared in reverse order of their - /// dependencies, such as in the following case: - /// - /// ``` - /// struct Wrapper { - /// value: Wrapped - /// } - /// struct Wrapped { - /// } - /// ``` - resolving_ids: BTreeSet, - - /// True if the current module is a contract. - /// This is usually determined by self.path_resolver.module_id(), but it can - /// be overridden for impls. Impls are an odd case since the methods within resolve - /// as if they're in the parent module, but should be placed in a child module. - /// Since they should be within a child module, in_contract is manually set to false - /// for these so we can still resolve them in the parent module without them being in a contract. - in_contract: bool, - - /// Contains a mapping of the current struct or functions's generics to - /// unique type variables if we're resolving a struct. Empty otherwise. - /// This is a Vec rather than a map to preserve the order a functions generics - /// were declared in. - generics: Vec, - - /// When resolving lambda expressions, we need to keep track of the variables - /// that are captured. We do this in order to create the hidden environment - /// parameter for the lambda function. - lambda_stack: Vec, - - /// True if we're currently resolving an unconstrained function - in_unconstrained_fn: bool, - - /// How many loops we're currently within. - /// This increases by 1 at the start of a loop, and decreases by 1 when it ends. 
- nested_loops: u32, -} - -/// ResolverMetas are tagged onto each definition to track how many times they are used -#[derive(Debug, PartialEq, Eq)] -struct ResolverMeta { - num_times_used: usize, - ident: HirIdent, - warn_if_unused: bool, -} - -pub enum ResolvePathError { - WrongKind, - NotFound, -} - -impl<'a> Resolver<'a> { - pub fn new( - interner: &'a mut NodeInterner, - path_resolver: &'a dyn PathResolver, - def_maps: &'a BTreeMap, - file: FileId, - ) -> Resolver<'a> { - let module_id = path_resolver.module_id(); - let in_contract = module_id.module(def_maps).is_contract; - - Self { - path_resolver, - def_maps, - trait_id: None, - trait_bounds: Vec::new(), - scopes: ScopeForest::default(), - interner, - self_type: None, - generics: Vec::new(), - errors: Vec::new(), - lambda_stack: Vec::new(), - current_trait_impl: None, - current_item: None, - resolving_ids: BTreeSet::new(), - file, - in_contract, - in_unconstrained_fn: false, - nested_loops: 0, - } - } - - pub fn set_self_type(&mut self, self_type: Option) { - self.self_type = self_type; - } - - pub fn set_trait_id(&mut self, trait_id: Option) { - self.trait_id = trait_id; - } - - pub fn set_trait_impl_id(&mut self, impl_id: Option) { - self.current_trait_impl = impl_id; - } - - pub fn get_self_type(&mut self) -> Option<&Type> { - self.self_type.as_ref() - } - - fn push_err(&mut self, err: ResolverError) { - self.errors.push(err); - } - - /// This turns function parameters of the form: - /// fn foo(x: impl Bar) - /// - /// into - /// fn foo(x: T0_impl_Bar) where T0_impl_Bar: Bar - fn desugar_impl_trait_args(&mut self, func: &mut NoirFunction, func_id: FuncId) { - let mut impl_trait_generics = HashSet::new(); - let mut counter: usize = 0; - for parameter in func.def.parameters.iter_mut() { - if let UnresolvedTypeData::TraitAsType(path, args) = ¶meter.typ.typ { - let mut new_generic_ident: Ident = - format!("T{}_impl_{}", func_id, path.as_string()).into(); - let mut new_generic_path = Path::from_ident(new_generic_ident.clone()); - let new_generic = UnresolvedGeneric::from(new_generic_ident.clone()); - while impl_trait_generics.contains(&new_generic) - || self.lookup_generic_or_global_type(&new_generic_path).is_some() - { - new_generic_ident = - format!("T{}_impl_{}_{}", func_id, path.as_string(), counter).into(); - new_generic_path = Path::from_ident(new_generic_ident.clone()); - counter += 1; - } - impl_trait_generics.insert(UnresolvedGeneric::from(new_generic_ident.clone())); - - let is_synthesized = true; - let new_generic_type_data = - UnresolvedTypeData::Named(new_generic_path, vec![], is_synthesized); - let new_generic_type = - UnresolvedType { typ: new_generic_type_data.clone(), span: None }; - let new_trait_bound = TraitBound { - trait_path: path.clone(), - trait_id: None, - trait_generics: args.to_vec(), - }; - let new_trait_constraint = UnresolvedTraitConstraint { - typ: new_generic_type, - trait_bound: new_trait_bound, - }; - - parameter.typ.typ = new_generic_type_data; - func.def.generics.push(new_generic_ident.into()); - func.def.where_clause.push(new_trait_constraint); - } - } - self.add_generics(&impl_trait_generics.into_iter().collect()); - } - - /// Resolving a function involves interning the metadata - /// interning any statements inside of the function - /// and interning the function itself - /// We resolve and lower the function at the same time - /// Since lowering would require scope data, unless we add an extra resolution field to the AST - pub fn resolve_function( - mut self, - mut func: NoirFunction, - 
func_id: FuncId, - ) -> (HirFunction, FuncMeta, Vec) { - self.scopes.start_function(); - self.current_item = Some(DependencyId::Function(func_id)); - - // Check whether the function has globals in the local module and add them to the scope - self.resolve_local_globals(); - self.add_generics(&func.def.generics); - - self.desugar_impl_trait_args(&mut func, func_id); - self.trait_bounds = func.def.where_clause.clone(); - - let is_low_level_or_oracle = func - .attributes() - .function - .as_ref() - .map_or(false, |func| func.is_low_level() || func.is_oracle()); - let (hir_func, func_meta) = self.intern_function(func, func_id); - let func_scope_tree = self.scopes.end_function(); - - // The arguments to low-level and oracle functions are always unused so we do not produce warnings for them. - if !is_low_level_or_oracle { - self.check_for_unused_variables_in_scope_tree(func_scope_tree); - } - - self.trait_bounds.clear(); - (hir_func, func_meta, self.errors) - } - - pub fn resolve_trait_function( - &mut self, - name: &Ident, - generics: &UnresolvedGenerics, - parameters: &[(Ident, UnresolvedType)], - return_type: &FunctionReturnType, - where_clause: &[UnresolvedTraitConstraint], - func_id: FuncId, - ) -> (HirFunction, FuncMeta) { - self.scopes.start_function(); - - // Check whether the function has globals in the local module and add them to the scope - self.resolve_local_globals(); - - self.trait_bounds = where_clause.to_vec(); - - let kind = FunctionKind::Normal; - let def = FunctionDefinition { - name: name.clone(), - attributes: Attributes::empty(), - is_unconstrained: false, - is_comptime: false, - visibility: ItemVisibility::Public, // Trait functions are always public - generics: generics.clone(), - parameters: vecmap(parameters, |(name, typ)| Param { - visibility: Visibility::Private, - pattern: Pattern::Identifier(name.clone()), - typ: typ.clone(), - span: name.span(), - }), - body: BlockExpression { statements: Vec::new() }, - span: name.span(), - where_clause: where_clause.to_vec(), - return_type: return_type.clone(), - return_visibility: Visibility::Private, - }; - - let (hir_func, func_meta) = self.intern_function(NoirFunction { kind, def }, func_id); - let _ = self.scopes.end_function(); - // Don't check the scope tree for unused variables, they can't be used in a declaration anyway. - self.trait_bounds.clear(); - (hir_func, func_meta) - } - - fn check_for_unused_variables_in_scope_tree(&mut self, scope_decls: ScopeTree) { - let mut unused_vars = Vec::new(); - for scope in scope_decls.0.into_iter() { - Resolver::check_for_unused_variables_in_local_scope(scope, &mut unused_vars); - } - - for unused_var in unused_vars.iter() { - if let Some(definition_info) = self.interner.try_definition(unused_var.id) { - let name = &definition_info.name; - if name != ERROR_IDENT && !definition_info.is_global() { - let ident = Ident(Spanned::from(unused_var.location.span, name.to_owned())); - self.push_err(ResolverError::UnusedVariable { ident }); - } - } - } - } - - fn check_for_unused_variables_in_local_scope(decl_map: Scope, unused_vars: &mut Vec) { - let unused_variables = decl_map.filter(|(variable_name, metadata)| { - let has_underscore_prefix = variable_name.starts_with('_'); // XXX: This is used for development mode, and will be removed - metadata.warn_if_unused && metadata.num_times_used == 0 && !has_underscore_prefix - }); - unused_vars.extend(unused_variables.map(|(_, meta)| meta.ident.clone())); - } - - /// Run the given function in a new scope. 
- fn in_new_scope T>(&mut self, f: F) -> T { - self.scopes.start_scope(); - let ret = f(self); - let scope = self.scopes.end_scope(); - self.check_for_unused_variables_in_scope_tree(scope.into()); - ret - } - - fn add_variable_decl( - &mut self, - name: Ident, - mutable: bool, - allow_shadowing: bool, - definition: DefinitionKind, - ) -> HirIdent { - self.add_variable_decl_inner(name, mutable, allow_shadowing, true, definition) - } - - fn add_variable_decl_inner( - &mut self, - name: Ident, - mutable: bool, - allow_shadowing: bool, - warn_if_unused: bool, - definition: DefinitionKind, - ) -> HirIdent { - if definition.is_global() { - return self.add_global_variable_decl(name, definition); - } - - let location = Location::new(name.span(), self.file); - let var_name = name.0.contents.clone(); - let id = self.interner.push_definition(var_name, mutable, false, definition, location); - let ident = HirIdent::non_trait_method(id, location); - let resolver_meta = - ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused }; - - let scope = self.scopes.get_mut_scope(); - let old_value = scope.add_key_value(name.0.contents.clone(), resolver_meta); - - if !allow_shadowing { - if let Some(old_value) = old_value { - self.push_err(ResolverError::DuplicateDefinition { - name: name.0.contents, - first_span: old_value.ident.location.span, - second_span: location.span, - }); - } - } - - ident - } - - fn add_global_variable_decl(&mut self, name: Ident, definition: DefinitionKind) -> HirIdent { - let scope = self.scopes.get_mut_scope(); - - // This check is necessary to maintain the same definition ids in the interner. Currently, each function uses a new resolver that has its own ScopeForest and thus global scope. - // We must first check whether an existing definition ID has been inserted as otherwise there will be multiple definitions for the same global statement. - // This leads to an error in evaluation where the wrong definition ID is selected when evaluating a statement using the global. The check below prevents this error. 
- let mut global_id = None; - let global = self.interner.get_all_globals(); - for global_info in global { - if global_info.ident == name - && global_info.local_id == self.path_resolver.local_module_id() - { - global_id = Some(global_info.id); - } - } - - let (ident, resolver_meta) = if let Some(id) = global_id { - let global = self.interner.get_global(id); - let hir_ident = HirIdent::non_trait_method(global.definition_id, global.location); - let ident = hir_ident.clone(); - let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; - (hir_ident, resolver_meta) - } else { - let location = Location::new(name.span(), self.file); - let var_name = name.0.contents.clone(); - let id = self.interner.push_definition(var_name, false, false, definition, location); - let ident = HirIdent::non_trait_method(id, location); - let resolver_meta = - ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused: true }; - (ident, resolver_meta) - }; - - let old_global_value = scope.add_key_value(name.0.contents.clone(), resolver_meta); - if let Some(old_global_value) = old_global_value { - self.push_err(ResolverError::DuplicateDefinition { - name: name.0.contents.clone(), - first_span: old_global_value.ident.location.span, - second_span: name.span(), - }); - } - ident - } - - // Checks for a variable having been declared before - // variable declaration and definition cannot be separate in Noir - // Once the variable has been found, intern and link `name` to this definition - // return the IdentId of `name` - // - // If a variable is not found, then an error is logged and a dummy id - // is returned, for better error reporting UX - fn find_variable_or_default(&mut self, name: &Ident) -> (HirIdent, usize) { - self.find_variable(name).unwrap_or_else(|error| { - self.push_err(error); - let id = DefinitionId::dummy_id(); - let location = Location::new(name.span(), self.file); - (HirIdent::non_trait_method(id, location), 0) - }) - } - - fn find_variable(&mut self, name: &Ident) -> Result<(HirIdent, usize), ResolverError> { - // Find the definition for this Ident - let scope_tree = self.scopes.current_scope_tree(); - let variable = scope_tree.find(&name.0.contents); - - let location = Location::new(name.span(), self.file); - if let Some((variable_found, scope)) = variable { - variable_found.num_times_used += 1; - let id = variable_found.ident.id; - Ok((HirIdent::non_trait_method(id, location), scope)) - } else { - Err(ResolverError::VariableNotDeclared { - name: name.0.contents.clone(), - span: name.0.span(), - }) - } - } - - fn intern_function(&mut self, func: NoirFunction, id: FuncId) -> (HirFunction, FuncMeta) { - let func_meta = self.extract_meta(&func, id); - - if func.def.is_unconstrained { - self.in_unconstrained_fn = true; - } - - let hir_func = match func.kind { - FunctionKind::Builtin | FunctionKind::LowLevel | FunctionKind::Oracle => { - HirFunction::empty() - } - FunctionKind::Normal | FunctionKind::Recursive => { - let expr_id = self.intern_block(func.def.body); - self.interner.push_expr_location(expr_id, func.def.span, self.file); - HirFunction::unchecked_from_expr(expr_id) - } - }; - - (hir_func, func_meta) - } - - pub fn resolve_trait_constraint( - &mut self, - constraint: UnresolvedTraitConstraint, - ) -> Option { - let typ = self.resolve_type(constraint.typ); - let trait_generics = - vecmap(constraint.trait_bound.trait_generics, |typ| self.resolve_type(typ)); - - let span = constraint.trait_bound.trait_path.span(); - let the_trait = 
self.lookup_trait_or_error(constraint.trait_bound.trait_path)?; - let trait_id = the_trait.id; - - let expected_generics = the_trait.generics.len(); - let actual_generics = trait_generics.len(); - - if actual_generics != expected_generics { - let item_name = the_trait.name.to_string(); - self.push_err(ResolverError::IncorrectGenericCount { - span, - item_name, - actual: actual_generics, - expected: expected_generics, - }); - } - - Some(TraitConstraint { typ, trait_id, trait_generics }) - } - - /// Translates an UnresolvedType into a Type and appends any - /// freshly created TypeVariables created to new_variables. - fn resolve_type_inner(&mut self, typ: UnresolvedType) -> Type { - use crate::ast::UnresolvedTypeData::*; - - let resolved_type = match typ.typ { - FieldElement => Type::FieldElement, - Array(size, elem) => { - let elem = Box::new(self.resolve_type_inner(*elem)); - let size = self.convert_expression_type(size); - Type::Array(Box::new(size), elem) - } - Slice(elem) => { - let elem = Box::new(self.resolve_type_inner(*elem)); - Type::Slice(elem) - } - Expression(expr) => self.convert_expression_type(expr), - Integer(sign, bits) => Type::Integer(sign, bits), - Bool => Type::Bool, - String(size) => { - let resolved_size = self.convert_expression_type(size); - Type::String(Box::new(resolved_size)) - } - FormatString(size, fields) => { - let resolved_size = self.convert_expression_type(size); - let fields = self.resolve_type_inner(*fields); - Type::FmtString(Box::new(resolved_size), Box::new(fields)) - } - Quoted(quoted) => Type::Quoted(quoted), - Unit => Type::Unit, - Unspecified => Type::Error, - Error => Type::Error, - Named(path, args, _) => self.resolve_named_type(path, args), - TraitAsType(path, args) => self.resolve_trait_as_type(path, args), - - Tuple(fields) => Type::Tuple(vecmap(fields, |field| self.resolve_type_inner(field))), - Function(args, ret, env) => { - let args = vecmap(args, |arg| self.resolve_type_inner(arg)); - let ret = Box::new(self.resolve_type_inner(*ret)); - - // expect() here is valid, because the only places we don't have a span are omitted types - // e.g. 
a function without return type implicitly has a spanless UnresolvedType::Unit return type - // To get an invalid env type, the user must explicitly specify the type, which will have a span - let env_span = - env.span.expect("Unexpected missing span for closure environment type"); - - let env = Box::new(self.resolve_type_inner(*env)); - - match *env { - Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _, _) => { - Type::Function(args, ret, env) - } - _ => { - self.push_err(ResolverError::InvalidClosureEnvironment { - typ: *env, - span: env_span, - }); - Type::Error - } - } - } - MutableReference(element) => { - Type::MutableReference(Box::new(self.resolve_type_inner(*element))) - } - Parenthesized(typ) => self.resolve_type_inner(*typ), - Resolved(id) => self.interner.get_quoted_type(id).clone(), - }; - - if let Type::Struct(_, _) = resolved_type { - if let Some(unresolved_span) = typ.span { - // Record the location of the type reference - self.interner.push_type_ref_location( - resolved_type.clone(), - Location::new(unresolved_span, self.file), - ); - } - } - resolved_type - } - - fn find_generic(&self, target_name: &str) -> Option<&ResolvedGeneric> { - self.generics.iter().find(|generic| generic.name.as_ref() == target_name) - } - - fn resolve_named_type(&mut self, path: Path, args: Vec) -> Type { - if args.is_empty() { - if let Some(typ) = self.lookup_generic_or_global_type(&path) { - return typ; - } - } - - // Check if the path is a type variable first. We currently disallow generics on type - // variables since we do not support higher-kinded types. - if path.segments.len() == 1 { - let name = &path.last_segment().0.contents; - - if name == SELF_TYPE_NAME { - if let Some(self_type) = self.self_type.clone() { - if !args.is_empty() { - self.push_err(ResolverError::GenericsOnSelfType { span: path.span() }); - } - return self_type; - } - } - } - - let span = path.span(); - let mut args = vecmap(args, |arg| self.resolve_type_inner(arg)); - - if let Some(type_alias) = self.lookup_type_alias(path.clone()) { - let type_alias = type_alias.borrow(); - let expected_generic_count = type_alias.generics.len(); - let type_alias_string = type_alias.to_string(); - let id = type_alias.id; - - self.verify_generics_count(expected_generic_count, &mut args, span, || { - type_alias_string - }); - - if let Some(item) = self.current_item { - self.interner.add_type_alias_dependency(item, id); - } - - // Collecting Type Alias references [Location]s to be used by LSP in order - // to resolve the definition of the type alias - self.interner.add_type_alias_ref(id, Location::new(span, self.file)); - - // Because there is no ordering to when type aliases (and other globals) are resolved, - // it is possible for one to refer to an Error type and issue no error if it is set - // equal to another type alias. Fixing this fully requires an analysis to create a DFG - // of definition ordering, but for now we have an explicit check here so that we at - // least issue an error that the type was not found instead of silently passing. 
- let alias = self.interner.get_type_alias(id); - return Type::Alias(alias, args); - } - - match self.lookup_struct_or_error(path) { - Some(struct_type) => { - if self.resolving_ids.contains(&struct_type.borrow().id) { - self.push_err(ResolverError::SelfReferentialStruct { - span: struct_type.borrow().name.span(), - }); - - return Type::Error; - } - - let expected_generic_count = struct_type.borrow().generics.len(); - if !self.in_contract - && self - .interner - .struct_attributes(&struct_type.borrow().id) - .iter() - .any(|attr| matches!(attr, SecondaryAttribute::Abi(_))) - { - self.push_err(ResolverError::AbiAttributeOutsideContract { - span: struct_type.borrow().name.span(), - }); - } - self.verify_generics_count(expected_generic_count, &mut args, span, || { - struct_type.borrow().to_string() - }); - - if let Some(current_item) = self.current_item { - let dependency_id = struct_type.borrow().id; - self.interner.add_type_dependency(current_item, dependency_id); - } - - Type::Struct(struct_type, args) - } - None => Type::Error, - } - } - - fn resolve_trait_as_type(&mut self, path: Path, args: Vec) -> Type { - let args = vecmap(args, |arg| self.resolve_type_inner(arg)); - - if let Some(t) = self.lookup_trait_or_error(path) { - Type::TraitAsType(t.id, Rc::new(t.name.to_string()), args) - } else { - Type::Error - } - } - - fn verify_generics_count( - &mut self, - expected_count: usize, - args: &mut Vec, - span: Span, - type_name: impl FnOnce() -> String, - ) { - if args.len() != expected_count { - self.errors.push(ResolverError::IncorrectGenericCount { - span, - item_name: type_name(), - actual: args.len(), - expected: expected_count, - }); - - // Fix the generic count so we can continue typechecking - args.resize_with(expected_count, || Type::Error); - } - } - - fn lookup_generic_or_global_type(&mut self, path: &Path) -> Option { - if path.segments.len() == 1 { - let name = &path.last_segment().0.contents; - if let Some(generic) = self.find_generic(name) { - // We always insert a `TypeKind::Normal` as we do not support explicit numeric generics - // in the resolver - return Some(Type::NamedGeneric( - generic.type_var.clone(), - generic.name.clone(), - Kind::Normal, - )); - }; - } - - // If we cannot find a local generic of the same name, try to look up a global - match self.path_resolver.resolve(self.def_maps, path.clone(), &mut None) { - Ok(PathResolution { module_def_id: ModuleDefId::GlobalId(id), error }) => { - if let Some(current_item) = self.current_item { - self.interner.add_global_dependency(current_item, id); - } - - if let Some(error) = error { - self.push_err(error.into()); - } - Some(Type::Constant(self.eval_global_as_array_length(id, path))) - } - _ => None, - } - } - - fn convert_expression_type(&mut self, length: UnresolvedTypeExpression) -> Type { - match length { - UnresolvedTypeExpression::Variable(path) => { - self.lookup_generic_or_global_type(&path).unwrap_or_else(|| { - self.push_err(ResolverError::NoSuchNumericTypeVariable { path }); - Type::Constant(0) - }) - } - UnresolvedTypeExpression::Constant(int, _) => Type::Constant(int), - UnresolvedTypeExpression::BinaryOperation(lhs, op, rhs, _) => { - let (lhs_span, rhs_span) = (lhs.span(), rhs.span()); - let lhs = self.convert_expression_type(*lhs); - let rhs = self.convert_expression_type(*rhs); - - match (lhs, rhs) { - (Type::Constant(lhs), Type::Constant(rhs)) => { - Type::Constant(op.function()(lhs, rhs)) - } - (lhs, _) => { - let span = - if !matches!(lhs, Type::Constant(_)) { lhs_span } else { rhs_span }; - 
self.push_err(ResolverError::InvalidArrayLengthExpr { span }); - Type::Constant(0) - } - } - } - } - } - - fn get_ident_from_path(&mut self, path: Path) -> (HirIdent, usize) { - let location = Location::new(path.span(), self.file); - - let error = match path.as_ident().map(|ident| self.find_variable(ident)) { - Some(Ok(found)) => return found, - // Try to look it up as a global, but still issue the first error if we fail - Some(Err(error)) => match self.lookup_global(path) { - Ok(id) => return (HirIdent::non_trait_method(id, location), 0), - Err(_) => error, - }, - None => match self.lookup_global(path) { - Ok(id) => return (HirIdent::non_trait_method(id, location), 0), - Err(error) => error, - }, - }; - self.push_err(error); - let id = DefinitionId::dummy_id(); - (HirIdent::non_trait_method(id, location), 0) - } - - /// Translates an UnresolvedType to a Type - pub fn resolve_type(&mut self, typ: UnresolvedType) -> Type { - let span = typ.span; - let resolved_type = self.resolve_type_inner(typ); - if resolved_type.is_nested_slice() { - self.errors.push(ResolverError::NestedSlices { span: span.unwrap() }); - } - resolved_type - } - - pub fn resolve_type_alias( - mut self, - unresolved: NoirTypeAlias, - alias_id: TypeAliasId, - ) -> (Type, Generics, Vec) { - let generics = self.add_generics(&unresolved.generics); - self.resolve_local_globals(); - - self.current_item = Some(DependencyId::Alias(alias_id)); - let typ = self.resolve_type(unresolved.typ); - - (typ, generics, self.errors) - } - - pub fn take_errors(self) -> Vec { - self.errors - } - - /// Return the current generics. - /// Needed to keep referring to the same type variables across many - /// methods in a single impl. - pub fn get_generics(&self) -> &[ResolvedGeneric] { - &self.generics - } - - /// Set the current generics that are in scope. - /// Unlike add_generics, this function will not create any new type variables, - /// opting to reuse the existing ones it is directly given. - pub fn set_generics(&mut self, generics: Vec) { - self.generics = generics; - } - - /// Translates a (possibly Unspecified) UnresolvedType to a Type. - /// Any UnresolvedType::Unspecified encountered are replaced with fresh type variables. - fn resolve_inferred_type(&mut self, typ: UnresolvedType) -> Type { - match &typ.typ { - UnresolvedTypeData::Unspecified => self.interner.next_type_variable(), - _ => self.resolve_type(typ), - } - } - - /// Add the given generics to scope. - /// Each generic will have a fresh Shared associated with it. - pub fn add_generics(&mut self, generics: &UnresolvedGenerics) -> Generics { - vecmap(generics, |generic| { - // Map the generic to a fresh type variable - let id = self.interner.next_type_variable_id(); - let typevar = TypeVariable::unbound(id); - let ident = generic.ident(); - let span = ident.0.span(); - - // Check for name collisions of this generic - let name = Rc::new(ident.0.contents.clone()); - - let resolved_generic = ResolvedGeneric { - name: name.clone(), - type_var: typevar, - // We only support numeric generics in the elaborator - kind: Kind::Normal, - span, - }; - if let Some(generic) = self.find_generic(&name) { - self.errors.push(ResolverError::DuplicateDefinition { - name: ident.0.contents.clone(), - first_span: generic.span, - second_span: span, - }); - } else { - self.generics.push(resolved_generic.clone()); - } - - resolved_generic - }) - } - - /// Add the given existing generics to scope. - /// This is useful for adding the same generics to many items. E.g. 
apply impl generics - /// to each function in the impl or trait generics to each item in the trait. - pub fn add_existing_generics( - &mut self, - unresolved_generics: &UnresolvedGenerics, - generics: &GenericTypeVars, - ) { - assert_eq!(unresolved_generics.len(), generics.len()); - - for (unresolved_generic, typevar) in unresolved_generics.iter().zip(generics) { - self.add_existing_generic( - unresolved_generic, - unresolved_generic.span(), - typevar.clone(), - ); - } - } - - pub fn add_existing_generic( - &mut self, - unresolved_generic: &UnresolvedGeneric, - span: Span, - typevar: TypeVariable, - ) { - let name = &unresolved_generic.ident().0.contents; - - // Check for name collisions of this generic - let rc_name = Rc::new(name.clone()); - - if let Some(generic) = self.find_generic(&rc_name) { - self.errors.push(ResolverError::DuplicateDefinition { - name: name.clone(), - first_span: generic.span, - second_span: span, - }); - } else { - let resolved_generic = ResolvedGeneric { - name: rc_name, - type_var: typevar.clone(), - kind: unresolved_generic - .kind() - .expect("ICE: Deprecated code should only support normal kinds"), - span, - }; - self.generics.push(resolved_generic); - } - } - - pub fn resolve_struct_fields( - mut self, - unresolved: NoirStruct, - struct_id: StructId, - ) -> (Generics, Vec<(Ident, Type)>, Vec) { - let generics = self.add_generics(&unresolved.generics); - - // Check whether the struct definition has globals in the local module and add them to the scope - self.resolve_local_globals(); - - self.current_item = Some(DependencyId::Struct(struct_id)); - - self.resolving_ids.insert(struct_id); - let fields = vecmap(unresolved.fields, |(ident, typ)| (ident, self.resolve_type(typ))); - self.resolving_ids.remove(&struct_id); - - (generics, fields, self.errors) - } - - fn resolve_local_globals(&mut self) { - let globals = vecmap(self.interner.get_all_globals(), |global| { - (global.id, global.local_id, global.ident.clone()) - }); - for (id, local_module_id, name) in globals { - if local_module_id == self.path_resolver.local_module_id() { - let definition = DefinitionKind::Global(id); - self.add_global_variable_decl(name, definition); - } - } - } - - /// TODO: This is currently only respected for generic free functions - /// there's a bunch of other places where trait constraints can pop up - fn resolve_trait_constraints( - &mut self, - where_clause: &[UnresolvedTraitConstraint], - ) -> Vec { - where_clause - .iter() - .cloned() - .filter_map(|constraint| self.resolve_trait_constraint(constraint)) - .collect() - } - - /// Extract metadata from a NoirFunction - /// to be used in analysis and intern the function parameters - /// Prerequisite: self.add_generics() has already been called with the given - /// function's generics, including any generics from the impl, if any. 
- fn extract_meta(&mut self, func: &NoirFunction, func_id: FuncId) -> FuncMeta { - let location = Location::new(func.name_ident().span(), self.file); - let id = self.interner.function_definition_id(func_id); - let name_ident = HirIdent::non_trait_method(id, location); - - let attributes = func.attributes().clone(); - let has_no_predicates_attribute = attributes.is_no_predicates(); - let should_fold = attributes.is_foldable(); - if !self.inline_attribute_allowed(func) { - if has_no_predicates_attribute { - self.push_err(ResolverError::NoPredicatesAttributeOnUnconstrained { - ident: func.name_ident().clone(), - }); - } else if should_fold { - self.push_err(ResolverError::FoldAttributeOnUnconstrained { - ident: func.name_ident().clone(), - }); - } - } - // Both the #[fold] and #[no_predicates] alter a function's inline type and code generation in similar ways. - // In certain cases such as type checking (for which the following flag will be used) both attributes - // indicate we should code generate in the same way. Thus, we unify the attributes into one flag here. - let has_inline_attribute = has_no_predicates_attribute || should_fold; - - let generics = vecmap(&self.generics, |generic| generic.type_var.clone()); - let mut parameters = vec![]; - let mut parameter_types = vec![]; - - for Param { visibility, pattern, typ, span: _ } in func.parameters().iter().cloned() { - if visibility == Visibility::Public && !self.pub_allowed(func) { - self.push_err(ResolverError::UnnecessaryPub { - ident: func.name_ident().clone(), - position: PubPosition::Parameter, - }); - } - - let pattern = self.resolve_pattern(pattern, DefinitionKind::Local(None)); - let typ = self.resolve_type_inner(typ); - - parameters.push((pattern, typ.clone(), visibility)); - parameter_types.push(typ); - } - - let return_type = Box::new(self.resolve_type(func.return_type())); - - self.declare_numeric_generics(¶meter_types, &return_type); - - if !self.pub_allowed(func) && func.def.return_visibility == Visibility::Public { - self.push_err(ResolverError::UnnecessaryPub { - ident: func.name_ident().clone(), - position: PubPosition::ReturnType, - }); - } - let is_low_level_function = - attributes.function.as_ref().map_or(false, |func| func.is_low_level()); - if !self.path_resolver.module_id().krate.is_stdlib() && is_low_level_function { - let error = - ResolverError::LowLevelFunctionOutsideOfStdlib { ident: func.name_ident().clone() }; - self.push_err(error); - } - - // 'pub' is required on return types for entry point functions - if self.is_entry_point_function(func) - && return_type.as_ref() != &Type::Unit - && func.def.return_visibility == Visibility::Private - { - self.push_err(ResolverError::NecessaryPub { ident: func.name_ident().clone() }); - } - // '#[recursive]' attribute is only allowed for entry point functions - if !self.is_entry_point_function(func) && func.kind == FunctionKind::Recursive { - self.push_err(ResolverError::MisplacedRecursiveAttribute { - ident: func.name_ident().clone(), - }); - } - - let mut typ = Type::Function(parameter_types, return_type, Box::new(Type::Unit)); - - if !generics.is_empty() { - typ = Type::Forall(generics, Box::new(typ)); - } - - self.interner.push_definition_type(name_ident.id, typ.clone()); - - let direct_generics = func.def.generics.iter(); - let direct_generics = direct_generics - .filter_map(|generic| self.find_generic(&generic.ident().0.contents).cloned()) - .collect(); - - FuncMeta { - name: name_ident, - kind: func.kind, - location, - typ, - direct_generics, - trait_impl: 
self.current_trait_impl, - parameters: parameters.into(), - return_type: func.def.return_type.clone(), - return_visibility: func.def.return_visibility, - has_body: !func.def.body.is_empty(), - trait_constraints: self.resolve_trait_constraints(&func.def.where_clause), - is_entry_point: self.is_entry_point_function(func), - has_inline_attribute, - source_crate: self.path_resolver.module_id().krate, - - // These fields are only used by the elaborator - all_generics: Vec::new(), - is_trait_function: false, - parameter_idents: Vec::new(), - function_body: FunctionBody::Resolved, - } - } - - /// Override whether this name resolver is within a contract or not. - /// This will affect which types are allowed as parameters to methods as well - /// as which modifiers are allowed on a function. - pub(crate) fn set_in_contract(&mut self, in_contract: bool) { - self.in_contract = in_contract; - } - - /// True if the 'pub' keyword is allowed on parameters in this function - /// 'pub' on function parameters is only allowed for entry point functions - fn pub_allowed(&self, func: &NoirFunction) -> bool { - self.is_entry_point_function(func) || func.attributes().is_foldable() - } - - fn is_entry_point_function(&self, func: &NoirFunction) -> bool { - if self.in_contract { - func.attributes().is_contract_entry_point() - } else { - func.name() == MAIN_FUNCTION - } - } - - fn inline_attribute_allowed(&self, func: &NoirFunction) -> bool { - // Inline attributes are only relevant for constrained functions - // as all unconstrained functions are not inlined - !func.def.is_unconstrained - } - - // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this method in favor of explicit numeric generics - fn declare_numeric_generics(&mut self, params: &[Type], return_type: &Type) { - if self.generics.is_empty() { - return; - } - - for (name_to_find, type_variable) in Self::find_numeric_generics(params, return_type) { - // Declare any generics to let users use numeric generics in scope. - // Don't issue a warning if these are unused - // - // We can fail to find the generic in self.generics if it is an implicit one created - // by the compiler. This can happen when, e.g. eliding array lengths using the slice - // syntax [T]. - if let Some(ResolvedGeneric { name, span, .. 
}) = - self.generics.iter().find(|generic| generic.name.as_ref() == &name_to_find) - { - let ident = Ident::new(name.to_string(), *span); - let definition = DefinitionKind::GenericType(type_variable); - self.add_variable_decl_inner(ident.clone(), false, false, false, definition); - } - } - } - - fn find_numeric_generics( - parameters: &[Type], - return_type: &Type, - ) -> Vec<(String, TypeVariable)> { - let mut found = BTreeMap::new(); - for parameter in parameters { - Self::find_numeric_generics_in_type(parameter, &mut found); - } - Self::find_numeric_generics_in_type(return_type, &mut found); - found.into_iter().collect() - } - - fn find_numeric_generics_in_type(typ: &Type, found: &mut BTreeMap) { - match typ { - Type::FieldElement - | Type::Integer(_, _) - | Type::Bool - | Type::Unit - | Type::Error - | Type::TypeVariable(_, _) - | Type::Constant(_) - | Type::NamedGeneric(_, _, _) - | Type::Quoted(_) - | Type::Forall(_, _) => (), - - Type::TraitAsType(_, _, args) => { - for arg in args { - Self::find_numeric_generics_in_type(arg, found); - } - } - - Type::Array(length, element_type) => { - if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { - found.insert(name.to_string(), type_variable.clone()); - } - Self::find_numeric_generics_in_type(element_type, found); - } - - Type::Slice(element_type) => { - Self::find_numeric_generics_in_type(element_type, found); - } - - Type::Tuple(fields) => { - for field in fields { - Self::find_numeric_generics_in_type(field, found); - } - } - - Type::Function(parameters, return_type, _env) => { - for parameter in parameters { - Self::find_numeric_generics_in_type(parameter, found); - } - Self::find_numeric_generics_in_type(return_type, found); - } - - Type::Struct(struct_type, generics) => { - for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name, _) = generic { - if struct_type.borrow().generic_is_numeric(i) { - found.insert(name.to_string(), type_variable.clone()); - } - } else { - Self::find_numeric_generics_in_type(generic, found); - } - } - } - Type::Alias(alias, generics) => { - for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name, _) = generic { - if alias.borrow().generic_is_numeric(i) { - found.insert(name.to_string(), type_variable.clone()); - } - } else { - Self::find_numeric_generics_in_type(generic, found); - } - } - } - Type::MutableReference(element) => Self::find_numeric_generics_in_type(element, found), - Type::String(length) => { - if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { - found.insert(name.to_string(), type_variable.clone()); - } - } - Type::FmtString(length, fields) => { - if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { - found.insert(name.to_string(), type_variable.clone()); - } - Self::find_numeric_generics_in_type(fields, found); - } - } - } - - pub fn resolve_global_let( - &mut self, - let_stmt: LetStatement, - global_id: GlobalId, - ) -> HirStatement { - self.current_item = Some(DependencyId::Global(global_id)); - let expression = self.resolve_expression(let_stmt.expression); - let definition = DefinitionKind::Global(global_id); - - if !self.in_contract - && let_stmt.attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Abi(_))) - { - let span = let_stmt.pattern.span(); - self.push_err(ResolverError::AbiAttributeOutsideContract { span }); - } - - if !let_stmt.comptime && matches!(let_stmt.pattern, Pattern::Mutable(..)) { - let span = let_stmt.pattern.span(); - 
self.push_err(ResolverError::MutableGlobal { span }); - } - - HirStatement::Let(HirLetStatement { - pattern: self.resolve_pattern(let_stmt.pattern, definition), - r#type: self.resolve_type(let_stmt.r#type), - expression, - attributes: let_stmt.attributes, - comptime: let_stmt.comptime, - }) - } - - pub fn resolve_stmt(&mut self, stmt: StatementKind, span: Span) -> HirStatement { - match stmt { - StatementKind::Let(let_stmt) => { - let expression = self.resolve_expression(let_stmt.expression); - let definition = DefinitionKind::Local(Some(expression)); - HirStatement::Let(HirLetStatement { - pattern: self.resolve_pattern(let_stmt.pattern, definition), - r#type: self.resolve_type(let_stmt.r#type), - expression, - attributes: let_stmt.attributes, - comptime: let_stmt.comptime, - }) - } - StatementKind::Constrain(constrain_stmt) => { - let expr_id = self.resolve_expression(constrain_stmt.0); - let assert_message_expr_id = - constrain_stmt.1.map(|assert_expr_id| self.resolve_expression(assert_expr_id)); - - HirStatement::Constrain(HirConstrainStatement( - expr_id, - self.file, - assert_message_expr_id, - )) - } - StatementKind::Expression(expr) => { - HirStatement::Expression(self.resolve_expression(expr)) - } - StatementKind::Semi(expr) => HirStatement::Semi(self.resolve_expression(expr)), - StatementKind::Assign(assign_stmt) => { - let identifier = self.resolve_lvalue(assign_stmt.lvalue); - let expression = self.resolve_expression(assign_stmt.expression); - let stmt = HirAssignStatement { lvalue: identifier, expression }; - HirStatement::Assign(stmt) - } - StatementKind::For(for_loop) => { - match for_loop.range { - ForRange::Range(start_range, end_range) => { - let start_range = self.resolve_expression(start_range); - let end_range = self.resolve_expression(end_range); - let (identifier, block) = (for_loop.identifier, for_loop.block); - - self.nested_loops += 1; - - // TODO: For loop variables are currently mutable by default since we haven't - // yet implemented syntax for them to be optionally mutable. 
- let (identifier, block) = self.in_new_scope(|this| { - let decl = this.add_variable_decl( - identifier, - false, - true, - DefinitionKind::Local(None), - ); - (decl, this.resolve_expression(block)) - }); - - self.nested_loops -= 1; - - HirStatement::For(HirForStatement { - start_range, - end_range, - block, - identifier, - }) - } - range @ ForRange::Array(_) => { - let for_stmt = - range.into_for(for_loop.identifier, for_loop.block, for_loop.span); - self.resolve_stmt(for_stmt.kind, for_loop.span) - } - } - } - StatementKind::Break => { - self.check_break_continue(true, span); - HirStatement::Break - } - StatementKind::Continue => { - self.check_break_continue(false, span); - HirStatement::Continue - } - StatementKind::Error => HirStatement::Error, - StatementKind::Comptime(statement) => { - let hir_statement = self.resolve_stmt(statement.kind, statement.span); - let statement_id = self.interner.push_stmt(hir_statement); - self.interner.push_stmt_location(statement_id, statement.span, self.file); - HirStatement::Comptime(statement_id) - } - } - } - - pub fn intern_stmt(&mut self, stmt: Statement) -> StmtId { - let hir_stmt = self.resolve_stmt(stmt.kind, stmt.span); - let id = self.interner.push_stmt(hir_stmt); - self.interner.push_stmt_location(id, stmt.span, self.file); - id - } - - fn resolve_lvalue(&mut self, lvalue: LValue) -> HirLValue { - match lvalue { - LValue::Ident(ident) => { - let ident = self.find_variable_or_default(&ident); - self.resolve_local_variable(ident.0.clone(), ident.1); - - HirLValue::Ident(ident.0, Type::Error) - } - LValue::MemberAccess { object, field_name, span } => HirLValue::MemberAccess { - object: Box::new(self.resolve_lvalue(*object)), - field_name, - location: Location::new(span, self.file), - field_index: None, - typ: Type::Error, - }, - LValue::Index { array, index, span } => { - let array = Box::new(self.resolve_lvalue(*array)); - let index = self.resolve_expression(index); - let location = Location::new(span, self.file); - HirLValue::Index { array, index, location, typ: Type::Error } - } - LValue::Dereference(lvalue, span) => { - let lvalue = Box::new(self.resolve_lvalue(*lvalue)); - let location = Location::new(span, self.file); - HirLValue::Dereference { lvalue, location, element_type: Type::Error } - } - } - } - - fn resolve_local_variable(&mut self, hir_ident: HirIdent, var_scope_index: usize) { - let mut transitive_capture_index: Option = None; - - for lambda_index in 0..self.lambda_stack.len() { - if self.lambda_stack[lambda_index].scope_index > var_scope_index { - // Beware: the same variable may be captured multiple times, so we check - // for its presence before adding the capture below. - let pos = self.lambda_stack[lambda_index] - .captures - .iter() - .position(|capture| capture.ident.id == hir_ident.id); - - if pos.is_none() { - self.lambda_stack[lambda_index].captures.push(HirCapturedVar { - ident: hir_ident.clone(), - transitive_capture_index, - }); - } - - if lambda_index + 1 < self.lambda_stack.len() { - // There is more than one closure between the current scope and - // the scope of the variable, so this is a propagated capture. - // We need to track the transitive capture index as we go up in - // the closure stack. 
- transitive_capture_index = Some(pos.unwrap_or( - // If this was a fresh capture, we added it to the end of - // the captures vector: - self.lambda_stack[lambda_index].captures.len() - 1, - )); - } - } - } - } - - fn resolve_array_literal(&mut self, array_literal: ArrayLiteral) -> HirArrayLiteral { - match array_literal { - ArrayLiteral::Standard(elements) => { - let elements = vecmap(elements, |elem| self.resolve_expression(elem)); - HirArrayLiteral::Standard(elements) - } - ArrayLiteral::Repeated { repeated_element, length } => { - let span = length.span; - let length = - UnresolvedTypeExpression::from_expr(*length, span).unwrap_or_else(|error| { - self.errors.push(ResolverError::ParserError(Box::new(error))); - UnresolvedTypeExpression::Constant(0, span) - }); - - let length = self.convert_expression_type(length); - let repeated_element = self.resolve_expression(*repeated_element); - - HirArrayLiteral::Repeated { repeated_element, length } - } - } - } - - pub fn resolve_expression(&mut self, expr: Expression) -> ExprId { - let hir_expr = match expr.kind { - ExpressionKind::Literal(literal) => HirExpression::Literal(match literal { - Literal::Bool(b) => HirLiteral::Bool(b), - Literal::Array(array_literal) => { - HirLiteral::Array(self.resolve_array_literal(array_literal)) - } - Literal::Slice(array_literal) => { - HirLiteral::Slice(self.resolve_array_literal(array_literal)) - } - Literal::Integer(integer, sign) => HirLiteral::Integer(integer, sign), - Literal::Str(str) => HirLiteral::Str(str), - Literal::RawStr(str, _) => HirLiteral::Str(str), - Literal::FmtStr(str) => self.resolve_fmt_str_literal(str, expr.span), - Literal::Unit => HirLiteral::Unit, - }), - ExpressionKind::Variable(path, generics) => { - let generics = - generics.map(|generics| vecmap(generics, |typ| self.resolve_type(typ))); - - if let Some((method, constraint, assumed)) = self.resolve_trait_generic_path(&path) - { - HirExpression::Ident( - HirIdent { - location: Location::new(expr.span, self.file), - id: self.interner.trait_method_id(method), - impl_kind: ImplKind::TraitMethod(method, constraint, assumed), - }, - generics, - ) - } else { - // If the Path is being used as an Expression, then it is referring to a global from a separate module - // Otherwise, then it is referring to an Identifier - // This lookup allows support of such statements: let x = foo::bar::SOME_GLOBAL + 10; - // If the expression is a singular indent, we search the resolver's current scope as normal. - let (hir_ident, var_scope_index) = self.get_ident_from_path(path.clone()); - - if hir_ident.id != DefinitionId::dummy_id() { - match self.interner.definition(hir_ident.id).kind { - DefinitionKind::Function(id) => { - if let Some(current_item) = self.current_item { - self.interner.add_function_dependency(current_item, id); - } - } - DefinitionKind::Global(global_id) => { - if let Some(current_item) = self.current_item { - self.interner.add_global_dependency(current_item, global_id); - } - } - DefinitionKind::GenericType(_) => { - // Initialize numeric generics to a polymorphic integer type in case - // they're used in expressions. We must do this here since the type - // checker does not check definition kinds and otherwise expects - // parameters to already be typed. - if self.interner.definition_type(hir_ident.id) == Type::Error { - let typ = Type::polymorphic_integer_or_field(self.interner); - self.interner.push_definition_type(hir_ident.id, typ); - } - } - DefinitionKind::Local(_) => { - // only local variables can be captured by closures. 
- self.resolve_local_variable(hir_ident.clone(), var_scope_index); - } - } - } - - HirExpression::Ident(hir_ident, generics) - } - } - ExpressionKind::Prefix(prefix) => { - let operator = prefix.operator; - let rhs = self.resolve_expression(prefix.rhs); - let trait_method_id = self.interner.get_prefix_operator_trait_method(&operator); - - if operator == UnaryOp::MutableReference { - if let Err(error) = verify_mutable_reference(self.interner, rhs) { - self.errors.push(error); - } - } - - HirExpression::Prefix(HirPrefixExpression { operator, rhs, trait_method_id }) - } - ExpressionKind::Infix(infix) => { - let lhs = self.resolve_expression(infix.lhs); - let rhs = self.resolve_expression(infix.rhs); - let trait_id = self.interner.get_operator_trait_method(infix.operator.contents); - - HirExpression::Infix(HirInfixExpression { - lhs, - operator: HirBinaryOp::new(infix.operator, self.file), - trait_method_id: trait_id, - rhs, - }) - } - ExpressionKind::Call(call_expr) => { - // Get the span and name of path for error reporting - let func = self.resolve_expression(*call_expr.func); - - let arguments = vecmap(call_expr.arguments, |arg| self.resolve_expression(arg)); - let location = Location::new(expr.span, self.file); - HirExpression::Call(HirCallExpression { func, arguments, location }) - } - ExpressionKind::MethodCall(call_expr) => { - let method = call_expr.method_name; - let object = self.resolve_expression(call_expr.object); - - // Cannot verify the generic count here equals the expected count since we don't - // know which definition `method` refers to until it is resolved during type checking. - let generics = call_expr - .generics - .map(|generics| vecmap(generics, |typ| self.resolve_type(typ))); - - let arguments = vecmap(call_expr.arguments, |arg| self.resolve_expression(arg)); - let location = Location::new(expr.span, self.file); - HirExpression::MethodCall(HirMethodCallExpression { - method, - object, - generics, - arguments, - location, - }) - } - ExpressionKind::Cast(cast_expr) => HirExpression::Cast(HirCastExpression { - lhs: self.resolve_expression(cast_expr.lhs), - r#type: self.resolve_type(cast_expr.r#type), - }), - ExpressionKind::If(if_expr) => HirExpression::If(HirIfExpression { - condition: self.resolve_expression(if_expr.condition), - consequence: self.resolve_expression(if_expr.consequence), - alternative: if_expr.alternative.map(|e| self.resolve_expression(e)), - }), - ExpressionKind::Index(indexed_expr) => HirExpression::Index(HirIndexExpression { - collection: self.resolve_expression(indexed_expr.collection), - index: self.resolve_expression(indexed_expr.index), - }), - ExpressionKind::Block(block_expr) => { - HirExpression::Block(self.resolve_block(block_expr)) - } - ExpressionKind::Constructor(constructor) => { - let span = constructor.type_name.span(); - - match self.lookup_type_or_error(constructor.type_name) { - Some(Type::Struct(r#type, struct_generics)) => { - let typ = r#type.clone(); - let fields = constructor.fields; - let resolve_expr = Resolver::resolve_expression; - let fields = - self.resolve_constructor_fields(typ, fields, span, resolve_expr); - HirExpression::Constructor(HirConstructorExpression { - fields, - r#type, - struct_generics, - }) - } - Some(typ) => { - self.push_err(ResolverError::NonStructUsedInConstructor { typ, span }); - HirExpression::Error - } - None => HirExpression::Error, - } - } - ExpressionKind::MemberAccess(access) => { - // Validating whether the lhs actually has the rhs as a field - // needs to wait until type checking when we 
know the type of the lhs - HirExpression::MemberAccess(HirMemberAccess { - lhs: self.resolve_expression(access.lhs), - rhs: access.rhs, - // This is only used when lhs is a reference and we want to return a reference to rhs - is_offset: false, - }) - } - ExpressionKind::Error => HirExpression::Error, - ExpressionKind::Tuple(elements) => { - let elements = vecmap(elements, |elem| self.resolve_expression(elem)); - HirExpression::Tuple(elements) - } - // We must stay in the same function scope as the parent function to allow for closures - // to capture variables. This is currently limited to immutable variables. - ExpressionKind::Lambda(lambda) => self.in_new_scope(|this| { - let scope_index = this.scopes.current_scope_index(); - - this.lambda_stack.push(LambdaContext { captures: Vec::new(), scope_index }); - - let parameters = vecmap(lambda.parameters, |(pattern, typ)| { - let parameter = DefinitionKind::Local(None); - (this.resolve_pattern(pattern, parameter), this.resolve_inferred_type(typ)) - }); - - let return_type = this.resolve_inferred_type(lambda.return_type); - let body = this.resolve_expression(lambda.body); - - let lambda_context = this.lambda_stack.pop().unwrap(); - - HirExpression::Lambda(HirLambda { - parameters, - return_type, - body, - captures: lambda_context.captures, - }) - }), - ExpressionKind::Parenthesized(sub_expr) => return self.resolve_expression(*sub_expr), - - // The quoted expression isn't resolved since we don't want errors if variables aren't defined - ExpressionKind::Quote(block) => HirExpression::Quote(block), - ExpressionKind::Comptime(block, _) => { - HirExpression::Comptime(self.resolve_block(block)) - } - ExpressionKind::Resolved(_) => unreachable!( - "ExpressionKind::Resolved should only be emitted by the comptime interpreter" - ), - ExpressionKind::Unquote(_) => { - self.push_err(ResolverError::UnquoteUsedOutsideQuote { span: expr.span }); - HirExpression::Literal(HirLiteral::Unit) - } - }; - - // If these lines are ever changed, make sure to change the early return - // in the ExpressionKind::Variable case as well - let expr_id = self.interner.push_expr(hir_expr); - self.interner.push_expr_location(expr_id, expr.span, self.file); - expr_id - } - - fn resolve_pattern(&mut self, pattern: Pattern, definition: DefinitionKind) -> HirPattern { - self.resolve_pattern_mutable(pattern, None, definition) - } - - fn resolve_pattern_mutable( - &mut self, - pattern: Pattern, - mutable: Option, - definition: DefinitionKind, - ) -> HirPattern { - match pattern { - Pattern::Identifier(name) => { - // If this definition is mutable, do not store the rhs because it will - // not always refer to the correct value of the variable - let definition = match (mutable, definition) { - (Some(_), DefinitionKind::Local(_)) => DefinitionKind::Local(None), - (_, other) => other, - }; - let id = self.add_variable_decl(name, mutable.is_some(), true, definition); - HirPattern::Identifier(id) - } - Pattern::Mutable(pattern, span, _) => { - if let Some(first_mut) = mutable { - self.push_err(ResolverError::UnnecessaryMut { first_mut, second_mut: span }); - } - - let pattern = self.resolve_pattern_mutable(*pattern, Some(span), definition); - let location = Location::new(span, self.file); - HirPattern::Mutable(Box::new(pattern), location) - } - Pattern::Tuple(fields, span) => { - let fields = vecmap(fields, |field| { - self.resolve_pattern_mutable(field, mutable, definition.clone()) - }); - let location = Location::new(span, self.file); - HirPattern::Tuple(fields, location) - } - 
Pattern::Struct(name, fields, span) => { - let error_identifier = |this: &mut Self| { - // Must create a name here to return a HirPattern::Identifier. Allowing - // shadowing here lets us avoid further errors if we define ERROR_IDENT - // multiple times. - let name = ERROR_IDENT.into(); - let identifier = this.add_variable_decl(name, false, true, definition.clone()); - HirPattern::Identifier(identifier) - }; - - let (struct_type, generics) = match self.lookup_type_or_error(name) { - Some(Type::Struct(struct_type, generics)) => (struct_type, generics), - None => return error_identifier(self), - Some(typ) => { - self.push_err(ResolverError::NonStructUsedInConstructor { typ, span }); - return error_identifier(self); - } - }; - - let resolve_field = |this: &mut Self, pattern| { - this.resolve_pattern_mutable(pattern, mutable, definition.clone()) - }; - - let typ = struct_type.clone(); - let fields = self.resolve_constructor_fields(typ, fields, span, resolve_field); - - let typ = Type::Struct(struct_type, generics); - let location = Location::new(span, self.file); - HirPattern::Struct(typ, fields, location) - } - } - } - - /// Resolve all the fields of a struct constructor expression. - /// Ensures all fields are present, none are repeated, and all - /// are part of the struct. - /// - /// This is generic to allow it to work for constructor expressions - /// and constructor patterns. - fn resolve_constructor_fields( - &mut self, - struct_type: Shared, - fields: Vec<(Ident, T)>, - span: Span, - mut resolve_function: impl FnMut(&mut Self, T) -> U, - ) -> Vec<(Ident, U)> { - let mut ret = Vec::with_capacity(fields.len()); - let mut seen_fields = HashSet::new(); - let mut unseen_fields = struct_type.borrow().field_names(); - - for (field, expr) in fields { - let resolved = resolve_function(self, expr); - - if unseen_fields.contains(&field) { - unseen_fields.remove(&field); - seen_fields.insert(field.clone()); - } else if seen_fields.contains(&field) { - // duplicate field - self.push_err(ResolverError::DuplicateField { field: field.clone() }); - } else { - // field not required by struct - self.push_err(ResolverError::NoSuchField { - field: field.clone(), - struct_definition: struct_type.borrow().name.clone(), - }); - } - - ret.push((field, resolved)); - } - - if !unseen_fields.is_empty() { - self.push_err(ResolverError::MissingFields { - span, - missing_fields: unseen_fields.into_iter().map(|field| field.to_string()).collect(), - struct_definition: struct_type.borrow().name.clone(), - }); - } - - ret - } - - pub fn get_struct(&self, type_id: StructId) -> Shared { - self.interner.get_struct(type_id) - } - - pub fn get_trait_mut(&mut self, trait_id: TraitId) -> &mut Trait { - self.interner.get_trait_mut(trait_id) - } - - fn lookup(&mut self, path: Path) -> Result { - let span = path.span(); - let id = self.resolve_path(path)?; - T::try_from(id).ok_or_else(|| ResolverError::Expected { - expected: T::description(), - got: id.as_str().to_owned(), - span, - }) - } - - fn lookup_global(&mut self, path: Path) -> Result { - let span = path.span(); - let id = self.resolve_path(path)?; - - if let Some(function) = TryFromModuleDefId::try_from(id) { - return Ok(self.interner.function_definition_id(function)); - } - - if let Some(global) = TryFromModuleDefId::try_from(id) { - let global = self.interner.get_global(global); - return Ok(global.definition_id); - } - - let expected = "global variable".into(); - let got = "local variable".into(); - Err(ResolverError::Expected { span, expected, got }) - } - - /// 
Lookup a given struct type by name. - fn lookup_struct_or_error(&mut self, path: Path) -> Option> { - match self.lookup(path) { - Ok(struct_id) => Some(self.get_struct(struct_id)), - Err(error) => { - self.push_err(error); - None - } - } - } - - /// Lookup a given trait by name/path. - fn lookup_trait_or_error(&mut self, path: Path) -> Option<&mut Trait> { - match self.lookup(path) { - Ok(trait_id) => Some(self.get_trait_mut(trait_id)), - Err(error) => { - self.push_err(error); - None - } - } - } - - /// Looks up a given type by name. - /// This will also instantiate any struct types found. - fn lookup_type_or_error(&mut self, path: Path) -> Option { - let ident = path.as_ident(); - if ident.map_or(false, |i| i == SELF_TYPE_NAME) { - if let Some(typ) = &self.self_type { - return Some(typ.clone()); - } - } - - match self.lookup(path) { - Ok(struct_id) => { - let struct_type = self.get_struct(struct_id); - let generics = struct_type.borrow().instantiate(self.interner); - Some(Type::Struct(struct_type, generics)) - } - Err(error) => { - self.push_err(error); - None - } - } - } - - fn lookup_type_alias(&mut self, path: Path) -> Option> { - self.lookup(path).ok().map(|id| self.interner.get_type_alias(id)) - } - - // this resolves Self::some_static_method, inside an impl block (where we don't have a concrete self_type) - fn resolve_trait_static_method_by_self( - &mut self, - path: &Path, - ) -> Option<(TraitMethodId, TraitConstraint, bool)> { - let trait_id = self.trait_id?; - - if path.kind == PathKind::Plain && path.segments.len() == 2 { - let name = &path.segments[0].0.contents; - let method = &path.segments[1]; - - if name == SELF_TYPE_NAME { - let the_trait = self.interner.get_trait(trait_id); - let method = the_trait.find_method(method.0.contents.as_str())?; - - let constraint = TraitConstraint { - typ: self.self_type.clone()?, - trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { - generic.type_var.clone() - })), - trait_id, - }; - return Some((method, constraint, false)); - } - } - None - } - - // this resolves TraitName::some_static_method - fn resolve_trait_static_method( - &mut self, - path: &Path, - ) -> Option<(TraitMethodId, TraitConstraint, bool)> { - if path.kind == PathKind::Plain && path.segments.len() == 2 { - let method = &path.segments[1]; - - let mut trait_path = path.clone(); - trait_path.pop(); - let trait_id = self.lookup(trait_path).ok()?; - let the_trait = self.interner.get_trait(trait_id); - - let method = the_trait.find_method(method.0.contents.as_str())?; - let constraint = TraitConstraint { - typ: Type::TypeVariable( - the_trait.self_type_typevar.clone(), - TypeVariableKind::Normal, - ), - trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { - generic.type_var.clone() - })), - trait_id, - }; - return Some((method, constraint, false)); - } - None - } - - // This resolves a static trait method T::trait_method by iterating over the where clause - // - // Returns the trait method, trait constraint, and whether the impl is assumed from a where - // clause. This is always true since this helper searches where clauses for a generic constraint. - // E.g. 
`t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` - fn resolve_trait_method_by_named_generic( - &mut self, - path: &Path, - ) -> Option<(TraitMethodId, TraitConstraint, bool)> { - if path.segments.len() != 2 { - return None; - } - - for UnresolvedTraitConstraint { typ, trait_bound } in self.trait_bounds.clone() { - if let UnresolvedTypeData::Named(constraint_path, _, _) = &typ.typ { - // if `path` is `T::method_name`, we're looking for constraint of the form `T: SomeTrait` - if constraint_path.segments.len() == 1 - && path.segments[0] != constraint_path.last_segment() - { - continue; - } - - if let Ok(ModuleDefId::TraitId(trait_id)) = - self.resolve_path(trait_bound.trait_path.clone()) - { - let the_trait = self.interner.get_trait(trait_id); - if let Some(method) = - the_trait.find_method(path.segments.last().unwrap().0.contents.as_str()) - { - let constraint = TraitConstraint { - trait_id, - typ: self.resolve_type(typ.clone()), - trait_generics: vecmap(trait_bound.trait_generics, |typ| { - self.resolve_type(typ) - }), - }; - return Some((method, constraint, true)); - } - } - } - } - None - } - - // Try to resolve the given trait method path. - // - // Returns the trait method, trait constraint, and whether the impl is assumed to exist by a where clause or not - // E.g. `t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` - fn resolve_trait_generic_path( - &mut self, - path: &Path, - ) -> Option<(TraitMethodId, TraitConstraint, bool)> { - self.resolve_trait_static_method_by_self(path) - .or_else(|| self.resolve_trait_static_method(path)) - .or_else(|| self.resolve_trait_method_by_named_generic(path)) - } - - fn resolve_path(&mut self, path: Path) -> Result { - let path_resolution = self.path_resolver.resolve(self.def_maps, path, &mut None)?; - - if let Some(error) = path_resolution.error { - self.push_err(error.into()); - } - - Ok(path_resolution.module_def_id) - } - - fn resolve_block(&mut self, block_expr: BlockExpression) -> HirBlockExpression { - let statements = - self.in_new_scope(|this| vecmap(block_expr.statements, |stmt| this.intern_stmt(stmt))); - HirBlockExpression { statements } - } - - pub fn intern_block(&mut self, block: BlockExpression) -> ExprId { - let hir_block = HirExpression::Block(self.resolve_block(block)); - self.interner.push_expr(hir_block) - } - - fn eval_global_as_array_length(&mut self, global: GlobalId, path: &Path) -> u32 { - let Some(stmt) = self.interner.get_global_let_statement(global) else { - let path = path.clone(); - self.push_err(ResolverError::NoSuchNumericTypeVariable { path }); - return 0; - }; - - let length = stmt.expression; - let span = self.interner.expr_span(&length); - let result = self.try_eval_array_length_id(length, span); - - match result.map(|length| length.try_into()) { - Ok(Ok(length_value)) => return length_value, - Ok(Err(_cast_err)) => self.push_err(ResolverError::IntegerTooLarge { span }), - Err(Some(error)) => self.push_err(error), - Err(None) => (), - } - 0 - } - - fn try_eval_array_length_id( - &self, - rhs: ExprId, - span: Span, - ) -> Result> { - // Arbitrary amount of recursive calls to try before giving up - let fuel = 100; - self.try_eval_array_length_id_with_fuel(rhs, span, fuel) - } - - fn try_eval_array_length_id_with_fuel( - &self, - rhs: ExprId, - span: Span, - fuel: u32, - ) -> Result> { - if fuel == 0 { - // If we reach here, it is likely from evaluating cyclic globals. 
We expect an error to - // be issued for them after name resolution so issue no error now. - return Err(None); - } - - match self.interner.expression(&rhs) { - HirExpression::Literal(HirLiteral::Integer(int, false)) => { - int.try_into_u128().ok_or(Some(ResolverError::IntegerTooLarge { span })) - } - HirExpression::Ident(ident, _) => { - let definition = self.interner.definition(ident.id); - match definition.kind { - DefinitionKind::Global(global_id) => { - let let_statement = self.interner.get_global_let_statement(global_id); - if let Some(let_statement) = let_statement { - let expression = let_statement.expression; - self.try_eval_array_length_id_with_fuel(expression, span, fuel - 1) - } else { - Err(Some(ResolverError::InvalidArrayLengthExpr { span })) - } - } - _ => Err(Some(ResolverError::InvalidArrayLengthExpr { span })), - } - } - HirExpression::Infix(infix) => { - let lhs = self.try_eval_array_length_id_with_fuel(infix.lhs, span, fuel - 1)?; - let rhs = self.try_eval_array_length_id_with_fuel(infix.rhs, span, fuel - 1)?; - - match infix.operator.kind { - BinaryOpKind::Add => Ok(lhs + rhs), - BinaryOpKind::Subtract => Ok(lhs - rhs), - BinaryOpKind::Multiply => Ok(lhs * rhs), - BinaryOpKind::Divide => Ok(lhs / rhs), - BinaryOpKind::Equal => Ok((lhs == rhs) as u128), - BinaryOpKind::NotEqual => Ok((lhs != rhs) as u128), - BinaryOpKind::Less => Ok((lhs < rhs) as u128), - BinaryOpKind::LessEqual => Ok((lhs <= rhs) as u128), - BinaryOpKind::Greater => Ok((lhs > rhs) as u128), - BinaryOpKind::GreaterEqual => Ok((lhs >= rhs) as u128), - BinaryOpKind::And => Ok(lhs & rhs), - BinaryOpKind::Or => Ok(lhs | rhs), - BinaryOpKind::Xor => Ok(lhs ^ rhs), - BinaryOpKind::ShiftRight => Ok(lhs >> rhs), - BinaryOpKind::ShiftLeft => Ok(lhs << rhs), - BinaryOpKind::Modulo => Ok(lhs % rhs), - } - } - HirExpression::Cast(cast) => { - let lhs = self.try_eval_array_length_id_with_fuel(cast.lhs, span, fuel - 1)?; - let lhs_value = Value::Field(lhs.into()); - let evaluated_value = - Interpreter::evaluate_cast_one_step(&cast, rhs, lhs_value, self.interner) - .map_err(|error| Some(ResolverError::ArrayLengthInterpreter { error }))?; - - evaluated_value - .to_u128() - .ok_or_else(|| Some(ResolverError::InvalidArrayLengthExpr { span })) - } - _other => Err(Some(ResolverError::InvalidArrayLengthExpr { span })), - } - } - - fn resolve_fmt_str_literal(&mut self, str: String, call_expr_span: Span) -> HirLiteral { - let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}") - .expect("ICE: an invalid regex pattern was used for checking format strings"); - let mut fmt_str_idents = Vec::new(); - for field in re.find_iter(&str) { - let matched_str = field.as_str(); - let ident_name = &matched_str[1..(matched_str.len() - 1)]; - - let scope_tree = self.scopes.current_scope_tree(); - let variable = scope_tree.find(ident_name); - if let Some((old_value, _)) = variable { - old_value.num_times_used += 1; - let ident = HirExpression::Ident(old_value.ident.clone(), None); - let expr_id = self.interner.push_expr(ident); - self.interner.push_expr_location(expr_id, call_expr_span, self.file); - fmt_str_idents.push(expr_id); - } else if ident_name.parse::().is_ok() { - self.errors.push(ResolverError::NumericConstantInFormatString { - name: ident_name.to_owned(), - span: call_expr_span, - }); - } else { - self.errors.push(ResolverError::VariableNotDeclared { - name: ident_name.to_owned(), - span: call_expr_span, - }); - } - } - HirLiteral::FmtStr(str, fmt_str_idents) - } - - fn check_break_continue(&mut self, is_break: bool, span: Span) { - if 
!self.in_unconstrained_fn { - self.push_err(ResolverError::JumpInConstrainedFn { is_break, span }); - } - if self.nested_loops == 0 { - self.push_err(ResolverError::JumpOutsideLoop { is_break, span }); - } - } -} - -/// Gives an error if a user tries to create a mutable reference -/// to an immutable variable. -pub fn verify_mutable_reference(interner: &NodeInterner, rhs: ExprId) -> Result<(), ResolverError> { - match interner.expression(&rhs) { - HirExpression::MemberAccess(member_access) => { - verify_mutable_reference(interner, member_access.lhs) - } - HirExpression::Index(_) => { - let span = interner.expr_span(&rhs); - Err(ResolverError::MutableReferenceToArrayElement { span }) - } - HirExpression::Ident(ident, _) => { - if let Some(definition) = interner.try_definition(ident.id) { - if !definition.mutable { - return Err(ResolverError::MutableReferenceToImmutableVariable { - span: interner.expr_span(&rhs), - variable: definition.name.clone(), - }); - } - } - Ok(()) - } - _ => Ok(()), - } -} diff --git a/compiler/noirc_frontend/src/hir/resolution/structs.rs b/compiler/noirc_frontend/src/hir/resolution/structs.rs deleted file mode 100644 index f62e5589d74..00000000000 --- a/compiler/noirc_frontend/src/hir/resolution/structs.rs +++ /dev/null @@ -1,83 +0,0 @@ -use std::collections::BTreeMap; - -use fm::FileId; -use iter_extended::vecmap; - -use crate::ast::Ident; -use crate::{ - graph::CrateId, - hir::{ - def_collector::dc_crate::{CompilationError, UnresolvedStruct}, - def_map::ModuleId, - Context, - }, - node_interner::StructId, - Generics, Type, -}; - -use super::{errors::ResolverError, path_resolver::StandardPathResolver, resolver::Resolver}; - -/// Create the mappings from TypeId -> StructType -/// so that expressions can access the fields of structs -pub(crate) fn resolve_structs( - context: &mut Context, - structs: BTreeMap, - crate_id: CrateId, -) -> Vec<(CompilationError, FileId)> { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - // This is necessary to avoid cloning the entire struct map - // when adding checks after each struct field is resolved. - let struct_ids = structs.keys().copied().collect::>(); - - // Resolve each field in each struct. - // Each struct should already be present in the NodeInterner after def collection. - for (type_id, typ) in structs { - let file_id = typ.file_id; - let (generics, fields, resolver_errors) = - resolve_struct_fields(context, crate_id, type_id, typ); - errors.extend(vecmap(resolver_errors, |err| (err.into(), file_id))); - context.def_interner.update_struct(type_id, |struct_def| { - struct_def.set_fields(fields); - struct_def.generics = generics; - }); - } - - // Check whether the struct fields have nested slices - // We need to check after all structs are resolved to - // make sure every struct's fields is accurately set. 
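The nested-slice restriction mentioned in the comment above amounts to rejecting any slice (or array) whose element type itself contains a slice. A minimal standalone sketch of such a check, using a toy `Type` enum with hypothetical names rather than the compiler's real type representation:

enum Type {
    Field,
    Array(u32, Box<Type>),
    Slice(Box<Type>),
}

impl Type {
    // True if a slice appears anywhere inside this type.
    fn contains_slice(&self) -> bool {
        match self {
            Type::Field => false,
            Type::Slice(_) => true,
            Type::Array(_, element) => element.contains_slice(),
        }
    }

    // A nested slice is a slice or array whose element type contains a slice.
    fn is_nested_slice(&self) -> bool {
        match self {
            Type::Slice(element) | Type::Array(_, element) => element.contains_slice(),
            _ => false,
        }
    }
}

fn main() {
    let flat = Type::Slice(Box::new(Type::Field));
    let nested = Type::Slice(Box::new(Type::Slice(Box::new(Type::Field))));
    let array_of_slices = Type::Array(2, Box::new(Type::Slice(Box::new(Type::Field))));
    assert!(!flat.is_nested_slice());
    assert!(nested.is_nested_slice());
    assert!(array_of_slices.is_nested_slice());
}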
- for id in struct_ids { - let struct_type = context.def_interner.get_struct(id); - // Only handle structs without generics as any generics args will be checked - // after monomorphization when performing SSA codegen - if struct_type.borrow().generics.is_empty() { - let fields = struct_type.borrow().get_fields(&[]); - for field in fields.iter() { - if field.1.is_nested_slice() { - errors.push(( - ResolverError::NestedSlices { span: struct_type.borrow().location.span } - .into(), - struct_type.borrow().location.file, - )); - } - } - } - } - - errors -} - -fn resolve_struct_fields( - context: &mut Context, - krate: CrateId, - type_id: StructId, - unresolved: UnresolvedStruct, -) -> (Generics, Vec<(Ident, Type)>, Vec) { - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: unresolved.module_id, krate }); - let file_id = unresolved.file_id; - let (generics, fields, errors) = - Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file_id) - .resolve_struct_fields(unresolved.struct_def, type_id); - - (generics, fields, errors) -} diff --git a/compiler/noirc_frontend/src/hir/resolution/traits.rs b/compiler/noirc_frontend/src/hir/resolution/traits.rs deleted file mode 100644 index 28ee70393cd..00000000000 --- a/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ /dev/null @@ -1,506 +0,0 @@ -use std::collections::{BTreeMap, HashSet}; - -use fm::FileId; -use iter_extended::vecmap; -use noirc_errors::Location; - -use crate::ast::{Ident, ItemVisibility, Path, TraitItem, UnresolvedGeneric}; -use crate::{ - graph::CrateId, - hir::{ - def_collector::{ - dc_crate::{CompilationError, UnresolvedTrait, UnresolvedTraitImpl}, - errors::{DefCollectorErrorKind, DuplicateType}, - }, - def_map::{CrateDefMap, ModuleDefId, ModuleId}, - Context, - }, - hir_def::traits::{TraitConstant, TraitFunction, TraitImpl, TraitType}, - node_interner::{FuncId, NodeInterner, TraitId}, - GenericTypeVars, Shared, Type, TypeVariableKind, -}; - -use super::{ - functions, get_module_mut, get_struct_type, - import::{PathResolution, PathResolutionError}, - path_resolver::{PathResolver, StandardPathResolver}, - resolver::Resolver, - take_errors, -}; - -/// Create the mappings from TypeId -> TraitType -/// so that expressions can access the elements of traits -pub(crate) fn resolve_traits( - context: &mut Context, - traits: BTreeMap, - crate_id: CrateId, -) -> Vec<(CompilationError, FileId)> { - let mut all_errors = Vec::new(); - - for (trait_id, unresolved_trait) in traits { - let file_id = context.def_maps[&crate_id].file_id(unresolved_trait.module_id); - let generics = context.resolve_generics( - &unresolved_trait.trait_def.generics, - &mut all_errors, - file_id, - ); - let generic_type_vars = generics.iter().map(|generic| generic.type_var.clone()).collect(); - - context.def_interner.push_empty_trait(trait_id, &unresolved_trait, generics); - - // Resolve order - // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants) - let _ = resolve_trait_types(context, crate_id, &unresolved_trait); - // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) - let _ = resolve_trait_constants(context, crate_id, &unresolved_trait); - // 3. 
Trait Methods - let (methods, errors) = resolve_trait_methods( - context, - trait_id, - crate_id, - &unresolved_trait, - &generic_type_vars, - ); - - all_errors.extend(errors); - - context.def_interner.update_trait(trait_id, |trait_def| { - trait_def.set_methods(methods); - }); - - // This check needs to be after the trait's methods are set since - // the interner may set `interner.ordering_type` based on the result type - // of the Cmp trait, if this is it. - if crate_id.is_stdlib() { - context.def_interner.try_add_infix_operator_trait(trait_id); - context.def_interner.try_add_prefix_operator_trait(trait_id); - } - } - all_errors -} - -fn resolve_trait_types( - _context: &mut Context, - _crate_id: CrateId, - _unresolved_trait: &UnresolvedTrait, -) -> (Vec, Vec<(CompilationError, FileId)>) { - // TODO - (vec![], vec![]) -} -fn resolve_trait_constants( - _context: &mut Context, - _crate_id: CrateId, - _unresolved_trait: &UnresolvedTrait, -) -> (Vec, Vec<(CompilationError, FileId)>) { - // TODO - (vec![], vec![]) -} - -fn resolve_trait_methods( - context: &mut Context, - trait_id: TraitId, - crate_id: CrateId, - unresolved_trait: &UnresolvedTrait, - trait_generics: &GenericTypeVars, -) -> (Vec, Vec<(CompilationError, FileId)>) { - let interner = &mut context.def_interner; - let def_maps = &mut context.def_maps; - - let path_resolver = StandardPathResolver::new(ModuleId { - local_id: unresolved_trait.module_id, - krate: crate_id, - }); - let file = def_maps[&crate_id].file_id(unresolved_trait.module_id); - - let mut functions = vec![]; - let mut resolver_errors = vec![]; - - for item in &unresolved_trait.trait_def.items { - if let TraitItem::Function { - name, - generics, - parameters, - return_type, - where_clause, - body: _, - } = item - { - let the_trait = interner.get_trait(trait_id); - let self_typevar = the_trait.self_type_typevar.clone(); - let self_type = Type::TypeVariable(self_typevar.clone(), TypeVariableKind::Normal); - let name_span = the_trait.name.span(); - - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(generics); - - resolver.add_existing_generics(&unresolved_trait.trait_def.generics, trait_generics); - resolver.add_existing_generic( - &UnresolvedGeneric::Variable(Ident::from("Self")), - name_span, - self_typevar, - ); - resolver.set_self_type(Some(self_type.clone())); - - let func_id = unresolved_trait.method_ids[&name.0.contents]; - let (_, func_meta) = resolver.resolve_trait_function( - name, - generics, - parameters, - return_type, - where_clause, - func_id, - ); - resolver.interner.push_fn_meta(func_meta, func_id); - - let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); - let return_type = resolver.resolve_type(return_type.get_type().into_owned()); - - let generics = vecmap(resolver.get_generics(), |generic| generic.type_var.clone()); - - let default_impl_list: Vec<_> = unresolved_trait - .fns_with_default_impl - .functions - .iter() - .filter(|(_, _, q)| q.name() == name.0.contents) - .collect(); - - let default_impl = if default_impl_list.len() == 1 { - Some(Box::new(default_impl_list[0].2.clone())) - } else { - None - }; - - let no_environment = Box::new(Type::Unit); - let function_type = Type::Function(arguments, Box::new(return_type), no_environment); - - functions.push(TraitFunction { - name: name.clone(), - typ: Type::Forall(generics, Box::new(function_type)), - location: Location::new(name.span(), unresolved_trait.file_id), - default_impl, - default_impl_module_id: 
unresolved_trait.module_id, - }); - - let errors = resolver.take_errors().into_iter(); - resolver_errors.extend(errors.map(|resolution_error| (resolution_error.into(), file))); - } - } - (functions, resolver_errors) -} - -fn collect_trait_impl_methods( - interner: &mut NodeInterner, - def_maps: &BTreeMap, - crate_id: CrateId, - trait_id: TraitId, - trait_impl: &mut UnresolvedTraitImpl, -) -> Vec<(CompilationError, FileId)> { - // In this Vec methods[i] corresponds to trait.methods[i]. If the impl has no implementation - // for a particular method, the default implementation will be added at that slot. - let mut ordered_methods = Vec::new(); - - // check whether the trait implementation is in the same crate as either the trait or the type - let mut errors = - check_trait_impl_crate_coherence(interner, trait_id, trait_impl, crate_id, def_maps); - // set of function ids that have a corresponding method in the trait - let mut func_ids_in_trait = HashSet::new(); - - // Temporarily take ownership of the trait's methods so we can iterate over them - // while also mutating the interner - let the_trait = interner.get_trait_mut(trait_id); - let methods = std::mem::take(&mut the_trait.methods); - - for method in &methods { - let overrides: Vec<_> = trait_impl - .methods - .functions - .iter() - .filter(|(_, _, f)| f.name() == method.name.0.contents) - .collect(); - - if overrides.is_empty() { - if let Some(default_impl) = &method.default_impl { - // copy 'where' clause from unresolved trait impl - let mut default_impl_clone = default_impl.clone(); - default_impl_clone.def.where_clause.extend(trait_impl.where_clause.clone()); - - let func_id = interner.push_empty_fn(); - let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; - let location = Location::new(default_impl.def.span, trait_impl.file_id); - interner.push_function(func_id, &default_impl.def, module, location); - func_ids_in_trait.insert(func_id); - ordered_methods.push((method.default_impl_module_id, func_id, *default_impl_clone)); - } else { - let error = DefCollectorErrorKind::TraitMissingMethod { - trait_name: interner.get_trait(trait_id).name.clone(), - method_name: method.name.clone(), - trait_impl_span: trait_impl.object_type.span.expect("type must have a span"), - }; - errors.push((error.into(), trait_impl.file_id)); - } - } else { - for (_, func_id, _) in &overrides { - func_ids_in_trait.insert(*func_id); - } - - if overrides.len() > 1 { - let error = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::TraitAssociatedFunction, - first_def: overrides[0].2.name_ident().clone(), - second_def: overrides[1].2.name_ident().clone(), - }; - errors.push((error.into(), trait_impl.file_id)); - } - - ordered_methods.push(overrides[0].clone()); - } - } - - // Restore the methods that were taken before the for loop - let the_trait = interner.get_trait_mut(trait_id); - the_trait.set_methods(methods); - - // Emit MethodNotInTrait error for methods in the impl block that - // don't have a corresponding method signature defined in the trait - for (_, func_id, func) in &trait_impl.methods.functions { - if !func_ids_in_trait.contains(func_id) { - let error = DefCollectorErrorKind::MethodNotInTrait { - trait_name: the_trait.name.clone(), - impl_method: func.name_ident().clone(), - }; - errors.push((error.into(), trait_impl.file_id)); - } - } - - trait_impl.methods.functions = ordered_methods; - trait_impl.methods.trait_id = Some(trait_id); - errors -} - -fn collect_trait_impl( - context: &mut Context, - crate_id: CrateId, - 
trait_impl: &mut UnresolvedTraitImpl, -) -> Vec<(CompilationError, FileId)> { - let interner = &mut context.def_interner; - let def_maps = &mut context.def_maps; - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - let unresolved_type = trait_impl.object_type.clone(); - let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; - trait_impl.trait_id = - match resolve_trait_by_path(def_maps, module, trait_impl.trait_path.clone()) { - Ok((trait_id, warning)) => { - if let Some(warning) = warning { - errors.push(( - DefCollectorErrorKind::PathResolutionError(warning).into(), - trait_impl.file_id, - )); - } - Some(trait_id) - } - Err(error) => { - errors.push((error.into(), trait_impl.file_id)); - None - } - }; - - if let Some(trait_id) = trait_impl.trait_id { - errors - .extend(collect_trait_impl_methods(interner, def_maps, crate_id, trait_id, trait_impl)); - - let path_resolver = StandardPathResolver::new(module); - let file = def_maps[&crate_id].file_id(trait_impl.module_id); - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(&trait_impl.generics); - - let typ = resolver.resolve_type(unresolved_type); - errors.extend(take_errors(trait_impl.file_id, resolver)); - - if let Some(struct_type) = get_struct_type(&typ) { - let struct_type = struct_type.borrow(); - let module = get_module_mut(def_maps, struct_type.id.module_id()); - - for (_, method_id, method) in &trait_impl.methods.functions { - // If this method was already declared, remove it from the module so it cannot - // be accessed with the `TypeName::method` syntax. We'll check later whether the - // object types in each method overlap or not. If they do, we issue an error. - // If not, that is specialization which is allowed. 
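The "declare, and on conflict remove" strategy described in the comment above can be shown on a toy module namespace; the names and numeric ids below are hypothetical stand-ins for the real interner data, not the compiler's API:

use std::collections::HashMap;

// Toy stand-in for a module's function namespace. When a method name is
// declared a second time (e.g. by two trait impls for different object
// types), the name is dropped entirely so `TypeName::method` can no longer
// resolve to either candidate; overlap checking happens in a later pass.
struct Namespace {
    functions: HashMap<String, u32>, // method name -> function id
}

impl Namespace {
    fn declare(&mut self, name: &str, id: u32) -> Result<(), ()> {
        if self.functions.contains_key(name) {
            return Err(()); // duplicate declaration
        }
        self.functions.insert(name.to_string(), id);
        Ok(())
    }

    fn remove(&mut self, name: &str) {
        self.functions.remove(name);
    }
}

fn main() {
    let mut module = Namespace { functions: HashMap::new() };
    assert!(module.declare("eq", 1).is_ok());
    // A second impl declares the same method name: remove it from the namespace.
    if module.declare("eq", 2).is_err() {
        module.remove("eq");
    }
    assert!(module.functions.get("eq").is_none());
}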
- if module - .declare_function( - method.name_ident().clone(), - ItemVisibility::Public, - *method_id, - ) - .is_err() - { - module.remove_function(method.name_ident()); - } - } - } - } - errors -} - -pub(crate) fn collect_trait_impls( - context: &mut Context, - crate_id: CrateId, - collected_impls: &mut [UnresolvedTraitImpl], -) -> Vec<(CompilationError, FileId)> { - collected_impls - .iter_mut() - .flat_map(|trait_impl| collect_trait_impl(context, crate_id, trait_impl)) - .collect() -} - -fn check_trait_impl_crate_coherence( - interner: &mut NodeInterner, - trait_id: TraitId, - trait_impl: &UnresolvedTraitImpl, - current_crate: CrateId, - def_maps: &BTreeMap, -) -> Vec<(CompilationError, FileId)> { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - - let module = ModuleId { krate: current_crate, local_id: trait_impl.module_id }; - let file = def_maps[¤t_crate].file_id(trait_impl.module_id); - let path_resolver = StandardPathResolver::new(module); - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - - let object_crate = match resolver.resolve_type(trait_impl.object_type.clone()) { - Type::Struct(struct_type, _) => struct_type.borrow().id.krate(), - _ => CrateId::Dummy, - }; - - let the_trait = interner.get_trait(trait_id); - if current_crate != the_trait.crate_id && current_crate != object_crate { - let error = DefCollectorErrorKind::TraitImplOrphaned { - span: trait_impl.object_type.span.expect("object type must have a span"), - }; - errors.push((error.into(), trait_impl.file_id)); - } - - errors -} - -pub(crate) fn resolve_trait_by_path( - def_maps: &BTreeMap, - module: ModuleId, - path: Path, -) -> Result<(TraitId, Option), DefCollectorErrorKind> { - let path_resolver = StandardPathResolver::new(module); - - match path_resolver.resolve(def_maps, path.clone(), &mut None) { - Ok(PathResolution { module_def_id: ModuleDefId::TraitId(trait_id), error }) => { - Ok((trait_id, error)) - } - Ok(_) => Err(DefCollectorErrorKind::NotATrait { not_a_trait_name: path }), - Err(_) => Err(DefCollectorErrorKind::TraitNotFound { trait_path: path }), - } -} - -pub(crate) fn resolve_trait_impls( - context: &mut Context, - traits: Vec, - crate_id: CrateId, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - let interner = &mut context.def_interner; - let mut methods = Vec::<(FileId, FuncId)>::new(); - - for trait_impl in traits { - let unresolved_type = trait_impl.object_type; - let local_mod_id = trait_impl.module_id; - let module_id = ModuleId { krate: crate_id, local_id: local_mod_id }; - let path_resolver = StandardPathResolver::new(module_id); - - let self_type_span = unresolved_type.span; - - let mut resolver = - Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); - resolver.add_generics(&trait_impl.generics); - - let trait_generics = - vecmap(&trait_impl.trait_generics, |generic| resolver.resolve_type(generic.clone())); - - let self_type = resolver.resolve_type(unresolved_type.clone()); - let impl_generics = resolver.get_generics().to_vec(); - let impl_id = interner.next_trait_impl_id(); - - let mut impl_methods = functions::resolve_function_set( - interner, - crate_id, - &context.def_maps, - trait_impl.methods.clone(), - Some(self_type.clone()), - Some(impl_id), - impl_generics.clone(), - errors, - ); - - let maybe_trait_id = trait_impl.trait_id; - if let Some(trait_id) = maybe_trait_id { - for (_, func) in &impl_methods { - interner.set_function_trait(*func, self_type.clone(), trait_id); - } - } - - if 
matches!(self_type, Type::MutableReference(_)) { - let span = self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()); - let error = DefCollectorErrorKind::MutableReferenceInTraitImpl { span }; - errors.push((error.into(), trait_impl.file_id)); - } - - let mut new_resolver = - Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); - - new_resolver.set_generics(impl_generics.clone()); - new_resolver.set_self_type(Some(self_type.clone())); - - if let Some(trait_id) = maybe_trait_id { - let where_clause = trait_impl - .where_clause - .into_iter() - .flat_map(|item| new_resolver.resolve_trait_constraint(item)) - .collect(); - - let resolver_errors = new_resolver.take_errors().into_iter(); - errors.extend(resolver_errors.map(|error| (error.into(), trait_impl.file_id))); - - let resolved_trait_impl = Shared::new(TraitImpl { - ident: trait_impl.trait_path.last_segment().clone(), - typ: self_type.clone(), - trait_id, - trait_generics: trait_generics.clone(), - file: trait_impl.file_id, - where_clause, - methods: vecmap(&impl_methods, |(_, func_id)| *func_id), - }); - - let impl_generics = vecmap(impl_generics, |generic| generic.type_var); - - if let Err((prev_span, prev_file)) = interner.add_trait_implementation( - self_type.clone(), - trait_id, - trait_generics, - impl_id, - impl_generics, - resolved_trait_impl, - ) { - let error = DefCollectorErrorKind::OverlappingImpl { - typ: self_type.clone(), - span: self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()), - }; - errors.push((error.into(), trait_impl.file_id)); - - // The 'previous impl defined here' note must be a separate error currently - // since it may be in a different file and all errors have the same file id. - let error = DefCollectorErrorKind::OverlappingImplNote { span: prev_span }; - errors.push((error.into(), prev_file)); - } - - methods.append(&mut impl_methods); - } - } - - methods -} diff --git a/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs b/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs deleted file mode 100644 index 2e5ce611a7f..00000000000 --- a/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs +++ /dev/null @@ -1,33 +0,0 @@ -use super::{path_resolver::StandardPathResolver, resolver::Resolver}; -use crate::{ - graph::CrateId, - hir::{ - def_collector::dc_crate::{CompilationError, UnresolvedTypeAlias}, - def_map::ModuleId, - Context, - }, - node_interner::TypeAliasId, -}; -use fm::FileId; -use std::collections::BTreeMap; - -pub(crate) fn resolve_type_aliases( - context: &mut Context, - type_aliases: BTreeMap, - crate_id: CrateId, -) -> Vec<(CompilationError, FileId)> { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - for (alias_id, unresolved_typ) in type_aliases { - let path_resolver = StandardPathResolver::new(ModuleId { - local_id: unresolved_typ.module_id, - krate: crate_id, - }); - let file = unresolved_typ.file_id; - let (typ, generics, resolver_errors) = - Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file) - .resolve_type_alias(unresolved_typ.type_alias_def, alias_id); - errors.extend(resolver_errors.iter().cloned().map(|e| (e.into(), file))); - context.def_interner.set_type_alias(alias_id, typ, generics); - } - errors -} diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs deleted file mode 100644 index 9dfe0901016..00000000000 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ /dev/null @@ -1,1395 +0,0 @@ -use 
iter_extended::vecmap; -use noirc_errors::Span; - -use crate::ast::{BinaryOpKind, IntegerBitSize, UnaryOp}; -use crate::hir_def::expr::HirCallExpression; -use crate::macros_api::Signedness; -use crate::{ - hir::{resolution::resolver::verify_mutable_reference, type_check::errors::Source}, - hir_def::{ - expr::{ - self, HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirExpression, HirIdent, - HirLiteral, HirMethodCallExpression, HirMethodReference, HirPrefixExpression, ImplKind, - }, - types::Type, - }, - node_interner::{DefinitionKind, ExprId, FuncId, TraitId, TraitImplKind, TraitMethodId}, - TypeBinding, TypeBindings, TypeVariableKind, -}; - -use super::NoMatchingImplFoundError; -use super::{errors::TypeCheckError, TypeChecker}; - -impl<'interner> TypeChecker<'interner> { - fn check_if_deprecated(&mut self, expr: &ExprId) { - if let HirExpression::Ident(expr::HirIdent { location, id, impl_kind: _ }, _) = - self.interner.expression(expr) - { - if let Some(DefinitionKind::Function(func_id)) = - self.interner.try_definition(id).map(|def| &def.kind) - { - let attributes = self.interner.function_attributes(func_id); - if let Some(note) = attributes.get_deprecated_note() { - self.errors.push(TypeCheckError::CallDeprecated { - name: self.interner.definition_name(id).to_string(), - note, - span: location.span, - }); - } - } - } - } - - fn is_unconstrained_call(&self, expr: &ExprId) -> bool { - if let HirExpression::Ident(expr::HirIdent { id, .. }, _) = self.interner.expression(expr) { - if let Some(DefinitionKind::Function(func_id)) = - self.interner.try_definition(id).map(|def| &def.kind) - { - let modifiers = self.interner.function_modifiers(func_id); - return modifiers.is_unconstrained; - } - } - false - } - - fn check_hir_array_literal( - &mut self, - hir_array_literal: HirArrayLiteral, - ) -> (Result>, Box) { - match hir_array_literal { - HirArrayLiteral::Standard(arr) => { - let elem_types = vecmap(&arr, |arg| self.check_expression(arg)); - - let first_elem_type = elem_types - .first() - .cloned() - .unwrap_or_else(|| self.interner.next_type_variable()); - - // Check if the array is homogeneous - for (index, elem_type) in elem_types.iter().enumerate().skip(1) { - let location = self.interner.expr_location(&arr[index]); - - elem_type.unify(&first_elem_type, &mut self.errors, || { - TypeCheckError::NonHomogeneousArray { - first_span: self.interner.expr_location(&arr[0]).span, - first_type: first_elem_type.to_string(), - first_index: index, - second_span: location.span, - second_type: elem_type.to_string(), - second_index: index + 1, - } - .add_context("elements in an array must have the same type") - }); - } - - (Ok(arr.len() as u32), Box::new(first_elem_type.clone())) - } - HirArrayLiteral::Repeated { repeated_element, length } => { - let elem_type = self.check_expression(&repeated_element); - let length = match length { - Type::Constant(length) => Ok(length), - other => Err(Box::new(other)), - }; - (length, Box::new(elem_type)) - } - } - } - - /// Infers a type for a given expression, and return this type. - /// As a side-effect, this function will also remember this type in the NodeInterner - /// for the given expr_id key. - /// - /// This function also converts any HirExpression::MethodCalls `a.foo(b, c)` into - /// an equivalent HirExpression::Call in the form `foo(a, b, c)`. This cannot - /// be done earlier since we need to know the type of the object `a` to resolve which - /// function `foo` to refer to. 
- pub(crate) fn check_expression(&mut self, expr_id: &ExprId) -> Type { - let typ = match self.interner.expression(expr_id) { - HirExpression::Ident(ident, generics) => self.check_ident(ident, expr_id, generics), - HirExpression::Literal(literal) => match literal { - HirLiteral::Array(hir_array_literal) => { - let (length, elem_type) = self.check_hir_array_literal(hir_array_literal); - Type::Array( - length.map_or_else( - |typ| typ, - |constant| Box::new(Type::constant_variable(constant, self.interner)), - ), - elem_type, - ) - } - HirLiteral::Slice(hir_array_literal) => { - let (length_type, elem_type) = self.check_hir_array_literal(hir_array_literal); - match length_type { - Ok(_length) => Type::Slice(elem_type), - Err(_non_constant) => { - self.errors.push(TypeCheckError::NonConstantSliceLength { - span: self.interner.expr_span(expr_id), - }); - Type::Error - } - } - } - HirLiteral::Bool(_) => Type::Bool, - HirLiteral::Integer(_, _) => self.polymorphic_integer_or_field(), - HirLiteral::Str(string) => { - let len = Type::Constant(string.len() as u32); - Type::String(Box::new(len)) - } - HirLiteral::FmtStr(string, idents) => { - let len = Type::Constant(string.len() as u32); - let types = vecmap(&idents, |elem| self.check_expression(elem)); - Type::FmtString(Box::new(len), Box::new(Type::Tuple(types))) - } - HirLiteral::Unit => Type::Unit, - }, - HirExpression::Infix(infix_expr) => { - // The type of the infix expression must be looked up from a type table - let lhs_type = self.check_expression(&infix_expr.lhs); - let rhs_type = self.check_expression(&infix_expr.rhs); - - let lhs_span = self.interner.expr_span(&infix_expr.lhs); - let rhs_span = self.interner.expr_span(&infix_expr.rhs); - let span = lhs_span.merge(rhs_span); - - let operator = &infix_expr.operator; - match self.infix_operand_type_rules(&lhs_type, operator, &rhs_type, span) { - Ok((typ, use_impl)) => { - if use_impl { - let id = infix_expr.trait_method_id; - - // Delay checking the trait constraint until the end of the function. - // Checking it now could bind an unbound type variable to any type - // that implements the trait. 
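The deferral described in the comment above can be sketched in isolation: constraints discovered while checking expressions are queued, and only verified once the whole body has been checked and type variables have had a chance to be bound. The types below are toy stand-ins, not the compiler's real data structures:

// Queue trait constraints instead of resolving them on the spot.
struct TraitConstraint {
    trait_name: &'static str,
    on_type: &'static str,
}

struct Checker {
    deferred: Vec<TraitConstraint>,
}

impl Checker {
    fn check_operator_use(&mut self, lhs_type: &'static str) {
        // Instead of resolving `lhs_type: Add` immediately, queue it.
        self.deferred.push(TraitConstraint { trait_name: "Add", on_type: lhs_type });
    }

    fn finish(&mut self, implements: impl Fn(&TraitConstraint) -> bool) -> Vec<String> {
        self.deferred
            .drain(..)
            .filter(|c| !implements(c))
            .map(|c| format!("no impl of {} for {}", c.trait_name, c.on_type))
            .collect()
    }
}

fn main() {
    let mut checker = Checker { deferred: Vec::new() };
    checker.check_operator_use("Field");
    checker.check_operator_use("bool");
    // Pretend only `Field` implements `Add`.
    let errors = checker.finish(|c| c.on_type == "Field");
    assert_eq!(errors, vec!["no impl of Add for bool".to_string()]);
}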
- let constraint = crate::hir_def::traits::TraitConstraint { - typ: lhs_type.clone(), - trait_id: id.trait_id, - trait_generics: Vec::new(), - }; - self.trait_constraints.push((constraint, *expr_id)); - self.typecheck_operator_method(*expr_id, id, &lhs_type, span); - } - typ - } - Err(error) => { - self.errors.push(error); - Type::Error - } - } - } - HirExpression::Index(index_expr) => self.check_index_expression(expr_id, index_expr), - HirExpression::Call(call_expr) => { - let function = self.check_expression(&call_expr.func); - - let args = vecmap(&call_expr.arguments, |arg| { - let typ = self.check_expression(arg); - (typ, *arg, self.interner.expr_span(arg)) - }); - - let span = self.interner.expr_span(expr_id); - self.check_call(&call_expr, function, args, span) - } - HirExpression::MethodCall(mut method_call) => { - let method_call_span = self.interner.expr_span(expr_id); - let object = method_call.object; - let object_span = self.interner.expr_span(&method_call.object); - let mut object_type = self.check_expression(&method_call.object).follow_bindings(); - let method_name = method_call.method.0.contents.as_str(); - match self.lookup_method(&object_type, method_name, expr_id) { - Some(method_ref) => { - // Desugar the method call into a normal, resolved function call - // so that the backend doesn't need to worry about methods - let location = method_call.location; - - // Automatically add `&mut` if the method expects a mutable reference and - // the object is not already one. - let func_id = match &method_ref { - HirMethodReference::FuncId(func_id) => *func_id, - HirMethodReference::TraitMethodId(method_id, _) => { - let id = self.interner.trait_method_id(*method_id); - let definition = self.interner.definition(id); - let DefinitionKind::Function(func_id) = definition.kind else { - unreachable!( - "Expected trait function to be a DefinitionKind::Function" - ) - }; - func_id - } - }; - - if func_id != FuncId::dummy_id() { - let function_type = self.interner.function_meta(&func_id).typ.clone(); - self.try_add_mutable_reference_to_object( - &mut method_call, - &function_type, - &mut object_type, - ); - } - - // These arguments will be given to the desugared function call. - // Compared to the method arguments, they also contain the object. - let mut function_args = Vec::with_capacity(method_call.arguments.len() + 1); - - function_args.push((object_type.clone(), object, object_span)); - - for arg in method_call.arguments.iter() { - let span = self.interner.expr_span(arg); - let typ = self.check_expression(arg); - function_args.push((typ, *arg, span)); - } - - // TODO: update object_type here? - let ((function_id, _), function_call) = method_call.into_function_call( - &method_ref, - object_type, - location, - self.interner, - ); - - let func_type = self.check_expression(&function_id); - - // Type check the new call now that it has been changed from a method call - // to a function call. This way we avoid duplicating code. - // We call `check_call` rather than `check_expression` directly as we want to avoid - // resolving the object type again once it is part of the arguments. 
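The desugaring performed just above, turning `a.foo(b, c)` into a plain call `foo(a, b, c)` once the receiver's type has been used to pick the concrete function, can be illustrated on a toy AST; the enum below is hypothetical and self-contained, not the compiler's HIR:

#[derive(Debug, PartialEq)]
enum Expr {
    Var(String),
    Call { func: String, args: Vec<Expr> },
    MethodCall { object: Box<Expr>, method: String, args: Vec<Expr> },
}

// Rewrite `object.method(args...)` into `method(object, args...)`, moving the
// receiver to the front of the argument list.
fn desugar_method_call(expr: Expr) -> Expr {
    match expr {
        Expr::MethodCall { object, method, args } => {
            let mut new_args = Vec::with_capacity(args.len() + 1);
            new_args.push(*object);
            new_args.extend(args);
            Expr::Call { func: method, args: new_args }
        }
        other => other,
    }
}

fn main() {
    let call = Expr::MethodCall {
        object: Box::new(Expr::Var("a".into())),
        method: "foo".into(),
        args: vec![Expr::Var("b".into()), Expr::Var("c".into())],
    };
    let desugared = desugar_method_call(call);
    assert_eq!(
        desugared,
        Expr::Call {
            func: "foo".into(),
            args: vec![Expr::Var("a".into()), Expr::Var("b".into()), Expr::Var("c".into())],
        }
    );
}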
- let typ = self.check_call( - &function_call, - func_type, - function_args, - method_call_span, - ); - - self.interner.replace_expr(expr_id, HirExpression::Call(function_call)); - - typ - } - None => Type::Error, - } - } - HirExpression::Cast(cast_expr) => { - // Evaluate the LHS - let lhs_type = self.check_expression(&cast_expr.lhs); - let span = self.interner.expr_span(expr_id); - self.check_cast(lhs_type, cast_expr.r#type, span) - } - HirExpression::Block(block_expr) => self.check_block(block_expr), - HirExpression::Prefix(prefix_expr) => { - let rhs_type = self.check_expression(&prefix_expr.rhs); - let span = self.interner.expr_span(&prefix_expr.rhs); - self.type_check_prefix_operand(&prefix_expr.operator, &rhs_type, span) - } - HirExpression::If(if_expr) => self.check_if_expr(&if_expr, expr_id), - HirExpression::Constructor(constructor) => self.check_constructor(constructor, expr_id), - HirExpression::MemberAccess(access) => self.check_member_access(access, *expr_id), - HirExpression::Error => Type::Error, - HirExpression::Tuple(elements) => { - Type::Tuple(vecmap(&elements, |elem| self.check_expression(elem))) - } - HirExpression::Lambda(lambda) => { - let captured_vars = vecmap(lambda.captures, |capture| { - self.interner.definition_type(capture.ident.id) - }); - - let env_type: Type = - if captured_vars.is_empty() { Type::Unit } else { Type::Tuple(captured_vars) }; - - let params = vecmap(lambda.parameters, |(pattern, typ)| { - self.bind_pattern(&pattern, typ.clone()); - typ - }); - - let actual_return = self.check_expression(&lambda.body); - - let span = self.interner.expr_span(&lambda.body); - self.unify(&actual_return, &lambda.return_type, || TypeCheckError::TypeMismatch { - expected_typ: lambda.return_type.to_string(), - expr_typ: actual_return.to_string(), - expr_span: span, - }); - - Type::Function(params, Box::new(lambda.return_type), Box::new(env_type)) - } - HirExpression::Quote(_) => Type::Quoted(crate::QuotedType::Quoted), - HirExpression::Comptime(block) => self.check_block(block), - - // Unquote should be inserted & removed by the comptime interpreter. - // Even if we allowed it here, we wouldn't know what type to give to the result. 
- HirExpression::Unquote(block) => { - unreachable!("Unquote remaining during type checking {block:?}") - } - }; - - self.interner.push_expr_type(*expr_id, typ.clone()); - typ - } - - fn check_call( - &mut self, - call: &HirCallExpression, - func_type: Type, - args: Vec<(Type, ExprId, Span)>, - span: Span, - ) -> Type { - // Need to setup these flags here as `self` is borrowed mutably to type check the rest of the call expression - // These flags are later used to type check calls to unconstrained functions from constrained functions - let func_mod = self.current_function.map(|func| self.interner.function_modifiers(&func)); - let is_current_func_constrained = - func_mod.map_or(true, |func_mod| !func_mod.is_unconstrained); - - let is_unconstrained_call = self.is_unconstrained_call(&call.func); - self.check_if_deprecated(&call.func); - - // Check that we are not passing a mutable reference from a constrained runtime to an unconstrained runtime - if is_current_func_constrained && is_unconstrained_call { - for (typ, _, _) in args.iter() { - if !typ.is_valid_for_unconstrained_boundary() { - self.errors.push(TypeCheckError::ConstrainedReferenceToUnconstrained { span }); - } - } - } - - let return_type = self.bind_function_type(func_type, args, span); - - // Check that we are not passing a slice from an unconstrained runtime to a constrained runtime - if is_current_func_constrained && is_unconstrained_call { - if return_type.contains_slice() { - self.errors.push(TypeCheckError::UnconstrainedSliceReturnToConstrained { span }); - } else if matches!(&return_type.follow_bindings(), Type::MutableReference(_)) { - self.errors.push(TypeCheckError::UnconstrainedReferenceToConstrained { span }); - } - }; - - return_type - } - - fn check_block(&mut self, block: HirBlockExpression) -> Type { - let mut block_type = Type::Unit; - - let statements = block.statements(); - for (i, stmt) in statements.iter().enumerate() { - let expr_type = self.check_statement(stmt); - - if let crate::hir_def::stmt::HirStatement::Semi(expr) = self.interner.statement(stmt) { - let inner_expr_type = self.interner.id_type(expr); - let span = self.interner.expr_span(&expr); - - self.unify(&inner_expr_type, &Type::Unit, || TypeCheckError::UnusedResultError { - expr_type: inner_expr_type.clone(), - expr_span: span, - }); - } - - if i + 1 == statements.len() { - block_type = expr_type; - } - } - - block_type - } - - /// Returns the type of the given identifier - fn check_ident( - &mut self, - ident: HirIdent, - expr_id: &ExprId, - generics: Option>, - ) -> Type { - let mut bindings = TypeBindings::new(); - - // Add type bindings from any constraints that were used. - // We need to do this first since otherwise instantiating the type below - // will replace each trait generic with a fresh type variable, rather than - // the type used in the trait constraint (if it exists). See #4088. - if let ImplKind::TraitMethod(_, constraint, assumed) = &ident.impl_kind { - let the_trait = self.interner.get_trait(constraint.trait_id); - assert_eq!(the_trait.generics.len(), constraint.trait_generics.len()); - - for (param, arg) in the_trait.generics.iter().zip(&constraint.trait_generics) { - // Avoid binding t = t - if !arg.occurs(param.type_var.id()) { - bindings.insert(param.type_var.id(), (param.type_var.clone(), arg.clone())); - } - } - - // If the trait impl is already assumed to exist we should add any type bindings for `Self`. 
- // Otherwise `self` will be replaced with a fresh type variable, which will require the user - // to specify a redundant type annotation. - if *assumed { - bindings.insert( - the_trait.self_type_typevar_id, - (the_trait.self_type_typevar.clone(), constraint.typ.clone()), - ); - } - } - - // An identifiers type may be forall-quantified in the case of generic functions. - // E.g. `fn foo(t: T, field: Field) -> T` has type `forall T. fn(T, Field) -> T`. - // We must instantiate identifiers at every call site to replace this T with a new type - // variable to handle generic functions. - let t = self.interner.id_type_substitute_trait_as_type(ident.id); - - let definition = self.interner.try_definition(ident.id); - let function_generic_count = definition.map_or(0, |definition| match &definition.kind { - DefinitionKind::Function(function) => { - self.interner.function_modifiers(function).generic_count - } - _ => 0, - }); - - let span = self.interner.expr_span(expr_id); - // This instantiates a trait's generics as well which need to be set - // when the constraint below is later solved for when the function is - // finished. How to link the two? - let (typ, bindings) = self.instantiate(t, bindings, generics, function_generic_count, span); - - // Push any trait constraints required by this definition to the context - // to be checked later when the type of this variable is further constrained. - if let Some(definition) = self.interner.try_definition(ident.id) { - if let DefinitionKind::Function(func_id) = definition.kind { - let function = self.interner.function_meta(&func_id); - for mut constraint in function.trait_constraints.clone() { - constraint.apply_bindings(&bindings); - self.trait_constraints.push((constraint, *expr_id)); - } - } - } - - if let ImplKind::TraitMethod(_, mut constraint, assumed) = ident.impl_kind { - constraint.apply_bindings(&bindings); - if assumed { - let trait_impl = TraitImplKind::Assumed { - object_type: constraint.typ, - trait_generics: constraint.trait_generics, - }; - self.interner.select_impl_for_expression(*expr_id, trait_impl); - } else { - // Currently only one impl can be selected per expr_id, so this - // constraint needs to be pushed after any other constraints so - // that monomorphization can resolve this trait method to the correct impl. - self.trait_constraints.push((constraint, *expr_id)); - } - } - - self.interner.store_instantiation_bindings(*expr_id, bindings); - typ - } - - fn instantiate( - &mut self, - typ: Type, - bindings: TypeBindings, - turbofish_generics: Option>, - function_generic_count: usize, - span: Span, - ) -> (Type, TypeBindings) { - match turbofish_generics { - Some(turbofish_generics) => { - if turbofish_generics.len() != function_generic_count { - self.errors.push(TypeCheckError::IncorrectTurbofishGenericCount { - expected_count: function_generic_count, - actual_count: turbofish_generics.len(), - span, - }); - typ.instantiate_with_bindings(bindings, self.interner) - } else { - // Fetch the count of any implicit generics on the function, such as - // for a method within a generic impl. 
- let implicit_generic_count = match &typ { - Type::Forall(generics, _) => generics.len() - function_generic_count, - _ => 0, - }; - typ.instantiate_with(turbofish_generics, self.interner, implicit_generic_count) - } - } - None => typ.instantiate_with_bindings(bindings, self.interner), - } - } - - pub fn verify_trait_constraint( - &mut self, - object_type: &Type, - trait_id: TraitId, - trait_generics: &[Type], - function_ident_id: ExprId, - span: Span, - ) { - match self.interner.lookup_trait_implementation(object_type, trait_id, trait_generics) { - Ok(impl_kind) => { - self.interner.select_impl_for_expression(function_ident_id, impl_kind); - } - Err(erroring_constraints) => { - if erroring_constraints.is_empty() { - self.errors.push(TypeCheckError::TypeAnnotationsNeeded { span }); - } else if let Some(error) = - NoMatchingImplFoundError::new(self.interner, erroring_constraints, span) - { - self.errors.push(TypeCheckError::NoMatchingImplFound(error)); - } - } - } - } - - /// Check if the given method type requires a mutable reference to the object type, and check - /// if the given object type is already a mutable reference. If not, add one. - /// This is used to automatically transform a method call: `foo.bar()` into a function - /// call: `bar(&mut foo)`. - /// - /// A notable corner case of this function is where it interacts with auto-deref of `.`. - /// If a field is being mutated e.g. `foo.bar.mutate_bar()` where `foo: &mut Foo`, the compiler - /// will insert a dereference before bar `(*foo).bar.mutate_bar()` which would cause us to - /// mutate a copy of bar rather than a reference to it. We must check for this corner case here - /// and remove the implicitly added dereference operator if we find one. - fn try_add_mutable_reference_to_object( - &mut self, - method_call: &mut HirMethodCallExpression, - function_type: &Type, - object_type: &mut Type, - ) { - let expected_object_type = match function_type { - Type::Function(args, _, _) => args.first(), - Type::Forall(_, typ) => match typ.as_ref() { - Type::Function(args, _, _) => args.first(), - typ => unreachable!("Unexpected type for function: {typ}"), - }, - typ => unreachable!("Unexpected type for function: {typ}"), - }; - - if let Some(expected_object_type) = expected_object_type { - let actual_type = object_type.follow_bindings(); - - if matches!(expected_object_type.follow_bindings(), Type::MutableReference(_)) { - if !matches!(actual_type, Type::MutableReference(_)) { - if let Err(error) = verify_mutable_reference(self.interner, method_call.object) - { - self.errors.push(TypeCheckError::ResolverError(error)); - } - - let new_type = Type::MutableReference(Box::new(actual_type)); - *object_type = new_type.clone(); - - // First try to remove a dereference operator that may have been implicitly - // inserted by a field access expression `foo.bar` on a mutable reference `foo`. 
- let new_object = self.try_remove_implicit_dereference(method_call.object); - - // If that didn't work, then wrap the whole expression in an `&mut` - method_call.object = new_object.unwrap_or_else(|| { - let location = self.interner.id_location(method_call.object); - - let new_object = - self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { - operator: UnaryOp::MutableReference, - rhs: method_call.object, - trait_method_id: None, - })); - self.interner.push_expr_type(new_object, new_type); - self.interner.push_expr_location(new_object, location.span, location.file); - new_object - }); - } - // Otherwise if the object type is a mutable reference and the method is not, insert as - // many dereferences as needed. - } else if matches!(actual_type, Type::MutableReference(_)) { - let (object, new_type) = - self.insert_auto_dereferences(method_call.object, actual_type); - *object_type = new_type; - method_call.object = object; - } - } - } - - /// Insert as many dereference operations as necessary to automatically dereference a method - /// call object to its base value type T. - pub(crate) fn insert_auto_dereferences(&mut self, object: ExprId, typ: Type) -> (ExprId, Type) { - if let Type::MutableReference(element) = typ { - let location = self.interner.id_location(object); - - let object = self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { - operator: UnaryOp::Dereference { implicitly_added: true }, - rhs: object, - trait_method_id: None, - })); - self.interner.push_expr_type(object, element.as_ref().clone()); - self.interner.push_expr_location(object, location.span, location.file); - - // Recursively dereference to allow for converting &mut &mut T to T - self.insert_auto_dereferences(object, *element) - } else { - (object, typ) - } - } - - /// Given a method object: `(*foo).bar` of a method call `(*foo).bar.baz()`, remove the - /// implicitly added dereference operator if one is found. - /// - /// Returns Some(new_expr_id) if a dereference was removed and None otherwise. - fn try_remove_implicit_dereference(&mut self, object: ExprId) -> Option { - match self.interner.expression(&object) { - HirExpression::MemberAccess(mut access) => { - let new_lhs = self.try_remove_implicit_dereference(access.lhs)?; - access.lhs = new_lhs; - access.is_offset = true; - - // `object` will have a different type now, which will be filled in - // later when type checking the method call as a function call. - self.interner.replace_expr(&object, HirExpression::MemberAccess(access)); - Some(object) - } - HirExpression::Prefix(prefix) => match prefix.operator { - // Found a dereference we can remove. Now just replace it with its rhs to remove it. - UnaryOp::Dereference { implicitly_added: true } => Some(prefix.rhs), - _ => None, - }, - _ => None, - } - } - - fn check_index_expression( - &mut self, - id: &ExprId, - mut index_expr: expr::HirIndexExpression, - ) -> Type { - let index_type = self.check_expression(&index_expr.index); - let span = self.interner.expr_span(&index_expr.index); - - index_type.unify(&self.polymorphic_integer_or_field(), &mut self.errors, || { - TypeCheckError::TypeMismatch { - expected_typ: "an integer".to_owned(), - expr_typ: index_type.to_string(), - expr_span: span, - } - }); - - // When writing `a[i]`, if `a : &mut ...` then automatically dereference `a` as many - // times as needed to get the underlying array. 
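The auto-dereferencing rule in the comment above, stripping `&mut` wrappers until the underlying base type is reached, can be sketched with a toy `Type` enum; this is a self-contained illustration with hypothetical names, not the compiler's types:

#[derive(Debug, PartialEq)]
enum Type {
    Field,
    Array(u32, Box<Type>),
    MutableReference(Box<Type>),
}

// Strip `&mut` wrappers, returning the base type and how many dereference
// operations the compiler would need to insert.
fn insert_auto_dereferences(typ: Type) -> (Type, usize) {
    match typ {
        Type::MutableReference(element) => {
            let (inner, count) = insert_auto_dereferences(*element);
            (inner, count + 1)
        }
        other => (other, 0),
    }
}

fn main() {
    // `a : &mut &mut [Field; 3]` needs two dereferences before indexing.
    let typ = Type::MutableReference(Box::new(Type::MutableReference(Box::new(
        Type::Array(3, Box::new(Type::Field)),
    ))));
    let (base, derefs) = insert_auto_dereferences(typ);
    assert_eq!(base, Type::Array(3, Box::new(Type::Field)));
    assert_eq!(derefs, 2);
}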
- let lhs_type = self.check_expression(&index_expr.collection); - let (new_lhs, lhs_type) = self.insert_auto_dereferences(index_expr.collection, lhs_type); - index_expr.collection = new_lhs; - self.interner.replace_expr(id, HirExpression::Index(index_expr)); - - match lhs_type.follow_bindings() { - // XXX: We can check the array bounds here also, but it may be better to constant fold first - // and have ConstId instead of ExprId for constants - Type::Array(_, base_type) => *base_type, - Type::Slice(base_type) => *base_type, - Type::Error => Type::Error, - typ => { - let span = self.interner.expr_span(&new_lhs); - self.errors.push(TypeCheckError::TypeMismatch { - expected_typ: "Array".to_owned(), - expr_typ: typ.to_string(), - expr_span: span, - }); - Type::Error - } - } - } - - fn check_cast(&mut self, from: Type, to: Type, span: Span) -> Type { - match from.follow_bindings() { - Type::Integer(..) - | Type::FieldElement - | Type::TypeVariable(_, TypeVariableKind::IntegerOrField) - | Type::TypeVariable(_, TypeVariableKind::Integer) - | Type::Bool => (), - - Type::TypeVariable(_, _) => { - self.errors.push(TypeCheckError::TypeAnnotationsNeeded { span }); - return Type::Error; - } - Type::Error => return Type::Error, - from => { - self.errors.push(TypeCheckError::InvalidCast { from, span }); - return Type::Error; - } - } - - match to { - Type::Integer(sign, bits) => Type::Integer(sign, bits), - Type::FieldElement => Type::FieldElement, - Type::Bool => Type::Bool, - Type::Error => Type::Error, - _ => { - self.errors.push(TypeCheckError::UnsupportedCast { span }); - Type::Error - } - } - } - - fn check_if_expr(&mut self, if_expr: &expr::HirIfExpression, expr_id: &ExprId) -> Type { - let cond_type = self.check_expression(&if_expr.condition); - let then_type = self.check_expression(&if_expr.consequence); - - let expr_span = self.interner.expr_span(&if_expr.condition); - - self.unify(&cond_type, &Type::Bool, || TypeCheckError::TypeMismatch { - expected_typ: Type::Bool.to_string(), - expr_typ: cond_type.to_string(), - expr_span, - }); - - match if_expr.alternative { - None => Type::Unit, - Some(alternative) => { - let else_type = self.check_expression(&alternative); - - let expr_span = self.interner.expr_span(expr_id); - self.unify(&then_type, &else_type, || { - let err = TypeCheckError::TypeMismatch { - expected_typ: then_type.to_string(), - expr_typ: else_type.to_string(), - expr_span, - }; - - let context = if then_type == Type::Unit { - "Are you missing a semicolon at the end of your 'else' branch?" - } else if else_type == Type::Unit { - "Are you missing a semicolon at the end of the first block of this 'if'?" - } else { - "Expected the types of both if branches to be equal" - }; - - err.add_context(context) - }); - - then_type - } - } - } - - fn check_constructor( - &mut self, - constructor: expr::HirConstructorExpression, - expr_id: &ExprId, - ) -> Type { - let typ = constructor.r#type; - let generics = constructor.struct_generics; - - // Sort argument types by name so we can zip with the struct type in the same ordering. - // Note that we use a Vec to store the original arguments (rather than a BTreeMap) to - // preserve the evaluation order of the source code. 
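The field-matching strategy from the comment above, sorting both the provided arguments and the declared fields by name and zipping them, is easy to see in isolation; the data below is illustrative only:

fn main() {
    // (field name, argument expression) in source order, and (field name, declared type).
    let mut args = vec![("y", "3"), ("x", "foo()")];
    let mut fields = vec![("x", "Field"), ("y", "u32")];

    args.sort_by(|a, b| a.0.cmp(b.0));
    fields.sort_by(|a, b| a.0.cmp(b.0));

    for ((param_name, param_type), (arg_name, arg_expr)) in fields.iter().zip(&args) {
        // With matching names we can unify the argument's type with the
        // declared field type; a name mismatch was already reported earlier.
        assert_eq!(param_name, arg_name);
        println!("check `{arg_expr}` against field `{param_name}: {param_type}`");
    }
}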
- let mut args = constructor.fields; - sort_by_key_ref(&mut args, |(name, _)| name); - - let mut fields = typ.borrow().get_fields(&generics); - sort_by_key_ref(&mut fields, |(name, _)| name); - - for ((param_name, param_type), (arg_ident, arg)) in fields.into_iter().zip(args) { - // This can be false if the user provided an incorrect field count. That error should - // be caught during name resolution so it is fine to skip typechecking if there is a - // mismatch here as long as we continue typechecking the rest of the program to the best - // of our ability. - if param_name == arg_ident.0.contents { - let arg_type = self.check_expression(&arg); - - let span = self.interner.expr_span(expr_id); - self.unify_with_coercions(&arg_type, ¶m_type, arg, || { - TypeCheckError::TypeMismatch { - expected_typ: param_type.to_string(), - expr_typ: arg_type.to_string(), - expr_span: span, - } - }); - } - } - - Type::Struct(typ, generics) - } - - fn check_member_access(&mut self, mut access: expr::HirMemberAccess, expr_id: ExprId) -> Type { - let lhs_type = self.check_expression(&access.lhs).follow_bindings(); - let span = self.interner.expr_span(&expr_id); - let access_lhs = &mut access.lhs; - - let dereference_lhs = |this: &mut Self, lhs_type, element| { - let old_lhs = *access_lhs; - - *access_lhs = this.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { - operator: crate::ast::UnaryOp::Dereference { implicitly_added: true }, - rhs: old_lhs, - trait_method_id: None, - })); - this.interner.push_expr_type(old_lhs, lhs_type); - this.interner.push_expr_type(*access_lhs, element); - - let old_location = this.interner.id_location(old_lhs); - this.interner.push_expr_location(*access_lhs, span, old_location.file); - }; - - // If this access is just a field offset, we want to avoid dereferencing - let dereference_lhs = (!access.is_offset).then_some(dereference_lhs); - - match self.check_field_access(&lhs_type, &access.rhs.0.contents, span, dereference_lhs) { - Some((element_type, index)) => { - self.interner.set_field_index(expr_id, index); - // We must update `access` in case we added any dereferences to it - self.interner.replace_expr(&expr_id, HirExpression::MemberAccess(access)); - element_type - } - None => Type::Error, - } - } - - /// This will verify that an expression in the form `lhs.rhs_name` has the given field and will push - /// a type error if it does not. If there is no error, the type of the struct/tuple field is returned - /// along with the index of the field in question. - /// - /// This function is abstracted from check_member_access so that it can be shared between - /// there and the HirLValue::MemberAccess case of check_lvalue. - /// - /// `dereference_lhs` is called when the lhs type is a Type::MutableReference that should be - /// automatically dereferenced so its field can be extracted. This function is expected to - /// perform any mutations necessary to wrap the lhs in a UnaryOp::Dereference prefix - /// expression. The second parameter of this function represents the lhs_type (which should - /// always be a Type::MutableReference if `dereference_lhs` is called) and the third - /// represents the element type. - /// - /// If `dereference_lhs` is None, this will assume we're taking the offset of a struct field - /// rather than dereferencing it. So the result of `foo.bar` with a `foo : &mut Foo` will - /// be a `&mut Bar` rather than just a `Bar`. 
- pub(super) fn check_field_access( - &mut self, - lhs_type: &Type, - field_name: &str, - span: Span, - dereference_lhs: Option, - ) -> Option<(Type, usize)> { - let lhs_type = lhs_type.follow_bindings(); - - match &lhs_type { - Type::Struct(s, args) => { - let s = s.borrow(); - if let Some((field, index)) = s.get_field(field_name, args) { - return Some((field, index)); - } - } - Type::Tuple(elements) => { - if let Ok(index) = field_name.parse::() { - let length = elements.len(); - if index < length { - return Some((elements[index].clone(), index)); - } else { - self.errors.push(TypeCheckError::TupleIndexOutOfBounds { - index, - lhs_type, - length, - span, - }); - return None; - } - } - } - // If the lhs is a mutable reference we automatically transform - // lhs.field into (*lhs).field - Type::MutableReference(element) => { - if let Some(mut dereference_lhs) = dereference_lhs { - dereference_lhs(self, lhs_type.clone(), element.as_ref().clone()); - return self.check_field_access( - element, - field_name, - span, - Some(dereference_lhs), - ); - } else { - let (element, index) = - self.check_field_access(element, field_name, span, dereference_lhs)?; - return Some((Type::MutableReference(Box::new(element)), index)); - } - } - _ => (), - } - - // If we get here the type has no field named 'access.rhs'. - // Now we specialize the error message based on whether we know the object type in question yet. - if let Type::TypeVariable(..) = &lhs_type { - self.errors.push(TypeCheckError::TypeAnnotationsNeeded { span }); - } else if lhs_type != Type::Error { - self.errors.push(TypeCheckError::AccessUnknownMember { - lhs_type, - field_name: field_name.to_string(), - span, - }); - } - - None - } - - // Given a binary comparison operator and another type. This method will produce the output type - // and a boolean indicating whether to use the trait impl corresponding to the operator - // or not. A value of false indicates the caller to use a primitive operation for this - // operator, while a true value indicates a user-provided trait impl is required. - fn comparator_operand_type_rules( - &mut self, - lhs_type: &Type, - rhs_type: &Type, - op: &HirBinaryOp, - span: Span, - ) -> Result<(Type, bool), TypeCheckError> { - use Type::*; - - match (lhs_type, rhs_type) { - // Avoid reporting errors multiple times - (Error, _) | (_, Error) => Ok((Bool, false)), - (Alias(alias, args), other) | (other, Alias(alias, args)) => { - let alias = alias.borrow().get_type(args); - self.comparator_operand_type_rules(&alias, other, op, span) - } - - // Matches on TypeVariable must be first to follow any type - // bindings. 
- (TypeVariable(var, _), other) | (other, TypeVariable(var, _)) => { - if let TypeBinding::Bound(binding) = &*var.borrow() { - return self.comparator_operand_type_rules(other, binding, op, span); - } - - let use_impl = self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); - Ok((Bool, use_impl)) - } - (Integer(sign_x, bit_width_x), Integer(sign_y, bit_width_y)) => { - if sign_x != sign_y { - return Err(TypeCheckError::IntegerSignedness { - sign_x: *sign_x, - sign_y: *sign_y, - span, - }); - } - if bit_width_x != bit_width_y { - return Err(TypeCheckError::IntegerBitWidth { - bit_width_x: *bit_width_x, - bit_width_y: *bit_width_y, - span, - }); - } - Ok((Bool, false)) - } - (FieldElement, FieldElement) => { - if op.kind.is_valid_for_field_type() { - Ok((Bool, false)) - } else { - Err(TypeCheckError::FieldComparison { span }) - } - } - - // <= and friends are technically valid for booleans, just not very useful - (Bool, Bool) => Ok((Bool, false)), - - (lhs, rhs) => { - self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - span: op.location.span, - source: Source::Binary, - }); - Ok((Bool, true)) - } - } - } - - fn lookup_method( - &mut self, - object_type: &Type, - method_name: &str, - expr_id: &ExprId, - ) -> Option { - match object_type.follow_bindings() { - Type::Struct(typ, _args) => { - let id = typ.borrow().id; - match self.interner.lookup_method(object_type, id, method_name, false) { - Some(method_id) => Some(HirMethodReference::FuncId(method_id)), - None => { - self.errors.push(TypeCheckError::UnresolvedMethodCall { - method_name: method_name.to_string(), - object_type: object_type.clone(), - span: self.interner.expr_span(expr_id), - }); - None - } - } - } - // TODO: We should allow method calls on `impl Trait`s eventually. - // For now it is fine since they are only allowed on return types. - Type::TraitAsType(..) => { - self.errors.push(TypeCheckError::UnresolvedMethodCall { - method_name: method_name.to_string(), - object_type: object_type.clone(), - span: self.interner.expr_span(expr_id), - }); - None - } - Type::NamedGeneric(_, _, _) => { - let func_meta = self.interner.function_meta( - &self.current_function.expect("unexpected method outside a function"), - ); - - for constraint in &func_meta.trait_constraints { - if *object_type == constraint.typ { - if let Some(the_trait) = self.interner.try_get_trait(constraint.trait_id) { - for (method_index, method) in the_trait.methods.iter().enumerate() { - if method.name.0.contents == method_name { - let trait_method = TraitMethodId { - trait_id: constraint.trait_id, - method_index, - }; - return Some(HirMethodReference::TraitMethodId( - trait_method, - constraint.trait_generics.clone(), - )); - } - } - } - } - } - - self.errors.push(TypeCheckError::UnresolvedMethodCall { - method_name: method_name.to_string(), - object_type: object_type.clone(), - span: self.interner.expr_span(expr_id), - }); - None - } - // Mutable references to another type should resolve to methods of their element type. - // This may be a struct or a primitive type. 
- Type::MutableReference(element) => self - .interner - .lookup_primitive_trait_method_mut(element.as_ref(), method_name) - .map(HirMethodReference::FuncId) - .or_else(|| self.lookup_method(&element, method_name, expr_id)), - - // If we fail to resolve the object to a struct type, we have no way of type - // checking its arguments as we can't even resolve the name of the function - Type::Error => None, - - // The type variable must be unbound at this point since follow_bindings was called - Type::TypeVariable(_, TypeVariableKind::Normal) => { - let span = self.interner.expr_span(expr_id); - self.errors.push(TypeCheckError::TypeAnnotationsNeeded { span }); - None - } - - other => match self.interner.lookup_primitive_method(&other, method_name) { - Some(method_id) => Some(HirMethodReference::FuncId(method_id)), - None => { - self.errors.push(TypeCheckError::UnresolvedMethodCall { - method_name: method_name.to_string(), - object_type: object_type.clone(), - span: self.interner.expr_span(expr_id), - }); - None - } - }, - } - } - - fn bind_function_type_impl( - &mut self, - fn_params: &[Type], - fn_ret: &Type, - callsite_args: &[(Type, ExprId, Span)], - span: Span, - ) -> Type { - if fn_params.len() != callsite_args.len() { - self.errors.push(TypeCheckError::ParameterCountMismatch { - expected: fn_params.len(), - found: callsite_args.len(), - span, - }); - return Type::Error; - } - - for (param, (arg, _, arg_span)) in fn_params.iter().zip(callsite_args) { - self.unify(arg, param, || TypeCheckError::TypeMismatch { - expected_typ: param.to_string(), - expr_typ: arg.to_string(), - expr_span: *arg_span, - }); - } - - fn_ret.clone() - } - - fn bind_function_type( - &mut self, - function: Type, - args: Vec<(Type, ExprId, Span)>, - span: Span, - ) -> Type { - // Could do a single unification for the entire function type, but matching beforehand - // lets us issue a more precise error on the individual argument that fails to type check. - match function { - Type::TypeVariable(binding, TypeVariableKind::Normal) => { - if let TypeBinding::Bound(typ) = &*binding.borrow() { - return self.bind_function_type(typ.clone(), args, span); - } - - let ret = self.interner.next_type_variable(); - let args = vecmap(args, |(arg, _, _)| arg); - let env_type = self.interner.next_type_variable(); - let expected = Type::Function(args, Box::new(ret.clone()), Box::new(env_type)); - - if let Err(error) = binding.try_bind(expected, span) { - self.errors.push(error); - } - ret - } - // ignoring env for subtype on purpose - Type::Function(parameters, ret, _env) => { - self.bind_function_type_impl(¶meters, &ret, &args, span) - } - Type::Error => Type::Error, - found => { - self.errors.push(TypeCheckError::ExpectedFunction { found, span }); - Type::Error - } - } - } - - /// Handles the TypeVariable case for checking binary operators. - /// Returns true if we should use the impl for the operator instead of the primitive - /// version of it. - fn bind_type_variables_for_infix( - &mut self, - lhs_type: &Type, - op: &HirBinaryOp, - rhs_type: &Type, - span: Span, - ) -> bool { - self.unify(lhs_type, rhs_type, || TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - source: Source::Binary, - span, - }); - - let use_impl = !lhs_type.is_numeric(); - - // If this operator isn't valid for fields we have to possibly narrow - // TypeVariableKind::IntegerOrField to TypeVariableKind::Integer. - // Doing so also ensures a type error if Field is used. 
- // The is_numeric check is to allow impls for custom types to bypass this. - if !op.kind.is_valid_for_field_type() && lhs_type.is_numeric() { - let target = Type::polymorphic_integer(self.interner); - - use crate::ast::BinaryOpKind::*; - use TypeCheckError::*; - self.unify(lhs_type, &target, || match op.kind { - Less | LessEqual | Greater | GreaterEqual => FieldComparison { span }, - And | Or | Xor | ShiftRight | ShiftLeft => FieldBitwiseOp { span }, - Modulo => FieldModulo { span }, - other => unreachable!("Operator {other:?} should be valid for Field"), - }); - } - - use_impl - } - - // Given a binary operator and another type. This method will produce the output type - // and a boolean indicating whether to use the trait impl corresponding to the operator - // or not. A value of false indicates the caller to use a primitive operation for this - // operator, while a true value indicates a user-provided trait impl is required. - fn infix_operand_type_rules( - &mut self, - lhs_type: &Type, - op: &HirBinaryOp, - rhs_type: &Type, - span: Span, - ) -> Result<(Type, bool), TypeCheckError> { - if op.kind.is_comparator() { - return self.comparator_operand_type_rules(lhs_type, rhs_type, op, span); - } - - use Type::*; - match (lhs_type, rhs_type) { - // An error type on either side will always return an error - (Error, _) | (_, Error) => Ok((Error, false)), - (Alias(alias, args), other) | (other, Alias(alias, args)) => { - let alias = alias.borrow().get_type(args); - self.infix_operand_type_rules(&alias, op, other, span) - } - - // Matches on TypeVariable must be first so that we follow any type - // bindings. - (TypeVariable(int, _), other) | (other, TypeVariable(int, _)) => { - if let TypeBinding::Bound(binding) = &*int.borrow() { - return self.infix_operand_type_rules(binding, op, other, span); - } - if op.kind == BinaryOpKind::ShiftLeft || op.kind == BinaryOpKind::ShiftRight { - self.unify( - rhs_type, - &Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight), - || TypeCheckError::InvalidShiftSize { span }, - ); - let use_impl = if lhs_type.is_numeric() { - let integer_type = Type::polymorphic_integer(self.interner); - self.bind_type_variables_for_infix(lhs_type, op, &integer_type, span) - } else { - true - }; - return Ok((lhs_type.clone(), use_impl)); - } - let use_impl = self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); - Ok((other.clone(), use_impl)) - } - (Integer(sign_x, bit_width_x), Integer(sign_y, bit_width_y)) => { - if op.kind == BinaryOpKind::ShiftLeft || op.kind == BinaryOpKind::ShiftRight { - if *sign_y != Signedness::Unsigned || *bit_width_y != IntegerBitSize::Eight { - return Err(TypeCheckError::InvalidShiftSize { span }); - } - return Ok((Integer(*sign_x, *bit_width_x), false)); - } - if sign_x != sign_y { - return Err(TypeCheckError::IntegerSignedness { - sign_x: *sign_x, - sign_y: *sign_y, - span, - }); - } - if bit_width_x != bit_width_y { - return Err(TypeCheckError::IntegerBitWidth { - bit_width_x: *bit_width_x, - bit_width_y: *bit_width_y, - span, - }); - } - Ok((Integer(*sign_x, *bit_width_x), false)) - } - // The result of two Fields is always a witness - (FieldElement, FieldElement) => { - if !op.kind.is_valid_for_field_type() { - if op.kind == BinaryOpKind::Modulo { - return Err(TypeCheckError::FieldModulo { span }); - } else { - return Err(TypeCheckError::FieldBitwiseOp { span }); - } - } - Ok((FieldElement, false)) - } - - (Bool, Bool) => Ok((Bool, false)), - - (lhs, rhs) => { - if op.kind == BinaryOpKind::ShiftLeft || op.kind == 
BinaryOpKind::ShiftRight { - if rhs == &Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight) { - return Ok((lhs.clone(), true)); - } - return Err(TypeCheckError::InvalidShiftSize { span }); - } - self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - span: op.location.span, - source: Source::Binary, - }); - Ok((lhs.clone(), true)) - } - } - } - - fn type_check_prefix_operand( - &mut self, - op: &crate::ast::UnaryOp, - rhs_type: &Type, - span: Span, - ) -> Type { - let mut unify = |expected| { - rhs_type.unify(&expected, &mut self.errors, || TypeCheckError::TypeMismatch { - expr_typ: rhs_type.to_string(), - expected_typ: expected.to_string(), - expr_span: span, - }); - expected - }; - - match op { - crate::ast::UnaryOp::Minus => { - if rhs_type.is_unsigned() { - self.errors - .push(TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), span }); - } - let expected = self.polymorphic_integer_or_field(); - rhs_type.unify(&expected, &mut self.errors, || TypeCheckError::InvalidUnaryOp { - kind: rhs_type.to_string(), - span, - }); - expected - } - crate::ast::UnaryOp::Not => { - let rhs_type = rhs_type.follow_bindings(); - - // `!` can work on booleans or integers - if matches!(rhs_type, Type::Integer(..)) { - return rhs_type; - } - - unify(Type::Bool) - } - crate::ast::UnaryOp::MutableReference => { - Type::MutableReference(Box::new(rhs_type.follow_bindings())) - } - crate::ast::UnaryOp::Dereference { implicitly_added: _ } => { - let element_type = self.interner.next_type_variable(); - unify(Type::MutableReference(Box::new(element_type.clone()))); - element_type - } - } - } - - /// Prerequisite: verify_trait_constraint of the operator's trait constraint. - /// - /// Although by this point the operator is expected to already have a trait impl, - /// we still need to match the operator's type against the method's instantiated type - /// to ensure the instantiation bindings are correct and the monomorphizer can - /// re-apply the needed bindings. - fn typecheck_operator_method( - &mut self, - expr_id: ExprId, - trait_method_id: TraitMethodId, - object_type: &Type, - span: Span, - ) { - let the_trait = self.interner.get_trait(trait_method_id.trait_id); - - let method = &the_trait.methods[trait_method_id.method_index]; - let (method_type, mut bindings) = method.typ.instantiate(self.interner); - - match method_type { - Type::Function(args, _, _) => { - // We can cheat a bit and match against only the object type here since no operator - // overload uses other generic parameters or return types aside from the object type. - let expected_object_type = &args[0]; - self.unify(object_type, expected_object_type, || TypeCheckError::TypeMismatch { - expected_typ: expected_object_type.to_string(), - expr_typ: object_type.to_string(), - expr_span: span, - }); - } - other => { - unreachable!("Expected operator method to have a function type, but found {other}") - } - } - - // We must also remember to apply these substitutions to the object_type - // referenced by the selected trait impl, if one has yet to be selected. 
- let impl_kind = self.interner.get_selected_impl_for_expression(expr_id); - if let Some(TraitImplKind::Assumed { object_type, trait_generics }) = impl_kind { - let the_trait = self.interner.get_trait(trait_method_id.trait_id); - let object_type = object_type.substitute(&bindings); - bindings.insert( - the_trait.self_type_typevar_id, - (the_trait.self_type_typevar.clone(), object_type.clone()), - ); - self.interner.select_impl_for_expression( - expr_id, - TraitImplKind::Assumed { object_type, trait_generics }, - ); - } - - self.interner.store_instantiation_bindings(expr_id, bindings); - } -} - -/// Taken from: https://stackoverflow.com/a/47127500 -fn sort_by_key_ref(xs: &mut [T], key: F) -where - F: Fn(&T) -> &K, - K: ?Sized + Ord, -{ - xs.sort_by(|x, y| key(x).cmp(key(y))); -} diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 1a70bade863..b6efa17a529 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -8,801 +8,5 @@ //! as all functions are required to give their full signatures. Closures are inferred but are //! never generalized and thus cannot be used polymorphically. mod errors; -mod expr; -mod stmt; - -pub use errors::{NoMatchingImplFoundError, TypeCheckError}; -use noirc_errors::Span; - -use crate::{ - hir_def::{ - expr::HirExpression, - function::{Param, Parameters}, - stmt::HirStatement, - traits::TraitConstraint, - }, - node_interner::{ExprId, FuncId, GlobalId, NodeInterner}, - Kind, ResolvedGeneric, Type, TypeBindings, -}; - pub use self::errors::Source; - -pub struct TypeChecker<'interner> { - interner: &'interner mut NodeInterner, - errors: Vec, - current_function: Option, - - /// Trait constraints are collected during type checking until they are - /// verified at the end of a function. This is because constraints arise - /// on each variable, but it is only until function calls when the types - /// needed for the trait constraint may become known. - trait_constraints: Vec<(TraitConstraint, ExprId)>, - - /// All type variables created in the current function. - /// This map is used to default any integer type variables at the end of - /// a function (before checking trait constraints) if a type wasn't already chosen. 
- type_variables: Vec, -} - -/// Type checks a function and assigns the -/// appropriate types to expressions in a side table -pub fn type_check_func(interner: &mut NodeInterner, func_id: FuncId) -> Vec { - let meta = interner.function_meta(&func_id); - let declared_return_type = meta.return_type().clone(); - let can_ignore_ret = meta.is_stub(); - - let function_body_id = &interner.function(&func_id).as_expr(); - - let mut type_checker = TypeChecker::new(interner); - type_checker.current_function = Some(func_id); - - let meta = type_checker.interner.function_meta(&func_id); - let parameters = meta.parameters.clone(); - let expected_return_type = meta.return_type.clone(); - let expected_trait_constraints = meta.trait_constraints.clone(); - let name_span = meta.name.location.span; - - let mut errors = Vec::new(); - - // Temporarily add any impls in this function's `where` clause to scope - for constraint in &expected_trait_constraints { - let object = constraint.typ.clone(); - let trait_id = constraint.trait_id; - let generics = constraint.trait_generics.clone(); - - if !type_checker.interner.add_assumed_trait_implementation(object, trait_id, generics) { - if let Some(the_trait) = type_checker.interner.try_get_trait(trait_id) { - let trait_name = the_trait.name.to_string(); - let typ = constraint.typ.clone(); - let span = name_span; - errors.push(TypeCheckError::UnneededTraitConstraint { trait_name, typ, span }); - } - } - } - - // Bind each parameter to its annotated type. - // This is locally obvious, but it must be bound here so that the - // Definition object of the parameter in the NodeInterner is given the correct type. - for param in parameters { - check_if_type_is_valid_for_program_input(&type_checker, func_id, ¶m, &mut errors); - type_checker.bind_pattern(¶m.0, param.1); - } - - let function_last_type = type_checker.check_function_body(function_body_id); - // Check declared return type and actual return type - if !can_ignore_ret { - let (expr_span, empty_function) = function_info(type_checker.interner, function_body_id); - let func_span = type_checker.interner.expr_span(function_body_id); // XXX: We could be more specific and return the span of the last stmt, however stmts do not have spans yet - if let Type::TraitAsType(trait_id, _, generics) = &declared_return_type { - if type_checker - .interner - .lookup_trait_implementation(&function_last_type, *trait_id, generics) - .is_err() - { - let error = TypeCheckError::TypeMismatchWithSource { - expected: declared_return_type.clone(), - actual: function_last_type, - span: func_span, - source: Source::Return(expected_return_type, expr_span), - }; - errors.push(error); - } - } else { - function_last_type.unify_with_coercions( - &declared_return_type, - *function_body_id, - type_checker.interner, - &mut errors, - || { - let mut error = TypeCheckError::TypeMismatchWithSource { - expected: declared_return_type.clone(), - actual: function_last_type.clone(), - span: func_span, - source: Source::Return(expected_return_type, expr_span), - }; - - if empty_function { - error = error.add_context("implicitly returns `()` as its body has no tail or `return` expression"); - } - error - }, - ); - } - } - - // Default any type variables that still need defaulting. - // This is done before trait impl search since leaving them bindable can lead to errors - // when multiple impls are available. Instead we default first to choose the Field or u64 impl. 
- for typ in &type_checker.type_variables { - if let Type::TypeVariable(variable, kind) = typ.follow_bindings() { - let msg = "TypeChecker should only track defaultable type vars"; - variable.bind(kind.default_type().expect(msg)); - } - } - - // Verify any remaining trait constraints arising from the function body - for (mut constraint, expr_id) in std::mem::take(&mut type_checker.trait_constraints) { - let span = type_checker.interner.expr_span(&expr_id); - - if matches!(&constraint.typ, Type::MutableReference(_)) { - let (_, dereferenced_typ) = - type_checker.insert_auto_dereferences(expr_id, constraint.typ.clone()); - constraint.typ = dereferenced_typ; - } - - type_checker.verify_trait_constraint( - &constraint.typ, - constraint.trait_id, - &constraint.trait_generics, - expr_id, - span, - ); - } - - // Now remove all the `where` clause constraints we added - for constraint in &expected_trait_constraints { - type_checker.interner.remove_assumed_trait_implementations_for_trait(constraint.trait_id); - } - - errors.append(&mut type_checker.errors); - errors -} - -/// Only sized types are valid to be used as main's parameters or the parameters to a contract -/// function. If the given type is not sized (e.g. contains a slice or NamedGeneric type), an -/// error is issued. -fn check_if_type_is_valid_for_program_input( - type_checker: &TypeChecker<'_>, - func_id: FuncId, - param: &Param, - errors: &mut Vec, -) { - let meta = type_checker.interner.function_meta(&func_id); - if (meta.is_entry_point && !param.1.is_valid_for_program_input()) - || (meta.has_inline_attribute && !param.1.is_valid_non_inlined_function_input()) - { - let span = param.0.span(); - errors.push(TypeCheckError::InvalidTypeForEntryPoint { span }); - } -} - -fn function_info(interner: &NodeInterner, function_body_id: &ExprId) -> (noirc_errors::Span, bool) { - let (expr_span, empty_function) = - if let HirExpression::Block(block) = interner.expression(function_body_id) { - let last_stmt = block.statements().last(); - let mut span = interner.expr_span(function_body_id); - - if let Some(last_stmt) = last_stmt { - if let HirStatement::Expression(expr) = interner.statement(last_stmt) { - span = interner.expr_span(&expr); - } - } - - (span, last_stmt.is_none()) - } else { - (interner.expr_span(function_body_id), false) - }; - (expr_span, empty_function) -} - -/// Checks that the type of a function in a trait impl matches the type -/// of the corresponding function declaration in the trait itself. -/// -/// To do this, given a trait such as: -/// `trait Foo { fn foo(...); }` -/// -/// And an impl such as: -/// `impl Foo for Bar { fn foo(...); } ` -/// -/// We have to substitute: -/// - Self for Bar -/// - A for D -/// - B for F -/// -/// Before we can type check. Finally, we must also check that the unification -/// result does not introduce any new bindings. This can happen if the impl -/// function's type is more general than that of the trait function. E.g. -/// `fn baz(a: A, b: B)` when the impl required `fn baz(a: A, b: A)`. -/// -/// This does not type check the body of the impl function. 
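The substitution-then-unification idea in the comment above can be sketched with a toy type representation (Ty, Var, Named and a HashMap in place of the compiler's Type and TypeBindings; none of these names come from the patch). Pre-binding the trait's generics and then unifying the two signatures leaves the binding map unchanged exactly when the impl signature is no more general than the declaration.

use std::collections::HashMap;

#[derive(Clone, Debug)]
enum Ty {
    Var(&'static str),   // a generic such as A or B
    Named(&'static str), // a concrete type such as Field
    Func(Vec<Ty>, Box<Ty>),
}

// Two-sided unification over the toy types: unbound variables get recorded in
// `bindings`, already-bound ones must agree with what they were bound to.
fn unify(a: &Ty, b: &Ty, bindings: &mut HashMap<&'static str, Ty>) -> bool {
    match (a, b) {
        (Ty::Var(name), other) | (other, Ty::Var(name)) => {
            if let Some(bound) = bindings.get(*name).cloned() {
                unify(&bound, other, bindings)
            } else {
                bindings.insert(*name, other.clone());
                true
            }
        }
        (Ty::Named(x), Ty::Named(y)) => x == y,
        (Ty::Func(args_a, ret_a), Ty::Func(args_b, ret_b)) => {
            args_a.len() == args_b.len()
                && args_a.iter().zip(args_b).all(|(x, y)| unify(x, y, bindings))
                && unify(ret_a, ret_b, bindings)
        }
        _ => false,
    }
}

fn main() {
    // Trait: `trait Foo<A> { fn baz(a: A, b: A); }`, implemented with A := Field.
    // The generic is pre-bound before comparing, as the real check pre-binds
    // Self and the trait/impl generics.
    let mut pre_bound = HashMap::new();
    pre_bound.insert("A", Ty::Named("Field"));
    let declaration = Ty::Func(vec![Ty::Var("A"), Ty::Var("A")], Box::new(Ty::Named("Unit")));

    // Conforming impl method `fn baz(a: Field, b: Field)`: no new bindings appear.
    let conforming =
        Ty::Func(vec![Ty::Named("Field"), Ty::Named("Field")], Box::new(Ty::Named("Unit")));
    let mut bindings = pre_bound.clone();
    assert!(unify(&declaration, &conforming, &mut bindings));
    assert_eq!(bindings.len(), pre_bound.len());

    // Overly general impl method `fn baz(a: Field, b: B)`: it unifies, but only
    // by binding the fresh generic B, which is reported as a mismatch.
    let too_general =
        Ty::Func(vec![Ty::Named("Field"), Ty::Var("B")], Box::new(Ty::Named("Unit")));
    let mut bindings = pre_bound.clone();
    assert!(unify(&declaration, &too_general, &mut bindings));
    assert!(bindings.len() > pre_bound.len());
}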
-pub(crate) fn check_trait_impl_method_matches_declaration( - interner: &mut NodeInterner, - function: FuncId, -) -> Vec { - let meta = interner.function_meta(&function); - let method_name = interner.function_name(&function); - let mut errors = Vec::new(); - - let definition_type = meta.typ.as_monotype(); - - let impl_ = - meta.trait_impl.expect("Trait impl function should have a corresponding trait impl"); - - // If the trait implementation is not defined in the interner then there was a previous - // error in resolving the trait path and there is likely no trait for this impl. - let Some(impl_) = interner.try_get_trait_implementation(impl_) else { - return errors; - }; - - let impl_ = impl_.borrow(); - let trait_info = interner.get_trait(impl_.trait_id); - - let mut bindings = TypeBindings::new(); - bindings.insert( - trait_info.self_type_typevar_id, - (trait_info.self_type_typevar.clone(), impl_.typ.clone()), - ); - - if trait_info.generics.len() != impl_.trait_generics.len() { - let expected = trait_info.generics.len(); - let found = impl_.trait_generics.len(); - let span = impl_.ident.span(); - let item = trait_info.name.to_string(); - errors.push(TypeCheckError::GenericCountMismatch { item, expected, found, span }); - } - - // Substitute each generic on the trait with the corresponding generic on the impl - for (generic, arg) in trait_info.generics.iter().zip(&impl_.trait_generics) { - bindings.insert(generic.type_var.id(), (generic.type_var.clone(), arg.clone())); - } - - // If this is None, the trait does not have the corresponding function. - // This error should have been caught in name resolution already so we don't - // issue an error for it here. - if let Some(trait_fn_id) = trait_info.method_ids.get(method_name) { - let trait_fn_meta = interner.function_meta(trait_fn_id); - - if trait_fn_meta.direct_generics.len() != meta.direct_generics.len() { - let expected = trait_fn_meta.direct_generics.len(); - let found = meta.direct_generics.len(); - let span = meta.name.location.span; - let item = method_name.to_string(); - errors.push(TypeCheckError::GenericCountMismatch { item, expected, found, span }); - } - - // Substitute each generic on the trait function with the corresponding generic on the impl function - for ( - ResolvedGeneric { type_var: trait_fn_generic, .. }, - ResolvedGeneric { name, type_var: impl_fn_generic, .. 
}, - ) in trait_fn_meta.direct_generics.iter().zip(&meta.direct_generics) - { - let arg = Type::NamedGeneric(impl_fn_generic.clone(), name.clone(), Kind::Normal); - bindings.insert(trait_fn_generic.id(), (trait_fn_generic.clone(), arg)); - } - - let (declaration_type, _) = trait_fn_meta.typ.instantiate_with_bindings(bindings, interner); - - check_function_type_matches_expected_type( - &declaration_type, - definition_type, - method_name, - &meta.parameters, - meta.name.location.span, - &trait_info.name.0.contents, - &mut errors, - ); - } - - errors -} - -fn check_function_type_matches_expected_type( - expected: &Type, - actual: &Type, - method_name: &str, - actual_parameters: &Parameters, - span: Span, - trait_name: &str, - errors: &mut Vec, -) { - let mut bindings = TypeBindings::new(); - // Shouldn't need to unify envs, they should always be equal since they're both free functions - if let (Type::Function(params_a, ret_a, _env_a), Type::Function(params_b, ret_b, _env_b)) = - (expected, actual) - { - if params_a.len() == params_b.len() { - for (i, (a, b)) in params_a.iter().zip(params_b.iter()).enumerate() { - if a.try_unify(b, &mut bindings).is_err() { - errors.push(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name: method_name.to_string(), - expected_typ: a.to_string(), - actual_typ: b.to_string(), - parameter_span: actual_parameters.0[i].0.span(), - parameter_index: i + 1, - }); - } - } - - if ret_b.try_unify(ret_a, &mut bindings).is_err() { - errors.push(TypeCheckError::TypeMismatch { - expected_typ: ret_a.to_string(), - expr_typ: ret_b.to_string(), - expr_span: span, - }); - } - } else { - errors.push(TypeCheckError::MismatchTraitImplNumParameters { - actual_num_parameters: params_b.len(), - expected_num_parameters: params_a.len(), - trait_name: trait_name.to_string(), - method_name: method_name.to_string(), - span, - }); - } - } - - // If result bindings is not empty, a type variable was bound which means the two - // signatures were not a perfect match. Note that this relies on us already binding - // all the expected generics to each other prior to this check. 
- if !bindings.is_empty() { - let expected_typ = expected.to_string(); - let expr_typ = actual.to_string(); - errors.push(TypeCheckError::TypeMismatch { expected_typ, expr_typ, expr_span: span }); - } -} - -impl<'interner> TypeChecker<'interner> { - fn new(interner: &'interner mut NodeInterner) -> Self { - Self { - interner, - errors: Vec::new(), - trait_constraints: Vec::new(), - type_variables: Vec::new(), - current_function: None, - } - } - - fn check_function_body(&mut self, body: &ExprId) -> Type { - self.check_expression(body) - } - - pub fn check_global( - id: GlobalId, - interner: &'interner mut NodeInterner, - ) -> Vec { - let mut this = Self { - interner, - errors: Vec::new(), - trait_constraints: Vec::new(), - type_variables: Vec::new(), - current_function: None, - }; - let statement = this.interner.get_global(id).let_statement; - this.check_statement(&statement); - this.errors - } - - /// Wrapper of Type::unify using self.errors - fn unify( - &mut self, - actual: &Type, - expected: &Type, - make_error: impl FnOnce() -> TypeCheckError, - ) { - actual.unify(expected, &mut self.errors, make_error); - } - - /// Wrapper of Type::unify_with_coercions using self.errors - fn unify_with_coercions( - &mut self, - actual: &Type, - expected: &Type, - expression: ExprId, - make_error: impl FnOnce() -> TypeCheckError, - ) { - actual.unify_with_coercions( - expected, - expression, - self.interner, - &mut self.errors, - make_error, - ); - } - - /// Return a fresh integer or field type variable and log it - /// in self.type_variables to default it later. - fn polymorphic_integer_or_field(&mut self) -> Type { - let typ = Type::polymorphic_integer_or_field(self.interner); - self.type_variables.push(typ.clone()); - typ - } - - /// Return a fresh integer type variable and log it - /// in self.type_variables to default it later. - fn polymorphic_integer(&mut self) -> Type { - let typ = Type::polymorphic_integer(self.interner); - self.type_variables.push(typ.clone()); - typ - } -} - -// XXX: These tests are all manual currently. 
-/// We can either build a test apparatus or pass raw code through the resolver -#[cfg(test)] -pub mod test { - use std::collections::{BTreeMap, HashMap}; - use std::vec; - - use fm::FileId; - use iter_extended::btree_map; - use noirc_errors::{Location, Span}; - - use crate::ast::{BinaryOpKind, FunctionKind, FunctionReturnType, Path, Visibility}; - use crate::graph::CrateId; - use crate::hir::def_map::{ModuleData, ModuleId}; - use crate::hir::resolution::import::{ - PathResolution, PathResolutionError, PathResolutionResult, - }; - use crate::hir_def::expr::HirIdent; - use crate::hir_def::function::FunctionBody; - use crate::hir_def::stmt::HirLetStatement; - use crate::hir_def::stmt::HirPattern::Identifier; - use crate::hir_def::types::Type; - use crate::hir_def::{ - expr::{HirBinaryOp, HirBlockExpression, HirExpression, HirInfixExpression}, - function::{FuncMeta, HirFunction}, - stmt::HirStatement, - }; - use crate::node_interner::{ - DefinitionKind, FuncId, NodeInterner, ReferenceId, TraitId, TraitMethodId, - }; - use crate::{ - hir::{ - def_map::{CrateDefMap, LocalModuleId, ModuleDefId}, - resolution::{path_resolver::PathResolver, resolver::Resolver}, - }, - parse_program, - }; - - #[test] - fn basic_let() { - let mut interner = NodeInterner::default(); - interner.populate_dummy_operator_traits(); - - // Safety: The FileId in a location isn't used for tests - let file = FileId::default(); - let location = Location::new(Span::default(), file); - - // Add a simple let Statement into the interner - // let z = x + y; - // - // Push x variable - let x_id = interner.push_definition( - "x".into(), - false, - false, - DefinitionKind::Local(None), - location, - ); - - let x = HirIdent::non_trait_method(x_id, location); - - // Push y variable - let y_id = interner.push_definition( - "y".into(), - false, - false, - DefinitionKind::Local(None), - location, - ); - let y = HirIdent::non_trait_method(y_id, location); - - // Push z variable - let z_id = interner.push_definition( - "z".into(), - false, - false, - DefinitionKind::Local(None), - location, - ); - let z = HirIdent::non_trait_method(z_id, location); - - // Push x and y as expressions - let x_expr_id = interner.push_expr(HirExpression::Ident(x.clone(), None)); - let y_expr_id = interner.push_expr(HirExpression::Ident(y.clone(), None)); - - // Create Infix - let operator = HirBinaryOp { location, kind: BinaryOpKind::Add }; - let trait_id = TraitId(ModuleId::dummy_id()); - let trait_method_id = TraitMethodId { trait_id, method_index: 0 }; - let expr = HirInfixExpression { lhs: x_expr_id, operator, rhs: y_expr_id, trait_method_id }; - let expr_id = interner.push_expr(HirExpression::Infix(expr)); - interner.push_expr_location(expr_id, Span::single_char(0), file); - - interner.push_expr_location(x_expr_id, Span::single_char(0), file); - interner.push_expr_location(y_expr_id, Span::single_char(0), file); - - // Create let statement - let let_stmt = HirLetStatement { - pattern: Identifier(z), - r#type: Type::FieldElement, - expression: expr_id, - attributes: vec![], - comptime: false, - }; - let stmt_id = interner.push_stmt(HirStatement::Let(let_stmt)); - let expr_id = interner - .push_expr(HirExpression::Block(HirBlockExpression { statements: vec![stmt_id] })); - interner.push_expr_location(expr_id, Span::single_char(0), file); - - // Create function to enclose the let statement - let func = HirFunction::unchecked_from_expr(expr_id); - let func_id = interner.push_fn(func); - - let definition = DefinitionKind::Local(None); - let id = 
interner.push_definition("test_func".into(), false, false, definition, location); - let name = HirIdent::non_trait_method(id, location); - - // Add function meta - let func_meta = FuncMeta { - name, - kind: FunctionKind::Normal, - location, - typ: Type::Function( - vec![Type::FieldElement, Type::FieldElement], - Box::new(Type::Unit), - Box::new(Type::Unit), - ), - parameters: vec![ - (Identifier(x), Type::FieldElement, Visibility::Private), - (Identifier(y), Type::FieldElement, Visibility::Private), - ] - .into(), - return_visibility: Visibility::Private, - has_body: true, - trait_impl: None, - return_type: FunctionReturnType::Default(Span::default()), - trait_constraints: Vec::new(), - direct_generics: Vec::new(), - is_entry_point: true, - is_trait_function: false, - has_inline_attribute: false, - all_generics: Vec::new(), - parameter_idents: Vec::new(), - function_body: FunctionBody::Resolved, - source_crate: CrateId::dummy_id(), - }; - interner.push_fn_meta(func_meta, func_id); - - let errors = super::type_check_func(&mut interner, func_id); - assert!(errors.is_empty()); - } - - #[test] - #[should_panic] - fn basic_let_stmt() { - let src = r#" - fn main(x : Field) { - let k = [x,x]; - let _z = x + k; - } - "#; - - type_check_src_code(src, vec![String::from("main")]); - } - - #[test] - fn basic_index_expr() { - let src = r#" - fn main(x : Field) { - let k = [x,x]; - let _z = x + k[0]; - } - "#; - - type_check_src_code(src, vec![String::from("main")]); - } - #[test] - fn basic_call_expr() { - let src = r#" - fn main(x : Field) { - let _z = x + foo(x); - } - - fn foo(x : Field) -> Field { - x - } - "#; - - type_check_src_code(src, vec![String::from("main"), String::from("foo")]); - } - #[test] - fn basic_for_expr() { - let src = r#" - fn main(_x : Field) { - for _i in 0..10 { - for _k in 0..100 { - - } - } - } - - "#; - - type_check_src_code(src, vec![String::from("main")]); - } - #[test] - fn basic_closure() { - let src = r#" - fn main(x : Field) -> pub Field { - let closure = |y| y + x; - closure(x) - } - "#; - - type_check_src_code(src, vec![String::from("main")]); - } - - #[test] - fn closure_with_no_args() { - let src = r#" - fn main(x : Field) -> pub Field { - let closure = || x; - closure() - } - "#; - - type_check_src_code(src, vec![String::from("main")]); - } - - #[test] - fn fold_entry_point() { - let src = r#" - #[fold] - fn fold(x: &mut Field) -> Field { - *x - } - "#; - - type_check_src_code_errors_expected(src, vec![String::from("fold")], 1); - } - - #[test] - fn fold_numeric_generic() { - let src = r#" - #[fold] - fn fold(x: T) -> T { - x - } - "#; - - type_check_src_code(src, vec![String::from("fold")]); - } - // This is the same Stub that is in the resolver, maybe we can pull this out into a test module and re-use? 
- struct TestPathResolver(HashMap); - - impl PathResolver for TestPathResolver { - fn resolve( - &self, - _def_maps: &BTreeMap, - path: Path, - _path_references: &mut Option<&mut Vec>, - ) -> PathResolutionResult { - // Not here that foo::bar and hello::foo::bar would fetch the same thing - let name = path.segments.last().unwrap(); - self.0 - .get(&name.0.contents) - .cloned() - .map(|module_def_id| PathResolution { module_def_id, error: None }) - .ok_or_else(move || PathResolutionError::Unresolved(name.clone())) - } - - fn local_module_id(&self) -> LocalModuleId { - LocalModuleId(noirc_arena::Index::unsafe_zeroed()) - } - - fn module_id(&self) -> ModuleId { - ModuleId { krate: CrateId::dummy_id(), local_id: self.local_module_id() } - } - } - - impl TestPathResolver { - fn insert_func(&mut self, name: String, func_id: FuncId) { - self.0.insert(name, func_id.into()); - } - } - - pub fn type_check_src_code(src: &str, func_namespace: Vec) -> (NodeInterner, FuncId) { - type_check_src_code_errors_expected(src, func_namespace, 0) - } - - // This function assumes that there is only one function and this is the - // func id that is returned - fn type_check_src_code_errors_expected( - src: &str, - func_namespace: Vec, - expected_num_type_check_errs: usize, - ) -> (NodeInterner, FuncId) { - let (program, errors) = parse_program(src); - let mut interner = NodeInterner::default(); - interner.populate_dummy_operator_traits(); - - if !errors.iter().all(|error| error.is_warning()) { - assert_eq!( - errors.len(), - 0, - "expected 0 parser errors, but got {}, errors: {:?}", - errors.len(), - errors - ); - } - - let func_ids = btree_map(&func_namespace, |name| { - (name.to_string(), interner.push_test_function_definition(name.into())) - }); - - let main_id = - *func_ids.get("main").unwrap_or_else(|| func_ids.first_key_value().unwrap().1); - - let mut path_resolver = TestPathResolver(HashMap::new()); - for (name, id) in func_ids.iter() { - path_resolver.insert_func(name.to_owned(), *id); - } - - let mut def_maps = BTreeMap::new(); - let file = FileId::default(); - - let mut modules = noirc_arena::Arena::default(); - let location = Location::new(Default::default(), file); - modules.insert(ModuleData::new(None, location, false)); - - def_maps.insert( - CrateId::dummy_id(), - CrateDefMap { - root: path_resolver.local_module_id(), - modules, - krate: CrateId::dummy_id(), - extern_prelude: BTreeMap::new(), - }, - ); - - for nf in program.into_sorted().functions { - let resolver = Resolver::new(&mut interner, &path_resolver, &def_maps, file); - - let function_id = *func_ids.get(nf.name()).unwrap(); - let (hir_func, func_meta, resolver_errors) = resolver.resolve_function(nf, function_id); - - interner.push_fn_meta(func_meta, function_id); - interner.update_fn(function_id, hir_func); - assert_eq!(resolver_errors, vec![]); - } - - // Type check section - let mut errors = Vec::new(); - - for function in func_ids.values() { - errors.extend(super::type_check_func(&mut interner, *function)); - } - - assert_eq!( - errors.len(), - expected_num_type_check_errs, - "expected {} type check errors, but got {}, errors: {:?}", - expected_num_type_check_errs, - errors.len(), - errors - ); - - (interner, main_id) - } -} +pub use errors::{NoMatchingImplFoundError, TypeCheckError}; diff --git a/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/compiler/noirc_frontend/src/hir/type_check/stmt.rs deleted file mode 100644 index 9abd1b34690..00000000000 --- a/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ /dev/null @@ -1,395 
+0,0 @@ -use acvm::acir::AcirField; -use iter_extended::vecmap; -use noirc_errors::Span; - -use crate::ast::UnaryOp; -use crate::hir_def::expr::{HirExpression, HirIdent, HirLiteral}; -use crate::hir_def::stmt::{ - HirAssignStatement, HirConstrainStatement, HirForStatement, HirLValue, HirLetStatement, - HirPattern, HirStatement, -}; -use crate::hir_def::types::Type; -use crate::node_interner::{DefinitionId, ExprId, StmtId}; - -use super::errors::{Source, TypeCheckError}; -use super::TypeChecker; - -impl<'interner> TypeChecker<'interner> { - /// Type checks a statement and all expressions/statements contained within. - /// - /// All statements have a unit type `()` as their type so the type of the statement - /// is not interesting. Type checking must still be done on statements to ensure any - /// expressions used within them are typed correctly. - pub(crate) fn check_statement(&mut self, stmt_id: &StmtId) -> Type { - match self.interner.statement(stmt_id) { - // Lets lay out a convincing argument that the handling of - // SemiExpressions and Expressions below is correct. - // - // The only time you will get a Semi expression is if - // you have an expression by itself - // - // Example: - // - // 5; or x; or x+a; - // - // In these cases, you cannot even get the expr_id because - // it is not bound to anything. We could therefore. - // - // However since TypeChecking checks the return type of the last statement - // the type checker could in the future incorrectly return the type. - // - // As it stands, this is also impossible because the ret_type function - // does not use the interner to get the type. It returns Unit. - // - // The reason why we still modify the database, is to make sure it is future-proof - HirStatement::Expression(expr_id) => { - return self.check_expression(&expr_id); - } - HirStatement::Semi(expr_id) => { - self.check_expression(&expr_id); - } - HirStatement::Let(let_stmt) => self.check_let_stmt(let_stmt), - HirStatement::Constrain(constrain_stmt) => self.check_constrain_stmt(constrain_stmt), - HirStatement::Assign(assign_stmt) => self.check_assign_stmt(assign_stmt, stmt_id), - HirStatement::For(for_loop) => self.check_for_loop(for_loop), - HirStatement::Comptime(statement) => return self.check_statement(&statement), - HirStatement::Break | HirStatement::Continue | HirStatement::Error => (), - } - Type::Unit - } - - fn check_for_loop(&mut self, for_loop: HirForStatement) { - let start_range_type = self.check_expression(&for_loop.start_range); - let end_range_type = self.check_expression(&for_loop.end_range); - - let start_span = self.interner.expr_span(&for_loop.start_range); - let end_span = self.interner.expr_span(&for_loop.end_range); - - // Check that start range and end range have the same types - let range_span = start_span.merge(end_span); - self.unify(&start_range_type, &end_range_type, || TypeCheckError::TypeMismatch { - expected_typ: start_range_type.to_string(), - expr_typ: end_range_type.to_string(), - expr_span: range_span, - }); - - let expected_type = self.polymorphic_integer(); - - self.unify(&start_range_type, &expected_type, || TypeCheckError::TypeCannotBeUsed { - typ: start_range_type.clone(), - place: "for loop", - span: range_span, - }); - - self.interner.push_definition_type(for_loop.identifier.id, start_range_type); - - self.check_expression(&for_loop.block); - } - - /// Associate a given HirPattern with the given Type, and remember - /// this association in the NodeInterner. 
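A standalone sketch of the pattern-binding walk documented above, with toy Pattern/Type enums and a plain HashMap standing in for the NodeInterner's definition types (none of these are the compiler's real definitions): identifiers record their type, while mutable and tuple patterns recurse structurally.

use std::collections::HashMap;

#[derive(Debug)]
enum Pattern {
    Identifier(&'static str),
    Mutable(Box<Pattern>),
    Tuple(Vec<Pattern>),
}

#[derive(Clone, Debug, PartialEq)]
enum Type {
    Field,
    Bool,
    Tuple(Vec<Type>),
}

// Walk the pattern and its type together, recording a type for every name.
fn bind_pattern(pattern: &Pattern, typ: Type, table: &mut HashMap<&'static str, Type>) {
    match pattern {
        Pattern::Identifier(name) => {
            table.insert(*name, typ);
        }
        Pattern::Mutable(inner) => bind_pattern(inner, typ, table),
        Pattern::Tuple(fields) => match typ {
            Type::Tuple(field_types) if field_types.len() == fields.len() => {
                for (field, field_type) in fields.iter().zip(field_types) {
                    bind_pattern(field, field_type, table);
                }
            }
            // The real checker pushes a TypeMismatchWithSource error instead.
            other => panic!("expected a {}-element tuple, found {other:?}", fields.len()),
        },
    }
}

fn main() {
    // let (a, mut b): (Field, bool) = ...;
    let pattern = Pattern::Tuple(vec![
        Pattern::Identifier("a"),
        Pattern::Mutable(Box::new(Pattern::Identifier("b"))),
    ]);

    let mut table = HashMap::new();
    bind_pattern(&pattern, Type::Tuple(vec![Type::Field, Type::Bool]), &mut table);

    assert_eq!(table["a"], Type::Field);
    assert_eq!(table["b"], Type::Bool);
}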
- pub(crate) fn bind_pattern(&mut self, pattern: &HirPattern, typ: Type) { - match pattern { - HirPattern::Identifier(ident) => self.interner.push_definition_type(ident.id, typ), - HirPattern::Mutable(pattern, _) => self.bind_pattern(pattern, typ), - HirPattern::Tuple(fields, location) => match typ.follow_bindings() { - Type::Tuple(field_types) if field_types.len() == fields.len() => { - for (field, field_type) in fields.iter().zip(field_types) { - self.bind_pattern(field, field_type); - } - } - Type::Error => (), - other => { - let expected = - Type::Tuple(vecmap(fields, |_| self.interner.next_type_variable())); - - self.errors.push(TypeCheckError::TypeMismatchWithSource { - expected, - actual: other, - span: location.span, - source: Source::Assignment, - }); - } - }, - HirPattern::Struct(struct_type, fields, location) => { - self.unify(struct_type, &typ, || TypeCheckError::TypeMismatchWithSource { - expected: struct_type.clone(), - actual: typ.clone(), - span: location.span, - source: Source::Assignment, - }); - - if let Type::Struct(struct_type, generics) = struct_type.follow_bindings() { - let struct_type = struct_type.borrow(); - - for (field_name, field_pattern) in fields { - if let Some((type_field, _)) = - struct_type.get_field(&field_name.0.contents, &generics) - { - self.bind_pattern(field_pattern, type_field); - } - } - } - } - } - } - - fn check_assign_stmt(&mut self, assign_stmt: HirAssignStatement, stmt_id: &StmtId) { - let expr_type = self.check_expression(&assign_stmt.expression); - let span = self.interner.expr_span(&assign_stmt.expression); - let (lvalue_type, new_lvalue, mutable) = self.check_lvalue(&assign_stmt.lvalue, span); - - if !mutable { - let (name, span) = self.get_lvalue_name_and_span(&assign_stmt.lvalue); - self.errors.push(TypeCheckError::VariableMustBeMutable { name, span }); - } - - // Must push new lvalue to the interner, we've resolved any field indices - self.interner.update_statement(stmt_id, |stmt| match stmt { - HirStatement::Assign(assign) => assign.lvalue = new_lvalue, - _ => unreachable!("statement is known to be assignment"), - }); - - let span = self.interner.expr_span(&assign_stmt.expression); - self.unify_with_coercions(&expr_type, &lvalue_type, assign_stmt.expression, || { - TypeCheckError::TypeMismatchWithSource { - actual: expr_type.clone(), - expected: lvalue_type.clone(), - span, - source: Source::Assignment, - } - }); - } - - fn get_lvalue_name_and_span(&self, lvalue: &HirLValue) -> (String, Span) { - match lvalue { - HirLValue::Ident(name, _) => { - let span = name.location.span; - - if let Some(definition) = self.interner.try_definition(name.id) { - (definition.name.clone(), span) - } else { - ("(undeclared variable)".into(), span) - } - } - HirLValue::MemberAccess { object, .. } => self.get_lvalue_name_and_span(object), - HirLValue::Index { array, .. } => self.get_lvalue_name_and_span(array), - HirLValue::Dereference { lvalue, .. } => self.get_lvalue_name_and_span(lvalue), - } - } - - /// Type check an lvalue - the left hand side of an assignment statement. 
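As a side note on the mutability flag threaded through the lvalue check documented above, here is a toy sketch (an invented LValue enum, not HirLValue) of the rule it implements: an assignment target is mutable if its root binding is mutable, a member or index access inherits its object's mutability, and anything reached through a dereference of a mutable reference is assignable.

// Toy lvalue shapes for `x`, `x.field`, `x[i]`, and `*x`.
enum LValue {
    Ident { mutable: bool },
    MemberAccess(Box<LValue>),
    Index(Box<LValue>),
    Dereference(Box<LValue>),
}

// Member and index accesses inherit mutability from their object; a
// dereference is always assignable because it was checked against `&mut T`.
fn is_mutable(lvalue: &LValue) -> bool {
    match lvalue {
        LValue::Ident { mutable } => *mutable,
        LValue::MemberAccess(object) | LValue::Index(object) => is_mutable(object),
        LValue::Dereference(_) => true,
    }
}

fn main() {
    // `foo.bar = ...` with `foo` not declared `mut`: rejected.
    let field_of_immutable = LValue::MemberAccess(Box::new(LValue::Ident { mutable: false }));
    assert!(!is_mutable(&field_of_immutable));

    // `a[0] = ...` with `a` declared `mut`: accepted.
    let index_of_mutable = LValue::Index(Box::new(LValue::Ident { mutable: true }));
    assert!(is_mutable(&index_of_mutable));

    // `(*r).bar = ...` with `r: &mut Foo`: accepted even though `r` is not `mut`.
    let field_through_reference =
        LValue::MemberAccess(Box::new(LValue::Dereference(Box::new(LValue::Ident {
            mutable: false,
        }))));
    assert!(is_mutable(&field_through_reference));
}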
- fn check_lvalue(&mut self, lvalue: &HirLValue, assign_span: Span) -> (Type, HirLValue, bool) { - match lvalue { - HirLValue::Ident(ident, _) => { - let mut mutable = true; - - let typ = if ident.id == DefinitionId::dummy_id() { - Type::Error - } else { - if let Some(definition) = self.interner.try_definition(ident.id) { - mutable = definition.mutable; - } - - let typ = self.interner.definition_type(ident.id).instantiate(self.interner).0; - typ.follow_bindings() - }; - - (typ.clone(), HirLValue::Ident(ident.clone(), typ), mutable) - } - HirLValue::MemberAccess { object, field_name, location, .. } => { - let (lhs_type, object, mut mutable) = self.check_lvalue(object, assign_span); - let mut object = Box::new(object); - let field_name = field_name.clone(); - - let object_ref = &mut object; - let mutable_ref = &mut mutable; - let location = *location; - - let dereference_lhs = move |_: &mut Self, _, element_type| { - // We must create a temporary value first to move out of object_ref before - // we eventually reassign to it. - let id = DefinitionId::dummy_id(); - let ident = HirIdent::non_trait_method(id, location); - let tmp_value = HirLValue::Ident(ident, Type::Error); - - let lvalue = std::mem::replace(object_ref, Box::new(tmp_value)); - *object_ref = - Box::new(HirLValue::Dereference { lvalue, element_type, location }); - *mutable_ref = true; - }; - - let name = &field_name.0.contents; - let (object_type, field_index) = self - .check_field_access(&lhs_type, name, field_name.span(), Some(dereference_lhs)) - .unwrap_or((Type::Error, 0)); - - let field_index = Some(field_index); - let typ = object_type.clone(); - let lvalue = - HirLValue::MemberAccess { object, field_name, field_index, typ, location }; - (object_type, lvalue, mutable) - } - HirLValue::Index { array, index, location, .. } => { - let index_type = self.check_expression(index); - let expr_span = self.interner.expr_span(index); - let location = *location; - - index_type.unify(&self.polymorphic_integer_or_field(), &mut self.errors, || { - TypeCheckError::TypeMismatch { - expected_typ: "an integer".to_owned(), - expr_typ: index_type.to_string(), - expr_span, - } - }); - - let (mut lvalue_type, mut lvalue, mut mutable) = - self.check_lvalue(array, assign_span); - - // Before we check that the lvalue is an array, try to dereference it as many times - // as needed to unwrap any &mut wrappers. 
- while let Type::MutableReference(element) = lvalue_type.follow_bindings() { - let element_type = element.as_ref().clone(); - lvalue = - HirLValue::Dereference { lvalue: Box::new(lvalue), element_type, location }; - lvalue_type = *element; - // We know this value to be mutable now since we found an `&mut` - mutable = true; - } - - let typ = match lvalue_type.follow_bindings() { - Type::Array(_, elem_type) => *elem_type, - Type::Slice(elem_type) => *elem_type, - Type::Error => Type::Error, - Type::String(_) => { - let (_lvalue_name, lvalue_span) = self.get_lvalue_name_and_span(&lvalue); - self.errors.push(TypeCheckError::StringIndexAssign { span: lvalue_span }); - Type::Error - } - other => { - // TODO: Need a better span here - self.errors.push(TypeCheckError::TypeMismatch { - expected_typ: "array".to_string(), - expr_typ: other.to_string(), - expr_span: assign_span, - }); - Type::Error - } - }; - - let array = Box::new(lvalue); - (typ.clone(), HirLValue::Index { array, index: *index, typ, location }, mutable) - } - HirLValue::Dereference { lvalue, element_type: _, location } => { - let (reference_type, lvalue, _) = self.check_lvalue(lvalue, assign_span); - let lvalue = Box::new(lvalue); - let location = *location; - - let element_type = Type::type_variable(self.interner.next_type_variable_id()); - let expected_type = Type::MutableReference(Box::new(element_type.clone())); - - self.unify(&reference_type, &expected_type, || TypeCheckError::TypeMismatch { - expected_typ: expected_type.to_string(), - expr_typ: reference_type.to_string(), - expr_span: assign_span, - }); - - // Dereferences are always mutable since we already type checked against a &mut T - ( - element_type.clone(), - HirLValue::Dereference { lvalue, element_type, location }, - true, - ) - } - } - } - - fn check_let_stmt(&mut self, let_stmt: HirLetStatement) { - let resolved_type = self.check_declaration(let_stmt.expression, let_stmt.r#type); - - // Set the type of the pattern to be equal to the annotated type - self.bind_pattern(&let_stmt.pattern, resolved_type); - } - - fn check_constrain_stmt(&mut self, stmt: HirConstrainStatement) { - let expr_type = self.check_expression(&stmt.0); - let expr_span = self.interner.expr_span(&stmt.0); - - // Must type check the assertion message expression so that we instantiate bindings - stmt.2.map(|assert_msg_expr| self.check_expression(&assert_msg_expr)); - - self.unify(&expr_type, &Type::Bool, || TypeCheckError::TypeMismatch { - expr_typ: expr_type.to_string(), - expected_typ: Type::Bool.to_string(), - expr_span, - }); - } - - /// All declaration statements check that the user specified type(UST) is equal to the - /// expression on the RHS, unless the UST is unspecified in which case - /// the type of the declaration is inferred to match the RHS. 
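The rule in the comment above, together with the unsigned-range lint invoked from the same path, can be condensed into a standalone sketch (a toy Type enum and a plain Result in place of the checker's error list; the names here are illustrative only).

#[derive(Clone, Debug, PartialEq)]
enum Type {
    Field,
    Unsigned { bits: u32 },
    Unspecified, // plays the role of the Type::Error placeholder used above
}

// `let x: annotated = rhs;` -- an omitted annotation falls back to the RHS
// type, an explicit one must match it, and unsigned annotations get a range
// check when the initializer is an integer literal.
fn check_declaration(
    annotated: Type,
    rhs_type: Type,
    rhs_literal: Option<u128>,
) -> Result<Type, String> {
    if annotated == Type::Unspecified {
        return Ok(rhs_type);
    }
    if annotated != rhs_type {
        return Err(format!("expected {annotated:?}, found {rhs_type:?}"));
    }
    if let (Type::Unsigned { bits }, Some(value)) = (&annotated, rhs_literal) {
        let max = 1u128 << *bits;
        if value >= max {
            return Err(format!("{value} does not fit in 0..={}", max - 1));
        }
    }
    Ok(annotated)
}

fn main() {
    // let x = some_field;   -> inferred as Field
    assert_eq!(check_declaration(Type::Unspecified, Type::Field, None), Ok(Type::Field));

    // let x: u8 = 255;      -> fits
    let u8_type = Type::Unsigned { bits: 8 };
    assert_eq!(
        check_declaration(u8_type.clone(), u8_type.clone(), Some(255)),
        Ok(u8_type.clone())
    );

    // let x: u8 = 256;      -> overflowing assignment
    assert!(check_declaration(u8_type.clone(), u8_type, Some(256)).is_err());
}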
- fn check_declaration(&mut self, rhs_expr: ExprId, annotated_type: Type) -> Type { - // Type check the expression on the RHS - let expr_type = self.check_expression(&rhs_expr); - - // First check if the LHS is unspecified - // If so, then we give it the same type as the expression - if annotated_type != Type::Error { - // Now check if LHS is the same type as the RHS - // Importantly, we do not coerce any types implicitly - let expr_span = self.interner.expr_span(&rhs_expr); - - self.unify_with_coercions(&expr_type, &annotated_type, rhs_expr, || { - TypeCheckError::TypeMismatch { - expected_typ: annotated_type.to_string(), - expr_typ: expr_type.to_string(), - expr_span, - } - }); - if annotated_type.is_unsigned() { - self.lint_overflowing_uint(&rhs_expr, &annotated_type); - } - annotated_type - } else { - expr_type - } - } - - /// Check if an assignment is overflowing with respect to `annotated_type` - /// in a declaration statement where `annotated_type` is an unsigned integer - fn lint_overflowing_uint(&mut self, rhs_expr: &ExprId, annotated_type: &Type) { - let expr = self.interner.expression(rhs_expr); - let span = self.interner.expr_span(rhs_expr); - match expr { - HirExpression::Literal(HirLiteral::Integer(value, false)) => { - let v = value.to_u128(); - if let Type::Integer(_, bit_count) = annotated_type { - let bit_count: u32 = (*bit_count).into(); - let max = 1 << bit_count; - if v >= max { - self.errors.push(TypeCheckError::OverflowingAssignment { - expr: -value, - ty: annotated_type.clone(), - range: format!("0..={}", max - 1), - span, - }); - }; - }; - } - HirExpression::Prefix(expr) => { - self.lint_overflowing_uint(&expr.rhs, annotated_type); - if matches!(expr.operator, UnaryOp::Minus) { - self.errors.push(TypeCheckError::InvalidUnaryOp { - kind: "annotated_type".to_string(), - span, - }); - } - } - HirExpression::Infix(expr) => { - self.lint_overflowing_uint(&expr.lhs, annotated_type); - self.lint_overflowing_uint(&expr.rhs, annotated_type); - } - _ => {} - } - } -} diff --git a/compiler/noirc_frontend/src/hir_def/expr.rs b/compiler/noirc_frontend/src/hir_def/expr.rs index ab2344746d1..e85d30f0c32 100644 --- a/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/compiler/noirc_frontend/src/hir_def/expr.rs @@ -40,13 +40,6 @@ pub enum HirExpression { Error, } -impl HirExpression { - /// Returns an empty block expression - pub const fn empty_block() -> HirExpression { - HirExpression::Block(HirBlockExpression { statements: vec![] }) - } -} - /// Corresponds to a variable in the source code #[derive(Debug, Clone)] pub struct HirIdent { @@ -178,6 +171,7 @@ pub struct HirCallExpression { pub func: ExprId, pub arguments: Vec, pub location: Location, + pub is_macro_call: bool, } /// These nodes are temporary, they're @@ -215,6 +209,7 @@ impl HirMethodCallExpression { mut self, method: &HirMethodReference, object_type: Type, + is_macro_call: bool, location: Location, interner: &mut NodeInterner, ) -> ((ExprId, HirIdent), HirCallExpression) { @@ -231,6 +226,7 @@ impl HirMethodCallExpression { typ: object_type, trait_id: method_id.trait_id, trait_generics: generics.clone(), + span: location.span, }; (id, ImplKind::TraitMethod(*method_id, constraint, false)) } @@ -238,7 +234,7 @@ impl HirMethodCallExpression { let func_var = HirIdent { location, id, impl_kind }; let func = interner.push_expr(HirExpression::Ident(func_var.clone(), self.generics)); interner.push_expr_location(func, location.span, location.file); - let expr = HirCallExpression { func, arguments, location }; + let expr = 
HirCallExpression { func, arguments, location, is_macro_call }; ((func, func_var), expr) } } diff --git a/compiler/noirc_frontend/src/hir_def/function.rs b/compiler/noirc_frontend/src/hir_def/function.rs index fa8bb55abee..b9f6af0c4c3 100644 --- a/compiler/noirc_frontend/src/hir_def/function.rs +++ b/compiler/noirc_frontend/src/hir_def/function.rs @@ -6,30 +6,35 @@ use super::stmt::HirPattern; use super::traits::TraitConstraint; use crate::ast::{FunctionKind, FunctionReturnType, Visibility}; use crate::graph::CrateId; -use crate::macros_api::BlockExpression; -use crate::node_interner::{ExprId, NodeInterner, TraitImplId}; +use crate::hir::def_map::LocalModuleId; +use crate::macros_api::{BlockExpression, StructId}; +use crate::node_interner::{ExprId, NodeInterner, TraitId, TraitImplId}; use crate::{ResolvedGeneric, Type}; -/// A Hir function is a block expression -/// with a list of statements +/// A Hir function is a block expression with a list of statements. +/// If the function has yet to be resolved, the body starts off empty (None). #[derive(Debug, Clone)] -pub struct HirFunction(ExprId); +pub struct HirFunction(Option); impl HirFunction { pub fn empty() -> HirFunction { - HirFunction(ExprId::empty_block_id()) + HirFunction(None) } pub const fn unchecked_from_expr(expr_id: ExprId) -> HirFunction { - HirFunction(expr_id) + HirFunction(Some(expr_id)) } - pub const fn as_expr(&self) -> ExprId { + pub fn as_expr(&self) -> ExprId { + self.0.expect("Function has yet to be elaborated, cannot get an ExprId of its body!") + } + + pub fn try_as_expr(&self) -> Option { self.0 } pub fn block(&self, interner: &NodeInterner) -> HirBlockExpression { - match interner.expression(&self.0) { + match interner.expression(&self.as_expr()) { HirExpression::Block(block_expr) => block_expr, _ => unreachable!("ice: functions can only be block expressions"), } @@ -126,6 +131,12 @@ pub struct FuncMeta { pub trait_constraints: Vec, + /// The struct this function belongs to, if any + pub struct_id: Option, + + // The trait this function belongs to, if any + pub trait_id: Option, + /// The trait impl this function belongs to, if any pub trait_impl: Option, @@ -133,11 +144,6 @@ pub struct FuncMeta { /// For non-contracts, this means the function is `main`. pub is_entry_point: bool, - /// True if this function was defined within a trait (not a trait impl!). - /// Trait functions are just stubs and shouldn't have their return type checked - /// against their body type, nor should unused variables be checked. - pub is_trait_function: bool, - /// True if this function is marked with an attribute /// that indicates it should be inlined differently than the default (inline everything). /// For example, such as `fold` (never inlined) or `no_predicates` (inlined after flattening) @@ -147,6 +153,9 @@ pub struct FuncMeta { /// The crate this function was defined in pub source_crate: CrateId, + + /// The module this function was defined in + pub source_module: LocalModuleId, } #[derive(Debug, Clone)] @@ -163,7 +172,7 @@ impl FuncMeta { /// We don't check the return type of these functions since it will always have /// an empty body, and we don't check for unused parameters. 
pub fn is_stub(&self) -> bool { - self.kind.can_ignore_return_type() || self.is_trait_function + self.kind.can_ignore_return_type() || self.trait_id.is_some() } pub fn function_signature(&self) -> FunctionSignature { diff --git a/compiler/noirc_frontend/src/hir_def/traits.rs b/compiler/noirc_frontend/src/hir_def/traits.rs index 0600706922b..099c9ea78f7 100644 --- a/compiler/noirc_frontend/src/hir_def/traits.rs +++ b/compiler/noirc_frontend/src/hir_def/traits.rs @@ -16,6 +16,8 @@ pub struct TraitFunction { pub location: Location, pub default_impl: Option>, pub default_impl_module_id: crate::hir::def_map::LocalModuleId, + pub trait_constraints: Vec, + pub direct_generics: Generics, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -82,16 +84,17 @@ pub struct TraitImpl { pub where_clause: Vec, } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct TraitConstraint { pub typ: Type, pub trait_id: TraitId, pub trait_generics: Vec, + pub span: Span, } impl TraitConstraint { - pub fn new(typ: Type, trait_id: TraitId, trait_generics: Vec) -> Self { - Self { typ, trait_id, trait_generics } + pub fn new(typ: Type, trait_id: TraitId, trait_generics: Vec, span: Span) -> Self { + Self { typ, trait_id, trait_generics, span } } pub fn apply_bindings(&mut self, type_bindings: &TypeBindings) { diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 8183911c845..0ec975a04db 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -114,79 +114,6 @@ pub enum Type { Error, } -impl Type { - /// Returns the number of field elements required to represent the type once encoded. - pub fn field_count(&self) -> u32 { - match self { - Type::FieldElement | Type::Integer { .. } | Type::Bool => 1, - Type::Array(size, typ) => { - let length = size - .evaluate_to_u32() - .expect("Cannot have variable sized arrays as a parameter to main"); - let typ = typ.as_ref(); - length * typ.field_count() - } - Type::Struct(def, args) => { - let struct_type = def.borrow(); - let fields = struct_type.get_fields(args); - fields.iter().fold(0, |acc, (_, field_type)| acc + field_type.field_count()) - } - Type::Alias(def, generics) => def.borrow().get_type(generics).field_count(), - Type::Tuple(fields) => { - fields.iter().fold(0, |acc, field_typ| acc + field_typ.field_count()) - } - Type::String(size) => size - .evaluate_to_u32() - .expect("Cannot have variable sized strings as a parameter to main"), - Type::FmtString(_, _) - | Type::Unit - | Type::TypeVariable(_, _) - | Type::TraitAsType(..) 
- | Type::NamedGeneric(_, _, _) - | Type::Function(_, _, _) - | Type::MutableReference(_) - | Type::Forall(_, _) - | Type::Constant(_) - | Type::Quoted(_) - | Type::Slice(_) - | Type::Error => unreachable!("This type cannot exist as a parameter to main"), - } - } - - pub(crate) fn is_nested_slice(&self) -> bool { - match self { - Type::Slice(elem) => elem.as_ref().contains_slice(), - Type::Array(_, elem) => elem.as_ref().contains_slice(), - Type::Alias(alias, generics) => alias.borrow().get_type(generics).is_nested_slice(), - _ => false, - } - } - - pub(crate) fn contains_slice(&self) -> bool { - match self { - Type::Slice(_) => true, - Type::Struct(struct_typ, generics) => { - let fields = struct_typ.borrow().get_fields(generics); - for field in fields.iter() { - if field.1.contains_slice() { - return true; - } - } - false - } - Type::Tuple(types) => { - for typ in types.iter() { - if typ.contains_slice() { - return true; - } - } - false - } - _ => false, - } - } -} - /// A Kind is the type of a Type. These are used since only certain kinds of types are allowed in /// certain positions. /// @@ -215,6 +142,10 @@ pub enum QuotedType { TopLevelItem, Type, StructDefinition, + TraitConstraint, + TraitDefinition, + FunctionDefinition, + Module, } /// A list of TypeVariableIds to bind to a type. Storing the @@ -588,13 +519,15 @@ impl TypeVariable { /// variable is already bound to a different type. This generally /// a logic error to use outside of monomorphization. pub fn force_bind(&self, typ: Type) { - *self.1.borrow_mut() = TypeBinding::Bound(typ); + if !typ.occurs(self.id()) { + *self.1.borrow_mut() = TypeBinding::Bound(typ); + } } } /// TypeBindings are the mutable insides of a TypeVariable. /// They are either bound to some type, or are unbound. -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TypeBinding { Bound(Type), Unbound(TypeVariableId), @@ -610,6 +543,156 @@ impl TypeBinding { #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct TypeVariableId(pub usize); +impl std::fmt::Display for Type { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Type::FieldElement => { + write!(f, "Field") + } + Type::Array(len, typ) => { + write!(f, "[{typ}; {len}]") + } + Type::Slice(typ) => { + write!(f, "[{typ}]") + } + Type::Integer(sign, num_bits) => match sign { + Signedness::Signed => write!(f, "i{num_bits}"), + Signedness::Unsigned => write!(f, "u{num_bits}"), + }, + Type::TypeVariable(var, TypeVariableKind::Normal) => write!(f, "{}", var.borrow()), + Type::TypeVariable(binding, TypeVariableKind::Integer) => { + if let TypeBinding::Unbound(_) = &*binding.borrow() { + write!(f, "{}", Type::default_int_type()) + } else { + write!(f, "{}", binding.borrow()) + } + } + Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { + if let TypeBinding::Unbound(_) = &*binding.borrow() { + // Show a Field by default if this TypeVariableKind::IntegerOrField is unbound, since that is + // what they bind to by default anyway. It is less confusing than displaying it + // as a generic. + write!(f, "Field") + } else { + write!(f, "{}", binding.borrow()) + } + } + Type::TypeVariable(binding, TypeVariableKind::Constant(n)) => { + if let TypeBinding::Unbound(_) = &*binding.borrow() { + // TypeVariableKind::Constant(n) binds to Type::Constant(n) by default, so just show that. 
+ write!(f, "{n}") + } else { + write!(f, "{}", binding.borrow()) + } + } + Type::Struct(s, args) => { + let args = vecmap(args, |arg| arg.to_string()); + if args.is_empty() { + write!(f, "{}", s.borrow()) + } else { + write!(f, "{}<{}>", s.borrow(), args.join(", ")) + } + } + Type::Alias(alias, args) => { + let args = vecmap(args, |arg| arg.to_string()); + if args.is_empty() { + write!(f, "{}", alias.borrow()) + } else { + write!(f, "{}<{}>", alias.borrow(), args.join(", ")) + } + } + Type::TraitAsType(_id, name, generics) => { + write!(f, "impl {}", name)?; + if !generics.is_empty() { + let generics = vecmap(generics, ToString::to_string).join(", "); + write!(f, "<{generics}>")?; + } + Ok(()) + } + Type::Tuple(elements) => { + let elements = vecmap(elements, ToString::to_string); + write!(f, "({})", elements.join(", ")) + } + Type::Bool => write!(f, "bool"), + Type::String(len) => write!(f, "str<{len}>"), + Type::FmtString(len, elements) => { + write!(f, "fmtstr<{len}, {elements}>") + } + Type::Unit => write!(f, "()"), + Type::Error => write!(f, "error"), + Type::NamedGeneric(binding, name, _) => match &*binding.borrow() { + TypeBinding::Bound(binding) => binding.fmt(f), + TypeBinding::Unbound(_) if name.is_empty() => write!(f, "_"), + TypeBinding::Unbound(_) => write!(f, "{name}"), + }, + Type::Constant(x) => x.fmt(f), + Type::Forall(typevars, typ) => { + let typevars = vecmap(typevars, |var| var.id().to_string()); + write!(f, "forall {}. {}", typevars.join(" "), typ) + } + Type::Function(args, ret, env) => { + let closure_env_text = match **env { + Type::Unit => "".to_string(), + _ => format!(" with env {env}"), + }; + + let args = vecmap(args.iter(), ToString::to_string); + + write!(f, "fn({}) -> {ret}{closure_env_text}", args.join(", ")) + } + Type::MutableReference(element) => { + write!(f, "&mut {element}") + } + Type::Quoted(quoted) => write!(f, "{}", quoted), + } + } +} + +impl std::fmt::Display for BinaryTypeOperator { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + BinaryTypeOperator::Addition => write!(f, "+"), + BinaryTypeOperator::Subtraction => write!(f, "-"), + BinaryTypeOperator::Multiplication => write!(f, "*"), + BinaryTypeOperator::Division => write!(f, "/"), + BinaryTypeOperator::Modulo => write!(f, "%"), + } + } +} + +impl std::fmt::Display for TypeVariableId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "_") + } +} + +impl std::fmt::Display for TypeBinding { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TypeBinding::Bound(typ) => typ.fmt(f), + TypeBinding::Unbound(id) => id.fmt(f), + } + } +} + +impl std::fmt::Display for QuotedType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + QuotedType::Expr => write!(f, "Expr"), + QuotedType::Quoted => write!(f, "Quoted"), + QuotedType::TopLevelItem => write!(f, "TopLevelItem"), + QuotedType::Type => write!(f, "Type"), + QuotedType::StructDefinition => write!(f, "StructDefinition"), + QuotedType::TraitDefinition => write!(f, "TraitDefinition"), + QuotedType::TraitConstraint => write!(f, "TraitConstraint"), + QuotedType::FunctionDefinition => write!(f, "FunctionDefinition"), + QuotedType::Module => write!(f, "Module"), + } + } +} + +pub struct UnificationError; + impl Type { pub fn default_int_or_field_type() -> Type { Type::FieldElement @@ -1044,155 +1127,78 @@ impl Type { // | Type::Error => Kind::Normal, // } // } -} -impl std::fmt::Display for Type { - fn 
fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + /// Returns the number of field elements required to represent the type once encoded. + pub fn field_count(&self) -> u32 { match self { - Type::FieldElement => { - write!(f, "Field") - } - Type::Array(len, typ) => { - write!(f, "[{typ}; {len}]") - } - Type::Slice(typ) => { - write!(f, "[{typ}]") - } - Type::Integer(sign, num_bits) => match sign { - Signedness::Signed => write!(f, "i{num_bits}"), - Signedness::Unsigned => write!(f, "u{num_bits}"), - }, - Type::TypeVariable(var, TypeVariableKind::Normal) => write!(f, "{}", var.borrow()), - Type::TypeVariable(binding, TypeVariableKind::Integer) => { - if let TypeBinding::Unbound(_) = &*binding.borrow() { - write!(f, "{}", Type::default_int_type()) - } else { - write!(f, "{}", binding.borrow()) - } - } - Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { - if let TypeBinding::Unbound(_) = &*binding.borrow() { - // Show a Field by default if this TypeVariableKind::IntegerOrField is unbound, since that is - // what they bind to by default anyway. It is less confusing than displaying it - // as a generic. - write!(f, "Field") - } else { - write!(f, "{}", binding.borrow()) - } - } - Type::TypeVariable(binding, TypeVariableKind::Constant(n)) => { - if let TypeBinding::Unbound(_) = &*binding.borrow() { - // TypeVariableKind::Constant(n) binds to Type::Constant(n) by default, so just show that. - write!(f, "{n}") - } else { - write!(f, "{}", binding.borrow()) - } - } - Type::Struct(s, args) => { - let args = vecmap(args, |arg| arg.to_string()); - if args.is_empty() { - write!(f, "{}", s.borrow()) - } else { - write!(f, "{}<{}>", s.borrow(), args.join(", ")) - } - } - Type::Alias(alias, args) => { - let args = vecmap(args, |arg| arg.to_string()); - if args.is_empty() { - write!(f, "{}", alias.borrow()) - } else { - write!(f, "{}<{}>", alias.borrow(), args.join(", ")) - } - } - Type::TraitAsType(_id, name, generics) => { - write!(f, "impl {}", name)?; - if !generics.is_empty() { - let generics = vecmap(generics, ToString::to_string).join(", "); - write!(f, "<{generics}>")?; - } - Ok(()) - } - Type::Tuple(elements) => { - let elements = vecmap(elements, ToString::to_string); - write!(f, "({})", elements.join(", ")) - } - Type::Bool => write!(f, "bool"), - Type::String(len) => write!(f, "str<{len}>"), - Type::FmtString(len, elements) => { - write!(f, "fmtstr<{len}, {elements}>") - } - Type::Unit => write!(f, "()"), - Type::Error => write!(f, "error"), - Type::NamedGeneric(binding, name, _) => match &*binding.borrow() { - TypeBinding::Bound(binding) => binding.fmt(f), - TypeBinding::Unbound(_) if name.is_empty() => write!(f, "_"), - TypeBinding::Unbound(_) => write!(f, "{name}"), - }, - Type::Constant(x) => x.fmt(f), - Type::Forall(typevars, typ) => { - let typevars = vecmap(typevars, |var| var.id().to_string()); - write!(f, "forall {}. {}", typevars.join(" "), typ) + Type::FieldElement | Type::Integer { .. 
} | Type::Bool => 1, + Type::Array(size, typ) => { + let length = size + .evaluate_to_u32() + .expect("Cannot have variable sized arrays as a parameter to main"); + let typ = typ.as_ref(); + length * typ.field_count() } - Type::Function(args, ret, env) => { - let closure_env_text = match **env { - Type::Unit => "".to_string(), - _ => format!(" with env {env}"), - }; - - let args = vecmap(args.iter(), ToString::to_string); - - write!(f, "fn({}) -> {ret}{closure_env_text}", args.join(", ")) + Type::Struct(def, args) => { + let struct_type = def.borrow(); + let fields = struct_type.get_fields(args); + fields.iter().fold(0, |acc, (_, field_type)| acc + field_type.field_count()) } - Type::MutableReference(element) => { - write!(f, "&mut {element}") + Type::Alias(def, generics) => def.borrow().get_type(generics).field_count(), + Type::Tuple(fields) => { + fields.iter().fold(0, |acc, field_typ| acc + field_typ.field_count()) } - Type::Quoted(quoted) => write!(f, "{}", quoted), - } - } -} - -impl std::fmt::Display for BinaryTypeOperator { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - BinaryTypeOperator::Addition => write!(f, "+"), - BinaryTypeOperator::Subtraction => write!(f, "-"), - BinaryTypeOperator::Multiplication => write!(f, "*"), - BinaryTypeOperator::Division => write!(f, "/"), - BinaryTypeOperator::Modulo => write!(f, "%"), + Type::String(size) => size + .evaluate_to_u32() + .expect("Cannot have variable sized strings as a parameter to main"), + Type::FmtString(_, _) + | Type::Unit + | Type::TypeVariable(_, _) + | Type::TraitAsType(..) + | Type::NamedGeneric(_, _, _) + | Type::Function(_, _, _) + | Type::MutableReference(_) + | Type::Forall(_, _) + | Type::Constant(_) + | Type::Quoted(_) + | Type::Slice(_) + | Type::Error => unreachable!("This type cannot exist as a parameter to main"), } } -} - -impl std::fmt::Display for TypeVariableId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "_") - } -} -impl std::fmt::Display for TypeBinding { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + pub(crate) fn is_nested_slice(&self) -> bool { match self { - TypeBinding::Bound(typ) => typ.fmt(f), - TypeBinding::Unbound(id) => id.fmt(f), + Type::Slice(elem) => elem.as_ref().contains_slice(), + Type::Array(_, elem) => elem.as_ref().contains_slice(), + Type::Alias(alias, generics) => alias.borrow().get_type(generics).is_nested_slice(), + _ => false, } } -} -impl std::fmt::Display for QuotedType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + pub(crate) fn contains_slice(&self) -> bool { match self { - QuotedType::Expr => write!(f, "Expr"), - QuotedType::Quoted => write!(f, "Quoted"), - QuotedType::TopLevelItem => write!(f, "TopLevelItem"), - QuotedType::Type => write!(f, "Type"), - QuotedType::StructDefinition => write!(f, "StructDefinition"), + Type::Slice(_) => true, + Type::Struct(struct_typ, generics) => { + let fields = struct_typ.borrow().get_fields(generics); + for field in fields.iter() { + if field.1.contains_slice() { + return true; + } + } + false + } + Type::Tuple(types) => { + for typ in types.iter() { + if typ.contains_slice() { + return true; + } + } + false + } + _ => false, } } -} -pub struct UnificationError; - -impl Type { /// Try to bind a MaybeConstant variable to self, succeeding if self is a Constant, /// MaybeConstant, or type variable. If successful, the binding is placed in the /// given TypeBindings map rather than linked immediately. 
@@ -1920,9 +1926,11 @@ impl Type { generic_args.iter().any(|arg| arg.occurs(target_id)) } Type::Tuple(fields) => fields.iter().any(|field| field.occurs(target_id)), - Type::NamedGeneric(binding, _, _) | Type::TypeVariable(binding, _) => { - match &*binding.borrow() { - TypeBinding::Bound(binding) => binding.occurs(target_id), + Type::NamedGeneric(type_var, _, _) | Type::TypeVariable(type_var, _) => { + match &*type_var.borrow() { + TypeBinding::Bound(binding) => { + type_var.id() == target_id || binding.occurs(target_id) + } TypeBinding::Unbound(id) => *id == target_id, } } @@ -2007,6 +2015,83 @@ impl Type { pub fn from_generics(generics: &GenericTypeVars) -> Vec { vecmap(generics, |var| Type::TypeVariable(var.clone(), TypeVariableKind::Normal)) } + + /// Replace any `Type::NamedGeneric` in this type with a `Type::TypeVariable` + /// using the same inner `TypeVariable`. This is used during monomorphization + /// to bind to named generics since they are unbindable during type checking. + pub fn replace_named_generics_with_type_variables(&mut self) { + match self { + Type::FieldElement + | Type::Constant(_) + | Type::Integer(_, _) + | Type::Bool + | Type::Unit + | Type::Error + | Type::Quoted(_) => (), + + Type::Array(len, elem) => { + len.replace_named_generics_with_type_variables(); + elem.replace_named_generics_with_type_variables(); + } + + Type::Slice(elem) => elem.replace_named_generics_with_type_variables(), + Type::String(len) => len.replace_named_generics_with_type_variables(), + Type::FmtString(len, captures) => { + len.replace_named_generics_with_type_variables(); + captures.replace_named_generics_with_type_variables(); + } + Type::Tuple(fields) => { + for field in fields { + field.replace_named_generics_with_type_variables(); + } + } + Type::Struct(_, generics) => { + for generic in generics { + generic.replace_named_generics_with_type_variables(); + } + } + Type::Alias(alias, generics) => { + let mut typ = alias.borrow().get_type(generics); + typ.replace_named_generics_with_type_variables(); + *self = typ; + } + Type::TypeVariable(var, _) => { + let var = var.borrow(); + if let TypeBinding::Bound(binding) = &*var { + let mut binding = binding.clone(); + drop(var); + binding.replace_named_generics_with_type_variables(); + *self = binding; + } + } + Type::TraitAsType(_, _, generics) => { + for generic in generics { + generic.replace_named_generics_with_type_variables(); + } + } + Type::NamedGeneric(var, _, _) => { + let type_binding = var.borrow(); + if let TypeBinding::Bound(binding) = &*type_binding { + let mut binding = binding.clone(); + drop(type_binding); + binding.replace_named_generics_with_type_variables(); + *self = binding; + } else { + drop(type_binding); + *self = Type::TypeVariable(var.clone(), TypeVariableKind::Normal); + } + } + Type::Function(args, ret, env) => { + for arg in args { + arg.replace_named_generics_with_type_variables(); + } + ret.replace_named_generics_with_type_variables(); + env.replace_named_generics_with_type_variables(); + } + Type::MutableReference(elem) => elem.replace_named_generics_with_type_variables(), + Type::Forall(_, typ) => typ.replace_named_generics_with_type_variables(), + } + } } /// Wraps a given `expression` in `expression.as_slice()` @@ -2033,7 +2118,8 @@ fn convert_array_expression_to_slice( interner.push_expr_location(argument, location.span, location.file); let arguments = vec![argument]; - let call = HirExpression::Call(HirCallExpression { func, arguments, location }); + let is_macro_call = false; + let call = 
HirExpression::Call(HirCallExpression { func, arguments, location, is_macro_call }); interner.replace_expr(&expression, call); interner.push_expr_location(func, location.span, location.file); @@ -2203,10 +2289,10 @@ impl std::fmt::Debug for Type { Type::Error => write!(f, "error"), Type::NamedGeneric(binding, name, kind) => match kind { Kind::Normal => { - write!(f, "{} -> {:?}", name, binding) + write!(f, "{}{:?}", name, binding) } Kind::Numeric(typ) => { - write!(f, "({} : {}) -> {:?}", name, typ, binding) + write!(f, "({} : {}){:?}", name, typ, binding) } }, Type::Constant(x) => x.fmt(f), diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index 41de13fb17e..c6a1d44f26b 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -895,12 +895,14 @@ pub enum Keyword { Fn, For, FormatString, + FunctionDefinition, Global, If, Impl, In, Let, Mod, + Module, Mut, Pub, Quoted, @@ -908,12 +910,14 @@ pub enum Keyword { ReturnData, String, Struct, + StructDefinition, Super, TopLevelItem, Trait, + TraitConstraint, + TraitDefinition, Type, TypeType, - StructDefinition, Unchecked, Unconstrained, Use, @@ -943,12 +947,14 @@ impl fmt::Display for Keyword { Keyword::Fn => write!(f, "fn"), Keyword::For => write!(f, "for"), Keyword::FormatString => write!(f, "fmtstr"), + Keyword::FunctionDefinition => write!(f, "FunctionDefinition"), Keyword::Global => write!(f, "global"), Keyword::If => write!(f, "if"), Keyword::Impl => write!(f, "impl"), Keyword::In => write!(f, "in"), Keyword::Let => write!(f, "let"), Keyword::Mod => write!(f, "mod"), + Keyword::Module => write!(f, "Module"), Keyword::Mut => write!(f, "mut"), Keyword::Pub => write!(f, "pub"), Keyword::Quoted => write!(f, "Quoted"), @@ -956,12 +962,14 @@ impl fmt::Display for Keyword { Keyword::ReturnData => write!(f, "return_data"), Keyword::String => write!(f, "str"), Keyword::Struct => write!(f, "struct"), + Keyword::StructDefinition => write!(f, "StructDefinition"), Keyword::Super => write!(f, "super"), Keyword::TopLevelItem => write!(f, "TopLevelItem"), Keyword::Trait => write!(f, "trait"), + Keyword::TraitConstraint => write!(f, "TraitConstraint"), + Keyword::TraitDefinition => write!(f, "TraitDefinition"), Keyword::Type => write!(f, "type"), Keyword::TypeType => write!(f, "Type"), - Keyword::StructDefinition => write!(f, "StructDefinition"), Keyword::Unchecked => write!(f, "unchecked"), Keyword::Unconstrained => write!(f, "unconstrained"), Keyword::Use => write!(f, "use"), @@ -994,12 +1002,14 @@ impl Keyword { "fn" => Keyword::Fn, "for" => Keyword::For, "fmtstr" => Keyword::FormatString, + "FunctionDefinition" => Keyword::FunctionDefinition, "global" => Keyword::Global, "if" => Keyword::If, "impl" => Keyword::Impl, "in" => Keyword::In, "let" => Keyword::Let, "mod" => Keyword::Mod, + "Module" => Keyword::Module, "mut" => Keyword::Mut, "pub" => Keyword::Pub, "Quoted" => Keyword::Quoted, @@ -1010,6 +1020,8 @@ impl Keyword { "super" => Keyword::Super, "TopLevelItem" => Keyword::TopLevelItem, "trait" => Keyword::Trait, + "TraitConstraint" => Keyword::TraitConstraint, + "TraitDefinition" => Keyword::TraitDefinition, "type" => Keyword::Type, "Type" => Keyword::TypeType, "StructDefinition" => Keyword::StructDefinition, diff --git a/compiler/noirc_frontend/src/locations.rs b/compiler/noirc_frontend/src/locations.rs index 0efe385aa0a..0ba74e22781 100644 --- a/compiler/noirc_frontend/src/locations.rs +++ b/compiler/noirc_frontend/src/locations.rs @@ -3,7 +3,11 @@ 
use noirc_errors::Location; use rangemap::RangeMap; use rustc_hash::FxHashMap; -use crate::{macros_api::NodeInterner, node_interner::ReferenceId}; +use crate::{ + hir::def_map::{ModuleDefId, ModuleId}, + macros_api::{NodeInterner, StructId}, + node_interner::{DefinitionId, FuncId, GlobalId, ReferenceId, TraitId, TypeAliasId}, +}; use petgraph::prelude::NodeIndex as PetGraphIndex; #[derive(Debug, Default)] @@ -13,7 +17,7 @@ pub(crate) struct LocationIndices { impl LocationIndices { pub(crate) fn add_location(&mut self, location: Location, node_index: PetGraphIndex) { - // Some location spans are empty: maybe they are from ficticious nodes? + // Some location spans are empty: maybe they are from fictitious nodes? if location.span.start() == location.span.end() { return; } @@ -31,7 +35,7 @@ impl LocationIndices { impl NodeInterner { pub fn reference_location(&self, reference: ReferenceId) -> Location { match reference { - ReferenceId::Module(id) => self.module_location(&id), + ReferenceId::Module(id) => self.module_attributes(&id).location, ReferenceId::Function(id) => self.function_modifiers(&id).name_location, ReferenceId::Struct(id) => { let struct_type = self.get_struct(id); @@ -58,11 +62,97 @@ impl NodeInterner { } } - pub(crate) fn add_reference(&mut self, referenced: ReferenceId, reference: ReferenceId) { + pub fn reference_module(&self, reference: ReferenceId) -> Option<&ModuleId> { + self.reference_modules.get(&reference) + } + + pub(crate) fn add_module_def_id_reference( + &mut self, + def_id: ModuleDefId, + location: Location, + is_self_type: bool, + ) { + match def_id { + ModuleDefId::ModuleId(module_id) => { + self.add_module_reference(module_id, location); + } + ModuleDefId::FunctionId(func_id) => { + self.add_function_reference(func_id, location); + } + ModuleDefId::TypeId(struct_id) => { + self.add_struct_reference(struct_id, location, is_self_type); + } + ModuleDefId::TraitId(trait_id) => { + self.add_trait_reference(trait_id, location, is_self_type); + } + ModuleDefId::TypeAliasId(type_alias_id) => { + self.add_alias_reference(type_alias_id, location); + } + ModuleDefId::GlobalId(global_id) => { + self.add_global_reference(global_id, location); + } + }; + } + + pub(crate) fn add_module_reference(&mut self, id: ModuleId, location: Location) { + self.add_reference(ReferenceId::Module(id), location, false); + } + + pub(crate) fn add_struct_reference( + &mut self, + id: StructId, + location: Location, + is_self_type: bool, + ) { + self.add_reference(ReferenceId::Struct(id), location, is_self_type); + } + + pub(crate) fn add_struct_member_reference( + &mut self, + id: StructId, + member_index: usize, + location: Location, + ) { + self.add_reference(ReferenceId::StructMember(id, member_index), location, false); + } + + pub(crate) fn add_trait_reference( + &mut self, + id: TraitId, + location: Location, + is_self_type: bool, + ) { + self.add_reference(ReferenceId::Trait(id), location, is_self_type); + } + + pub(crate) fn add_alias_reference(&mut self, id: TypeAliasId, location: Location) { + self.add_reference(ReferenceId::Alias(id), location, false); + } + + pub(crate) fn add_function_reference(&mut self, id: FuncId, location: Location) { + self.add_reference(ReferenceId::Function(id), location, false); + } + + pub(crate) fn add_global_reference(&mut self, id: GlobalId, location: Location) { + self.add_reference(ReferenceId::Global(id), location, false); + } + + pub(crate) fn add_local_reference(&mut self, id: DefinitionId, location: Location) { + 
self.add_reference(ReferenceId::Local(id), location, false); + } + + pub(crate) fn add_reference( + &mut self, + referenced: ReferenceId, + location: Location, + is_self_type: bool, + ) { if !self.track_references { return; } + let reference = ReferenceId::Reference(location, is_self_type); + let referenced_index = self.get_or_insert_reference(referenced); let reference_location = self.reference_location(reference); let reference_index = self.reference_graph.add_node(reference); @@ -71,7 +161,11 @@ impl NodeInterner { self.location_indices.add_location(reference_location, reference_index); } - pub(crate) fn add_definition_location(&mut self, referenced: ReferenceId) { + pub(crate) fn add_definition_location( + &mut self, + referenced: ReferenceId, + module_id: Option, + ) { if !self.track_references { return; } @@ -79,6 +173,9 @@ impl NodeInterner { let referenced_index = self.get_or_insert_reference(referenced); let referenced_location = self.reference_location(referenced); self.location_indices.add_location(referenced_location, referenced_index); + if let Some(module_id) = module_id { + self.reference_modules.insert(referenced, module_id); + } } #[tracing::instrument(skip(self), ret)] @@ -110,7 +207,7 @@ impl NodeInterner { // Starting at the given location, find the node referenced by it. Then, gather // all locations that reference that node, and return all of them - // (the references and optionally the referenced node if `include_referencedd` is true). + // (the references and optionally the referenced node if `include_referenced` is true). // If `include_self_type_name` is true, references where "Self" is written are returned, // otherwise they are not. // Returns `None` if the location is not known to this interner. @@ -120,14 +217,8 @@ impl NodeInterner { include_referenced: bool, include_self_type_name: bool, ) -> Option> { - let node_index = self.location_indices.get_node_from_location(location)?; - - let reference_node = self.reference_graph[node_index]; - let referenced_node_index = if let ReferenceId::Reference(_, _) = reference_node { - self.referenced_index(node_index)? - } else { - node_index - }; + let referenced_node = self.find_referenced(location)?; + let referenced_node_index = self.reference_graph_indices[&referenced_node]; let found_locations = self.find_all_references_for_index( referenced_node_index, @@ -138,6 +229,19 @@ impl NodeInterner { Some(found_locations) } + // Returns the `ReferenceId` that is referenced by the given location, if any. + pub fn find_referenced(&self, location: Location) -> Option { + let node_index = self.location_indices.get_node_from_location(location)?; + + let reference_node = self.reference_graph[node_index]; + if let ReferenceId::Reference(_, _) = reference_node { + let node_index = self.referenced_index(node_index)?; + Some(self.reference_graph[node_index]) + } else { + Some(reference_node) + } + } + // Given a referenced node index, find all references to it and return their locations, optionally together // with the reference node's location if `include_referenced` is true. 
// If `include_self_type_name` is true, references where "Self" is written are returned, diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index be222cc4e35..a46f32e3094 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -21,7 +21,7 @@ use crate::{ types, }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, - Type, TypeBinding, TypeBindings, TypeVariable, TypeVariableKind, + Type, TypeBinding, TypeBindings, }; use acvm::{acir::AcirField, FieldElement}; use iter_extended::{btree_map, try_vecmap, vecmap}; @@ -1788,24 +1788,21 @@ pub fn perform_impl_bindings( if let Some(trait_method) = trait_method { let the_trait = interner.get_trait(trait_method.trait_id); - let trait_method_type = the_trait.methods[trait_method.method_index].typ.as_monotype(); + let mut trait_method_type = + the_trait.methods[trait_method.method_index].typ.as_monotype().clone(); + + let mut impl_method_type = + interner.function_meta(&impl_method).typ.unwrap_forall().1.clone(); // Make each NamedGeneric in this type bindable by replacing it with a TypeVariable // with the same internal id and binding. - let (generics, impl_method_type) = interner.function_meta(&impl_method).typ.unwrap_forall(); - - let replace_type_variable = |var: &TypeVariable| { - (var.id(), (var.clone(), Type::TypeVariable(var.clone(), TypeVariableKind::Normal))) - }; - - // Replace each NamedGeneric with a TypeVariable containing the same internal type variable - let type_bindings = generics.iter().map(replace_type_variable).collect(); - let impl_method_type = impl_method_type.force_substitute(&type_bindings); + trait_method_type.replace_named_generics_with_type_variables(); + impl_method_type.replace_named_generics_with_type_variables(); trait_method_type.try_unify(&impl_method_type, &mut bindings).map_err(|_| { InterpreterError::ImplMethodTypeMismatch { - expected: trait_method_type.clone(), - actual: impl_method_type, + expected: trait_method_type.follow_bindings(), + actual: impl_method_type.follow_bindings(), location, } })?; diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index a009a42df53..87ff45f8f1a 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -44,6 +44,13 @@ use crate::{Shared, TypeAlias, TypeBindings, TypeVariable, TypeVariableId, TypeV /// This is needed to stop recursing for cases such as `impl Foo for T where T: Eq` const IMPL_SEARCH_RECURSION_LIMIT: u32 = 10; +#[derive(Debug)] +pub struct ModuleAttributes { + pub name: String, + pub location: Location, + pub parent: LocalModuleId, +} + type StructAttributes = Vec; /// The node interner is the central storage location of all nodes in Noir's Hir (the @@ -68,7 +75,7 @@ pub struct NodeInterner { function_modules: HashMap, // The location of each module - module_locations: HashMap, + module_attributes: HashMap, /// This graph tracks dependencies between different global definitions. /// This is used to ensure the absence of dependency cycles for globals and types. 
@@ -219,6 +226,10 @@ pub struct NodeInterner { /// Store the location of the references in the graph pub(crate) location_indices: LocationIndices, + + // The module where each reference is + // (ReferenceId::Reference and ReferenceId::Local aren't included here) + pub(crate) reference_modules: HashMap, } /// A dependency in the dependency graph may be a type or a definition. @@ -375,11 +386,6 @@ impl StmtId { #[derive(Debug, Eq, PartialEq, Hash, Copy, Clone, PartialOrd, Ord)] pub struct ExprId(Index); -impl ExprId { - pub fn empty_block_id() -> ExprId { - ExprId(Index::unsafe_zeroed()) - } -} #[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)] pub struct FuncId(Index); @@ -537,6 +543,7 @@ pub struct GlobalInfo { pub definition_id: DefinitionId, pub ident: Ident, pub local_id: LocalModuleId, + pub crate_id: CrateId, pub location: Location, pub let_statement: StmtId, pub value: Option, @@ -547,13 +554,13 @@ pub struct QuotedTypeId(noirc_arena::Index); impl Default for NodeInterner { fn default() -> Self { - let mut interner = NodeInterner { + NodeInterner { nodes: Arena::default(), func_meta: HashMap::new(), function_definition_ids: HashMap::new(), function_modifiers: HashMap::new(), function_modules: HashMap::new(), - module_locations: HashMap::new(), + module_attributes: HashMap::new(), func_id_to_trait: HashMap::new(), dependency_graph: petgraph::graph::DiGraph::new(), dependency_graph_indices: HashMap::new(), @@ -586,12 +593,8 @@ impl Default for NodeInterner { location_indices: LocationIndices::default(), reference_graph: petgraph::graph::DiGraph::new(), reference_graph_indices: HashMap::new(), - }; - - // An empty block expression is used often, we add this into the `node` on startup - let expr_id = interner.push_expr(HirExpression::empty_block()); - assert_eq!(expr_id, ExprId::empty_block_id()); - interner + reference_modules: HashMap::new(), + } } } @@ -754,6 +757,7 @@ impl NodeInterner { &mut self, ident: Ident, local_id: LocalModuleId, + crate_id: CrateId, let_statement: StmtId, file: FileId, attributes: Vec, @@ -771,6 +775,7 @@ impl NodeInterner { definition_id, ident, local_id, + crate_id, let_statement, location, value: None, @@ -784,10 +789,12 @@ impl NodeInterner { } /// Intern an empty global. 
Used for collecting globals before they're defined + #[allow(clippy::too_many_arguments)] pub fn push_empty_global( &mut self, name: Ident, local_id: LocalModuleId, + crate_id: CrateId, file: FileId, attributes: Vec, mutable: bool, @@ -795,7 +802,8 @@ impl NodeInterner { ) -> GlobalId { let statement = self.push_stmt(HirStatement::Error); let span = name.span(); - let id = self.push_global(name, local_id, statement, file, attributes, mutable, comptime); + let id = self + .push_global(name, local_id, crate_id, statement, file, attributes, mutable, comptime); self.push_stmt_location(statement, span, file); id } @@ -854,7 +862,7 @@ impl NodeInterner { self.definitions.push(DefinitionInfo { name, mutable, comptime, kind, location }); if is_local { - self.add_definition_location(ReferenceId::Local(id)); + self.add_definition_location(ReferenceId::Local(id), None); } id @@ -892,7 +900,7 @@ impl NodeInterner { // This needs to be done after pushing the definition since it will reference the // location that was stored - self.add_definition_location(ReferenceId::Function(id)); + self.add_definition_location(ReferenceId::Function(id), Some(module)); definition_id } @@ -993,12 +1001,20 @@ impl NodeInterner { &self.struct_attributes[struct_id] } - pub fn add_module_location(&mut self, module_id: ModuleId, location: Location) { - self.module_locations.insert(module_id, location); + pub fn add_module_attributes(&mut self, module_id: ModuleId, attributes: ModuleAttributes) { + self.module_attributes.insert(module_id, attributes); + } + + pub fn module_attributes(&self, module_id: &ModuleId) -> &ModuleAttributes { + &self.module_attributes[module_id] + } + + pub fn try_module_attributes(&self, module_id: &ModuleId) -> Option<&ModuleAttributes> { + self.module_attributes.get(module_id) } - pub fn module_location(&self, module_id: &ModuleId) -> Location { - self.module_locations[module_id] + pub fn try_module_parent(&self, module_id: &ModuleId) -> Option { + self.try_module_attributes(module_id).map(|attrs| attrs.parent) } pub fn global_attributes(&self, global_id: &GlobalId) -> &[SecondaryAttribute] { @@ -1395,8 +1411,14 @@ impl NodeInterner { type_bindings: &mut TypeBindings, recursion_limit: u32, ) -> Result> { - let make_constraint = - || TraitConstraint::new(object_type.clone(), trait_id, trait_generics.to_vec()); + let make_constraint = || { + TraitConstraint::new( + object_type.clone(), + trait_id, + trait_generics.to_vec(), + Span::default(), + ) + }; // Prevent infinite recursion when looking for impls if recursion_limit == 0 { diff --git a/compiler/noirc_frontend/src/parser/errors.rs b/compiler/noirc_frontend/src/parser/errors.rs index 41ea9f88c19..c566489eb40 100644 --- a/compiler/noirc_frontend/src/parser/errors.rs +++ b/compiler/noirc_frontend/src/parser/errors.rs @@ -20,8 +20,8 @@ pub enum ParserErrorReason { MissingSeparatingSemi, #[error("constrain keyword is deprecated")] ConstrainDeprecated, - #[error("Expression is invalid in an array-length type: '{0}'. Only unsigned integer constants, globals, generics, +, -, *, /, and % may be used in this context.")] - InvalidArrayLengthExpression(Expression), + #[error("Invalid type expression: '{0}'. 
Only unsigned integer constants up to `u32`, globals, generics, +, -, *, /, and % may be used in this context.")] + InvalidTypeExpression(Expression), #[error("Early 'return' is unsupported")] EarlyReturn, #[error("Patterns aren't allowed in a trait's function declarations")] @@ -44,6 +44,8 @@ pub enum ParserErrorReason { InvalidBitSize(u32), #[error("{0}")] Lexer(LexerErrorKind), + #[error("The only supported numeric generic types are `u1`, `u8`, `u16`, and `u32`")] + ForbiddenNumericGenericType, } /// Represents a parsing error, or a parsing error in the making. diff --git a/compiler/noirc_frontend/src/parser/mod.rs b/compiler/noirc_frontend/src/parser/mod.rs index c4aa0654ecd..c62d66769ac 100644 --- a/compiler/noirc_frontend/src/parser/mod.rs +++ b/compiler/noirc_frontend/src/parser/mod.rs @@ -22,7 +22,7 @@ use chumsky::primitive::Container; pub use errors::ParserError; pub use errors::ParserErrorReason; use noirc_errors::Span; -pub use parser::{expression, parse_program, top_level_items}; +pub use parser::{expression, parse_program, top_level_items, trait_bound}; #[derive(Debug, Clone)] pub enum TopLevelStatement { @@ -39,6 +39,24 @@ pub enum TopLevelStatement { Error, } +impl TopLevelStatement { + pub fn into_item_kind(self) -> Option { + match self { + TopLevelStatement::Function(f) => Some(ItemKind::Function(f)), + TopLevelStatement::Module(m) => Some(ItemKind::ModuleDecl(m)), + TopLevelStatement::Import(i) => Some(ItemKind::Import(i)), + TopLevelStatement::Struct(s) => Some(ItemKind::Struct(s)), + TopLevelStatement::Trait(t) => Some(ItemKind::Trait(t)), + TopLevelStatement::TraitImpl(t) => Some(ItemKind::TraitImpl(t)), + TopLevelStatement::Impl(i) => Some(ItemKind::Impl(i)), + TopLevelStatement::TypeAlias(t) => Some(ItemKind::TypeAlias(t)), + TopLevelStatement::SubModule(s) => Some(ItemKind::Submodules(s)), + TopLevelStatement::Global(c) => Some(ItemKind::Global(c)), + TopLevelStatement::Error => None, + } + } +} + // Helper trait that gives us simpler type signatures for return types: // e.g. 
impl Parser versus impl Parser> pub trait NoirParser: Parser + Sized + Clone {} diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index de9095aaff2..7f3e0e68bbc 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -171,20 +171,8 @@ fn module() -> impl NoirParser { .to(ParsedModule::default()) .then(spanned(top_level_statement(module_parser)).repeated()) .foldl(|mut program, (statement, span)| { - let mut push_item = |kind| program.items.push(Item { kind, span }); - - match statement { - TopLevelStatement::Function(f) => push_item(ItemKind::Function(f)), - TopLevelStatement::Module(m) => push_item(ItemKind::ModuleDecl(m)), - TopLevelStatement::Import(i) => push_item(ItemKind::Import(i)), - TopLevelStatement::Struct(s) => push_item(ItemKind::Struct(s)), - TopLevelStatement::Trait(t) => push_item(ItemKind::Trait(t)), - TopLevelStatement::TraitImpl(t) => push_item(ItemKind::TraitImpl(t)), - TopLevelStatement::Impl(i) => push_item(ItemKind::Impl(i)), - TopLevelStatement::TypeAlias(t) => push_item(ItemKind::TypeAlias(t)), - TopLevelStatement::SubModule(s) => push_item(ItemKind::Submodules(s)), - TopLevelStatement::Global(c) => push_item(ItemKind::Global(c)), - TopLevelStatement::Error => (), + if let Some(kind) = statement.into_item_kind() { + program.items.push(Item { kind, span }); } program }) @@ -204,9 +192,9 @@ pub fn top_level_items() -> impl NoirParser> { /// | module_declaration /// | use_statement /// | global_declaration -fn top_level_statement( - module_parser: impl NoirParser, -) -> impl NoirParser { +fn top_level_statement<'a>( + module_parser: impl NoirParser + 'a, +) -> impl NoirParser + 'a { choice(( function::function_definition(false).map(TopLevelStatement::Function), structs::struct_definition(), @@ -227,8 +215,9 @@ fn top_level_statement( /// /// implementation: 'impl' generics type '{' function_definition ... 
'}' fn implementation() -> impl NoirParser { - keyword(Keyword::Impl) - .ignore_then(function::generics()) + maybe_comp_time() + .then_ignore(keyword(Keyword::Impl)) + .then(function::generics()) .then(parse_type().map_with_span(|typ, span| (typ, span))) .then(where_clause()) .then_ignore(just(Token::LeftBrace)) @@ -236,13 +225,14 @@ fn implementation() -> impl NoirParser { .then_ignore(just(Token::RightBrace)) .map(|args| { let ((other_args, where_clause), methods) = args; - let (generics, (object_type, type_span)) = other_args; + let ((is_comptime, generics), (object_type, type_span)) = other_args; TopLevelStatement::Impl(TypeImpl { generics, object_type, type_span, where_clause, methods, + is_comptime, }) }) } @@ -408,7 +398,7 @@ fn trait_bounds() -> impl NoirParser> { trait_bound().separated_by(just(Token::Plus)).at_least(1).allow_trailing() } -fn trait_bound() -> impl NoirParser { +pub fn trait_bound() -> impl NoirParser { path().then(generic_type_args(parse_type())).map(|(trait_path, trait_generics)| TraitBound { trait_path, trait_generics, diff --git a/compiler/noirc_frontend/src/parser/parser/function.rs b/compiler/noirc_frontend/src/parser/parser/function.rs index 3e686ee4c85..2fd337e1cb1 100644 --- a/compiler/noirc_frontend/src/parser/parser/function.rs +++ b/compiler/noirc_frontend/src/parser/parser/function.rs @@ -4,13 +4,17 @@ use super::{ parameter_name_recovery, parameter_recovery, parenthesized, parse_type, pattern, self_parameter, where_clause, NoirParser, }; -use crate::ast::{ - FunctionDefinition, FunctionReturnType, ItemVisibility, NoirFunction, Param, Visibility, -}; -use crate::parser::spanned; use crate::token::{Keyword, Token}; +use crate::{ast::IntegerBitSize, parser::spanned}; +use crate::{ + ast::{ + FunctionDefinition, FunctionReturnType, ItemVisibility, NoirFunction, Param, Visibility, + }, + macros_api::UnresolvedTypeData, + parser::{ParserError, ParserErrorReason}, +}; use crate::{ - ast::{UnresolvedGeneric, UnresolvedGenerics}, + ast::{Signedness, UnresolvedGeneric, UnresolvedGenerics}, parser::labels::ParsingRuleLabel, }; @@ -85,6 +89,21 @@ pub(super) fn numeric_generic() -> impl NoirParser { .then_ignore(just(Token::Colon)) .then(parse_type()) .map(|(ident, typ)| UnresolvedGeneric::Numeric { ident, typ }) + .validate(|generic, span, emit| { + if let UnresolvedGeneric::Numeric { typ, .. 
} = &generic { + if let UnresolvedTypeData::Integer(signedness, bit_size) = typ.typ { + if matches!(signedness, Signedness::Signed) + || matches!(bit_size, IntegerBitSize::SixtyFour) + { + emit(ParserError::with_reason( + ParserErrorReason::ForbiddenNumericGenericType, + span, + )); + } + } + } + generic + }) } pub(super) fn generic_type() -> impl NoirParser { @@ -211,7 +230,7 @@ mod test { // fn func_name(x: impl Eq) {} with error Expected an end of input but found end of input // "fn func_name(x: impl Eq) {}", "fn func_name(x: impl Eq, y : T) where T: SomeTrait + Eq {}", - "fn func_name(x: [Field; N]) {}", + "fn func_name(x: [Field; N]) {}", ], ); @@ -232,7 +251,12 @@ mod test { "fn func_name(y: T) {}", "fn func_name(y: T) {}", "fn func_name(y: T) {}", - "fn func_name(y: T) {}", + // Test failure of missing `let` + "fn func_name(y: T) {}", + // Test that signed numeric generics are banned + "fn func_name() {}", + // Test that `u64` is banned + "fn func_name(x: [Field; N]) {}", ], ); } diff --git a/compiler/noirc_frontend/src/parser/parser/path.rs b/compiler/noirc_frontend/src/parser/parser/path.rs index e40268af410..8957fb7c40b 100644 --- a/compiler/noirc_frontend/src/parser/parser/path.rs +++ b/compiler/noirc_frontend/src/parser/parser/path.rs @@ -17,6 +17,7 @@ pub(super) fn path() -> impl NoirParser { choice(( path_kind(Keyword::Crate, PathKind::Crate), path_kind(Keyword::Dep, PathKind::Dep), + path_kind(Keyword::Super, PathKind::Super), idents().map_with_span(make_path(PathKind::Plain)), )) } @@ -64,6 +65,7 @@ mod test { ("std", PathKind::Plain), ("hash::collections", PathKind::Plain), ("crate::std::hash", PathKind::Crate), + ("super::foo", PathKind::Super), ]; for (src, expected_path_kind) in cases { diff --git a/compiler/noirc_frontend/src/parser/parser/structs.rs b/compiler/noirc_frontend/src/parser/parser/structs.rs index 7da956bdfea..9a3adf74d7f 100644 --- a/compiler/noirc_frontend/src/parser/parser/structs.rs +++ b/compiler/noirc_frontend/src/parser/parser/structs.rs @@ -1,6 +1,7 @@ use chumsky::prelude::*; use crate::ast::{Ident, NoirStruct, UnresolvedType}; +use crate::parser::parser::types::maybe_comp_time; use crate::{ parser::{ parser::{ @@ -28,13 +29,21 @@ pub(super) fn struct_definition() -> impl NoirParser { .or(just(Semicolon).to(Vec::new())); attributes() + .then(maybe_comp_time()) .then_ignore(keyword(Struct)) .then(ident()) .then(function::generics()) .then(fields) - .validate(|(((raw_attributes, name), generics), fields), span, emit| { - let attributes = validate_secondary_attributes(raw_attributes, span, emit); - TopLevelStatement::Struct(NoirStruct { name, attributes, generics, fields, span }) + .validate(|((((attributes, is_comptime), name), generics), fields), span, emit| { + let attributes = validate_secondary_attributes(attributes, span, emit); + TopLevelStatement::Struct(NoirStruct { + name, + attributes, + generics, + fields, + span, + is_comptime, + }) }) } diff --git a/compiler/noirc_frontend/src/parser/parser/traits.rs b/compiler/noirc_frontend/src/parser/parser/traits.rs index 1aec57c8e41..4e4c9d5c0db 100644 --- a/compiler/noirc_frontend/src/parser/parser/traits.rs +++ b/compiler/noirc_frontend/src/parser/parser/traits.rs @@ -1,6 +1,8 @@ use chumsky::prelude::*; +use super::attributes::{attributes, validate_secondary_attributes}; use super::function::function_return_type; +use super::types::maybe_comp_time; use super::{block, expression, fresh_statement, function, function_declaration_parameters}; use crate::ast::{ @@ -18,15 +20,24 @@ use crate::{ use 
super::{generic_type_args, parse_type, path, primitives::ident}; pub(super) fn trait_definition() -> impl NoirParser { - keyword(Keyword::Trait) - .ignore_then(ident()) + attributes() + .then_ignore(keyword(Keyword::Trait)) + .then(ident()) .then(function::generics()) .then(where_clause()) .then_ignore(just(Token::LeftBrace)) .then(trait_body()) .then_ignore(just(Token::RightBrace)) - .map_with_span(|(((name, generics), where_clause), items), span| { - TopLevelStatement::Trait(NoirTrait { name, generics, where_clause, span, items }) + .validate(|((((attributes, name), generics), where_clause), items), span, emit| { + let attributes = validate_secondary_attributes(attributes, span, emit); + TopLevelStatement::Trait(NoirTrait { + name, + generics, + where_clause, + span, + items, + attributes, + }) }) } @@ -93,8 +104,9 @@ fn trait_type_declaration() -> impl NoirParser { /// /// trait_implementation: 'impl' generics ident generic_args for type '{' trait_implementation_body '}' pub(super) fn trait_implementation() -> impl NoirParser { - keyword(Keyword::Impl) - .ignore_then(function::generics()) + maybe_comp_time() + .then_ignore(keyword(Keyword::Impl)) + .then(function::generics()) .then(path()) .then(generic_type_args(parse_type())) .then_ignore(keyword(Keyword::For)) @@ -104,8 +116,8 @@ pub(super) fn trait_implementation() -> impl NoirParser { .then(trait_implementation_body()) .then_ignore(just(Token::RightBrace)) .map(|args| { - let ((other_args, where_clause), items) = args; - let (((impl_generics, trait_name), trait_generics), object_type) = other_args; + let (((other_args, object_type), where_clause), items) = args; + let (((is_comptime, impl_generics), trait_name), trait_generics) = other_args; TopLevelStatement::TraitImpl(NoirTraitImpl { impl_generics, @@ -114,6 +126,7 @@ pub(super) fn trait_implementation() -> impl NoirParser { object_type, items, where_clause, + is_comptime, }) }) } diff --git a/compiler/noirc_frontend/src/parser/parser/types.rs b/compiler/noirc_frontend/src/parser/parser/types.rs index 32929312d54..cecc1cbcd4c 100644 --- a/compiler/noirc_frontend/src/parser/parser/types.rs +++ b/compiler/noirc_frontend/src/parser/parser/types.rs @@ -3,7 +3,9 @@ use super::{ expression_with_precedence, keyword, nothing, parenthesized, path, NoirParser, ParserError, ParserErrorReason, Precedence, }; -use crate::ast::{Recoverable, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression}; +use crate::ast::{ + Expression, Recoverable, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, +}; use crate::QuotedType; use crate::parser::labels::ParsingRuleLabel; @@ -26,6 +28,10 @@ pub(super) fn parse_type_inner<'a>( string_type(), expr_type(), struct_definition_type(), + trait_constraint_type(), + trait_definition_type(), + function_definition_type(), + module_type(), top_level_item_type(), type_of_quoted_types(), quoted_type(), @@ -54,15 +60,7 @@ pub(super) fn parenthesized_type( } pub(super) fn maybe_comp_time() -> impl NoirParser { - keyword(Keyword::Comptime).or_not().validate(|opt, span, emit| { - if opt.is_some() { - emit(ParserError::with_reason( - ParserErrorReason::ExperimentalFeature("Comptime values"), - span, - )); - } - opt.is_some() - }) + keyword(Keyword::Comptime).or_not().map(|opt| opt.is_some()) } pub(super) fn field_type() -> impl NoirParser { @@ -87,6 +85,30 @@ pub(super) fn struct_definition_type() -> impl NoirParser { }) } +/// This is the type `TraitConstraint` - the type of a quoted trait constraint +pub(super) fn trait_constraint_type() -> impl 
NoirParser { + keyword(Keyword::TraitConstraint).map_with_span(|_, span| { + UnresolvedTypeData::Quoted(QuotedType::TraitConstraint).with_span(span) + }) +} + +pub(super) fn trait_definition_type() -> impl NoirParser { + keyword(Keyword::TraitDefinition).map_with_span(|_, span| { + UnresolvedTypeData::Quoted(QuotedType::TraitDefinition).with_span(span) + }) +} + +pub(super) fn function_definition_type() -> impl NoirParser { + keyword(Keyword::FunctionDefinition).map_with_span(|_, span| { + UnresolvedTypeData::Quoted(QuotedType::FunctionDefinition).with_span(span) + }) +} + +pub(super) fn module_type() -> impl NoirParser { + keyword(Keyword::Module) + .map_with_span(|_, span| UnresolvedTypeData::Quoted(QuotedType::Module).with_span(span)) +} + /// This is the type `TopLevelItem` - the type of a quoted statement in the top level. /// E.g. a type definition, trait definition, trait impl, function, etc. fn top_level_item_type() -> impl NoirParser { @@ -181,8 +203,7 @@ pub(super) fn generic_type_args<'a>( // separator afterward. Failing early here ensures we try the `type_expression` // parser afterward. .then_ignore(one_of([Token::Comma, Token::Greater]).rewind()) - .or(type_expression() - .map_with_span(|expr, span| UnresolvedTypeData::Expression(expr).with_span(span))) + .or(type_expression_validated()) .separated_by(just(Token::Comma)) .allow_trailing() .at_least(1) @@ -214,7 +235,26 @@ pub(super) fn slice_type( }) } -pub(super) fn type_expression() -> impl NoirParser { +fn type_expression() -> impl NoirParser { + type_expression_inner().try_map(UnresolvedTypeExpression::from_expr) +} + +/// This parser is the same as `type_expression()`, however, it continues parsing and +/// emits a parser error in the case of an invalid type expression rather than halting the parser. 
+fn type_expression_validated() -> impl NoirParser { + type_expression_inner().validate(|expr, span, emit| { + let type_expr = UnresolvedTypeExpression::from_expr(expr, span); + match type_expr { + Ok(type_expression) => UnresolvedTypeData::Expression(type_expression).with_span(span), + Err(parser_error) => { + emit(parser_error); + UnresolvedType::error(span) + } + } + }) +} + +fn type_expression_inner() -> impl NoirParser { recursive(|expr| { expression_with_precedence( Precedence::lowest_type_precedence(), @@ -226,7 +266,6 @@ pub(super) fn type_expression() -> impl NoirParser { ) }) .labelled(ParsingRuleLabel::TypeExpression) - .try_map(UnresolvedTypeExpression::from_expr) } pub(super) fn tuple_type(type_parser: T) -> impl NoirParser diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 8cedeeeff0d..cbc15da20ff 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -50,10 +50,7 @@ pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, F }); } -pub(crate) fn get_program( - src: &str, - use_legacy: bool, -) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { +pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { let root = std::path::Path::new("/"); let fm = FileManager::new(root); @@ -80,12 +77,11 @@ pub(crate) fn get_program( }; // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect( + errors.extend(DefCollector::collect_crate_and_dependencies( def_map, &mut context, program.clone().into_sorted(), root_file_id, - use_legacy, None, // No debug_comptime_in_file &[], // No macro processors )); @@ -94,7 +90,14 @@ pub(crate) fn get_program( } pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { - get_program(src, false).2 + get_program(src).2 +} + +fn assert_no_errors(src: &str) { + let errors = get_program_errors(src); + if !errors.is_empty() { + panic!("Expected no errors, got: {:?}", errors); + } } #[test] @@ -141,10 +144,7 @@ fn check_trait_implemented_for_all_t() { fn main(a: Foo) -> pub bool { a.is_default() }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + assert_no_errors(src); } #[test] @@ -767,9 +767,7 @@ fn test_impl_self_within_default_def() { self } }"; - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + assert_no_errors(src); } #[test] @@ -794,10 +792,7 @@ fn check_trait_as_type_as_fn_parameter() { fn main(a: Foo) -> pub bool { test_eq(a) }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + assert_no_errors(src); } #[test] @@ -830,14 +825,11 @@ fn check_trait_as_type_as_two_fn_parameters() { fn main(a: Foo, b: u64) -> pub bool { test_eq(a, b) }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + assert_no_errors(src); } fn get_program_captures(src: &str) -> Vec> { - let (program, context, _errors) = get_program(src, false); + let (program, context, _errors) = get_program(src); let interner = context.def_interner; let mut all_captures: Vec> = Vec::new(); for func in program.into_sorted().functions { @@ -898,7 +890,7 @@ fn resolve_empty_function() { } "; - assert!(get_program_errors(src).is_empty()); + assert_no_errors(src); } #[test] 
fn resolve_basic_function() { @@ -908,7 +900,7 @@ fn resolve_basic_function() { assert(y == x); } "#; - assert!(get_program_errors(src).is_empty()); + assert_no_errors(src); } #[test] fn resolve_unused_var() { @@ -981,7 +973,7 @@ fn resolve_literal_expr() { assert(y == x); } "#; - assert!(get_program_errors(src).is_empty()); + assert_no_errors(src); } #[test] @@ -1028,7 +1020,7 @@ fn resolve_prefix_expr() { let _y = -x; } "#; - assert!(get_program_errors(src).is_empty()); + assert_no_errors(src); } #[test] @@ -1040,7 +1032,7 @@ fn resolve_for_expr() { }; } "#; - assert!(get_program_errors(src).is_empty()); + assert_no_errors(src); } #[test] @@ -1054,7 +1046,7 @@ fn resolve_call_expr() { x } "#; - assert!(get_program_errors(src).is_empty()); + assert_no_errors(src); } #[test] @@ -1071,7 +1063,7 @@ fn resolve_shadowing() { x } "#; - assert!(get_program_errors(src).is_empty()); + assert_no_errors(src); } #[test] @@ -1082,7 +1074,7 @@ fn resolve_basic_closure() { closure(x) } "#; - assert!(get_program_errors(src).is_empty()); + assert_no_errors(src); } #[test] @@ -1133,7 +1125,7 @@ fn resolve_complex_closures() { a + b + c + closure_with_transitive_captures(6) } "#; - assert!(get_program_errors(src).is_empty(), "there should be no errors"); + assert_no_errors(src); let expected_captures = vec![ vec![], @@ -1199,7 +1191,7 @@ fn resolve_fmt_strings() { } fn check_rewrite(src: &str, expected: &str) { - let (_program, mut context, _errors) = get_program(src, false); + let (_program, mut context, _errors) = get_program(src); let main_func_id = context.def_interner.find_function("main").unwrap(); let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); assert!(format!("{}", program) == expected); @@ -1636,14 +1628,13 @@ fn numeric_generic_in_function_signature() { let src = r#" fn foo(arr: [Field; N]) -> [Field; N] { arr } "#; - let errors = get_program_errors(src); - assert!(errors.is_empty()); + assert_no_errors(src); } #[test] fn numeric_generic_as_struct_field_type() { let src = r#" - struct Foo { + struct Foo { a: Field, b: N, } @@ -1704,7 +1695,7 @@ fn numeric_generic_as_param_type() { #[test] fn numeric_generic_used_in_nested_type_fail() { let src = r#" - struct Foo { + struct Foo { a: Field, b: Bar, } @@ -1749,12 +1740,14 @@ fn numeric_generic_used_in_nested_type_pass() { inner: [u64; N], } "#; - let errors = get_program_errors(src); - assert!(errors.is_empty()); + assert_no_errors(src); } #[test] fn numeric_generic_used_in_trait() { + // We want to make sure that `N` in `impl Deserialize` does + // not trigger `expected type, found numeric generic parameter N` as the trait + // does in fact expect a numeric generic. let src = r#" struct MyType { a: Field, @@ -1763,7 +1756,7 @@ fn numeric_generic_used_in_trait() { d: T, } - impl Deserialize for MyType { + impl Deserialize for MyType { fn deserialize(fields: [Field; N], other: T) -> Self { MyType { a: fields[0], b: fields[1], c: fields[2], d: other } } @@ -1773,11 +1766,7 @@ fn numeric_generic_used_in_trait() { fn deserialize(fields: [Field; N], other: T) -> Self; } "#; - let errors = get_program_errors(src); - // We want to make sure that `N` in `impl Deserialize` does - // not trigger `expected type, found numeric generic parameter N` as the trait - // does in fact expect a numeric generic. 
- assert!(errors.is_empty()); + assert_no_errors(src); } #[test] @@ -1808,8 +1797,7 @@ fn numeric_generic_in_trait_impl_with_extra_impl_generics() { fn deserialize(fields: [Field; N]) -> Self; } "#; - let errors = get_program_errors(src); - assert!(errors.is_empty()); + assert_no_errors(src); } #[test] @@ -1827,8 +1815,7 @@ fn numeric_generic_used_in_where_clause() { T::deserialize(fields) } "#; - let errors = get_program_errors(src); - assert!(errors.is_empty()); + assert_no_errors(src); } #[test] @@ -1845,8 +1832,7 @@ fn numeric_generic_used_in_turbofish() { assert(double::<7 + 8>() == 30); } "#; - let errors = get_program_errors(src); - assert!(errors.is_empty()); + assert_no_errors(src); } #[test] @@ -1866,8 +1852,7 @@ fn constant_used_with_numeric_generic() { } } "#; - let errors = get_program_errors(src); - assert!(errors.is_empty()); + assert_no_errors(src); } #[test] @@ -1971,6 +1956,328 @@ fn quote_code_fragments() { assert!(matches!(&errors[0].0, CompilationError::InterpreterError(FailingConstraint { .. }))); } +#[test] +fn impl_stricter_than_trait_no_trait_method_constraints() { + // This test ensures that the error we get from the where clause on the trait impl method + // is a `DefCollectorErrorKind::ImplIsStricterThanTrait` error. + let src = r#" + trait Serialize { + // We want to make sure we trigger the error when override a trait method + // which itself has no trait constraints. + fn serialize(self) -> [Field; N]; + } + + trait ToField { + fn to_field(self) -> Field; + } + + fn process_array(array: [Field; N]) -> Field { + array[0] + } + + fn serialize_thing(thing: A) -> [Field; N] where A: Serialize { + thing.serialize() + } + + struct MyType { + a: T, + b: T, + } + + impl Serialize<2> for MyType { + fn serialize(self) -> [Field; 2] where T: ToField { + [ self.a.to_field(), self.b.to_field() ] + } + } + + impl MyType { + fn do_thing_with_serialization_with_extra_steps(self) -> Field { + process_array(serialize_thing(self)) + } + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + &errors[0].0, + CompilationError::DefinitionError(DefCollectorErrorKind::ImplIsStricterThanTrait { .. }) + )); +} + +#[test] +fn impl_stricter_than_trait_different_generics() { + let src = r#" + trait Default { } + + // Object type of the trait constraint differs + trait Foo { + fn foo_good() where T: Default; + + fn foo_bad() where T: Default; + } + + impl Foo for () { + fn foo_good() where A: Default {} + + fn foo_bad() where B: Default {} + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + if let CompilationError::DefinitionError(DefCollectorErrorKind::ImplIsStricterThanTrait { + constraint_typ, + .. 
+ }) = &errors[0].0 + { + assert!(matches!(constraint_typ.to_string().as_str(), "B")); + } else { + panic!("Expected DefCollectorErrorKind::ImplIsStricterThanTrait but got {:?}", errors[0].0); + } +} + +#[test] +fn impl_stricter_than_trait_different_object_generics() { + let src = r#" + trait MyTrait { } + + trait OtherTrait {} + + struct Option { + inner: T + } + + struct OtherOption { + inner: Option, + } + + trait Bar { + fn bar_good() where Option: MyTrait, OtherOption>: OtherTrait; + + fn bar_bad() where Option: MyTrait, OtherOption>: OtherTrait; + + fn array_good() where [T; 8]: MyTrait; + + fn array_bad() where [T; 8]: MyTrait; + + fn tuple_good() where (Option, Option): MyTrait; + + fn tuple_bad() where (Option, Option): MyTrait; + } + + impl Bar for () { + fn bar_good() + where + OtherOption>: OtherTrait, + Option: MyTrait { } + + fn bar_bad() + where + OtherOption>: OtherTrait, + Option: MyTrait { } + + fn array_good() where [A; 8]: MyTrait { } + + fn array_bad() where [B; 8]: MyTrait { } + + fn tuple_good() where (Option, Option): MyTrait { } + + fn tuple_bad() where (Option, Option): MyTrait { } + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 3); + if let CompilationError::DefinitionError(DefCollectorErrorKind::ImplIsStricterThanTrait { + constraint_typ, + constraint_name, + .. + }) = &errors[0].0 + { + assert!(matches!(constraint_typ.to_string().as_str(), "Option")); + assert!(matches!(constraint_name.as_str(), "MyTrait")); + } else { + panic!("Expected DefCollectorErrorKind::ImplIsStricterThanTrait but got {:?}", errors[0].0); + } + + if let CompilationError::DefinitionError(DefCollectorErrorKind::ImplIsStricterThanTrait { + constraint_typ, + constraint_name, + .. + }) = &errors[1].0 + { + assert!(matches!(constraint_typ.to_string().as_str(), "[B; 8]")); + assert!(matches!(constraint_name.as_str(), "MyTrait")); + } else { + panic!("Expected DefCollectorErrorKind::ImplIsStricterThanTrait but got {:?}", errors[0].0); + } + + if let CompilationError::DefinitionError(DefCollectorErrorKind::ImplIsStricterThanTrait { + constraint_typ, + constraint_name, + .. + }) = &errors[2].0 + { + assert!(matches!(constraint_typ.to_string().as_str(), "(Option, Option)")); + assert!(matches!(constraint_name.as_str(), "MyTrait")); + } else { + panic!("Expected DefCollectorErrorKind::ImplIsStricterThanTrait but got {:?}", errors[0].0); + } +} + +#[test] +fn impl_stricter_than_trait_different_trait() { + let src = r#" + trait Default { } + + trait OtherDefault { } + + struct Option { + inner: T + } + + trait Bar { + fn bar() where Option: Default; + } + + impl Bar for () { + // Trait constraint differs due to the trait even though the constraint + // types are the same. + fn bar() where Option: OtherDefault {} + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + if let CompilationError::DefinitionError(DefCollectorErrorKind::ImplIsStricterThanTrait { + constraint_typ, + constraint_name, + .. 
+ }) = &errors[0].0 + { + assert!(matches!(constraint_typ.to_string().as_str(), "Option")); + assert!(matches!(constraint_name.as_str(), "OtherDefault")); + } else { + panic!("Expected DefCollectorErrorKind::ImplIsStricterThanTrait but got {:?}", errors[0].0); + } +} + +#[test] +fn trait_impl_where_clause_stricter_pass() { + let src = r#" + trait MyTrait { + fn good_foo() where H: OtherTrait; + + fn bad_foo() where H: OtherTrait; + } + + trait OtherTrait {} + + struct Option { + inner: T + } + + impl MyTrait for [T] where Option: MyTrait { + fn good_foo() where B: OtherTrait { } + + fn bad_foo() where A: OtherTrait { } + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + if let CompilationError::DefinitionError(DefCollectorErrorKind::ImplIsStricterThanTrait { + constraint_typ, + constraint_name, + .. + }) = &errors[0].0 + { + assert!(matches!(constraint_typ.to_string().as_str(), "A")); + assert!(matches!(constraint_name.as_str(), "OtherTrait")); + } else { + panic!("Expected DefCollectorErrorKind::ImplIsStricterThanTrait but got {:?}", errors[0].0); + } +} + +#[test] +fn impl_stricter_than_trait_different_trait_generics() { + let src = r#" + trait Foo { + fn foo() where T: T2; + } + + impl Foo for () { + // Should be A: T2 + fn foo() where A: T2 {} + } + + trait T2 {} + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + if let CompilationError::DefinitionError(DefCollectorErrorKind::ImplIsStricterThanTrait { + constraint_typ, + constraint_name, + constraint_generics, + .. + }) = &errors[0].0 + { + assert!(matches!(constraint_typ.to_string().as_str(), "A")); + assert!(matches!(constraint_name.as_str(), "T2")); + assert!(matches!(constraint_generics[0].to_string().as_str(), "B")); + } else { + panic!("Expected DefCollectorErrorKind::ImplIsStricterThanTrait but got {:?}", errors[0].0); + } +} + +#[test] +fn impl_not_found_for_inner_impl() { + // We want to guarantee that we get a no impl found error + let src = r#" + trait Serialize { + fn serialize(self) -> [Field; N]; + } + + trait ToField { + fn to_field(self) -> Field; + } + + fn process_array(array: [Field; N]) -> Field { + array[0] + } + + fn serialize_thing(thing: A) -> [Field; N] where A: Serialize { + thing.serialize() + } + + struct MyType { + a: T, + b: T, + } + + impl Serialize<2> for MyType where T: ToField { + fn serialize(self) -> [Field; 2] { + [ self.a.to_field(), self.b.to_field() ] + } + } + + impl MyType { + fn do_thing_with_serialization_with_extra_steps(self) -> Field { + process_array(serialize_thing(self)) + } + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + &errors[0].0, + CompilationError::TypeError(TypeCheckError::NoMatchingImplFound { .. 
}) + )); +} + // Regression for #5388 #[test] fn comptime_let() { @@ -2076,8 +2383,7 @@ fn turbofish_numeric_generic_nested_call() { let _ = bar::(); } "#; - let errors = get_program_errors(src); - assert!(errors.is_empty()); + assert_no_errors(src); // Check for turbofish numeric generics used with method calls let src = r#" @@ -2107,6 +2413,84 @@ fn turbofish_numeric_generic_nested_call() { let _ = bar::(); } "#; + assert_no_errors(src); +} + +#[test] +fn use_super() { + let src = r#" + fn some_func() {} + + mod foo { + use super::some_func; + } + "#; + assert_no_errors(src); +} + +#[test] +fn use_super_in_path() { + let src = r#" + fn some_func() {} + + mod foo { + fn func() { + super::some_func(); + } + } + "#; + assert_no_errors(src); +} + +#[test] +fn no_super() { + let src = "use super::some_func;"; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::DefinitionError(DefCollectorErrorKind::PathResolutionError( + PathResolutionError::NoSuper(span), + )) = &errors[0].0 + else { + panic!("Expected a 'no super' error, got {:?}", errors[0].0); + }; + + assert_eq!(span.start(), 4); + assert_eq!(span.end(), 9); +} + +#[test] +fn trait_impl_generics_count_mismatch() { + let src = r#" + trait Foo {} + + impl Foo<()> for Field {} + + fn main() {}"#; let errors = get_program_errors(src); - assert!(errors.is_empty()); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::GenericCountMismatch { + item, + expected, + found, + .. + }) = &errors[0].0 + else { + panic!("Expected a generic count mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(item, "Foo"); + assert_eq!(*expected, 0); + assert_eq!(*found, 1); +} + +#[test] +fn bit_not_on_untyped_integer() { + let src = r#" + fn main() { + let _: u32 = 3 & !1; + } + "#; + assert_no_errors(src); } diff --git a/compiler/wasm/package.json b/compiler/wasm/package.json index 49956d79882..f9606003c7a 100644 --- a/compiler/wasm/package.json +++ b/compiler/wasm/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.31.0", + "version": "0.32.0", "license": "(MIT OR Apache-2.0)", "main": "dist/main.js", "types": "./dist/types/src/index.d.cts", diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index 59b0e00e49f..05f42bc91a1 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -164,10 +164,9 @@ pub fn compile_program( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions { - expression_width: ExpressionWidth::Bounded { width: 4 }, - ..CompileOptions::default() - }; + let expression_width = ExpressionWidth::Bounded { width: 4 }; + let compile_options = + CompileOptions { expression_width: Some(expression_width), ..CompileOptions::default() }; let compiled_program = noirc_driver::compile_main(&mut context, crate_id, &compile_options, None) @@ -180,8 +179,7 @@ pub fn compile_program( })? 
.0; - let optimized_program = - nargo::ops::transform_program(compiled_program, compile_options.expression_width); + let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -196,10 +194,9 @@ pub fn compile_contract( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions { - expression_width: ExpressionWidth::Bounded { width: 4 }, - ..CompileOptions::default() - }; + let expression_width = ExpressionWidth::Bounded { width: 4 }; + let compile_options = + CompileOptions { expression_width: Some(expression_width), ..CompileOptions::default() }; let compiled_contract = noirc_driver::compile_contract(&mut context, crate_id, &compile_options) @@ -212,8 +209,7 @@ pub fn compile_contract( })? .0; - let optimized_contract = - nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); + let optimized_contract = nargo::ops::transform_contract(compiled_contract, expression_width); let warnings = optimized_contract.warnings.clone(); Ok(JsCompileContractResult::new(optimized_contract.into(), warnings)) diff --git a/compiler/wasm/src/compile_new.rs b/compiler/wasm/src/compile_new.rs index d5f02833521..ef2af1dd654 100644 --- a/compiler/wasm/src/compile_new.rs +++ b/compiler/wasm/src/compile_new.rs @@ -100,7 +100,10 @@ impl CompilerContext { } else { ExpressionWidth::Bounded { width: 4 } }; - let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; + let compile_options = CompileOptions { + expression_width: Some(expression_width), + ..CompileOptions::default() + }; let root_crate_id = *self.context.root_crate_id(); let compiled_program = @@ -114,8 +117,7 @@ impl CompilerContext { })? 
.0; - let optimized_program = - nargo::ops::transform_program(compiled_program, compile_options.expression_width); + let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -130,7 +132,10 @@ impl CompilerContext { } else { ExpressionWidth::Bounded { width: 4 } }; - let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; + let compile_options = CompileOptions { + expression_width: Some(expression_width), + ..CompileOptions::default() + }; let root_crate_id = *self.context.root_crate_id(); let compiled_contract = @@ -145,7 +150,7 @@ impl CompilerContext { .0; let optimized_contract = - nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); + nargo::ops::transform_contract(compiled_contract, expression_width); let warnings = optimized_contract.warnings.clone(); Ok(JsCompileContractResult::new(optimized_contract.into(), warnings)) diff --git a/cspell.json b/cspell.json index 2a9bfb4b544..689b72435ef 100644 --- a/cspell.json +++ b/cspell.json @@ -206,6 +206,8 @@ "unoptimized", "urem", "USERPROFILE", + "vararg", + "varargs", "vecmap", "vitkov", "wasi", diff --git a/docs/docs/explainers/explainer-oracle.md b/docs/docs/explainers/explainer-oracle.md index b84ca5dd986..821e1f95c04 100644 --- a/docs/docs/explainers/explainer-oracle.md +++ b/docs/docs/explainers/explainer-oracle.md @@ -31,7 +31,7 @@ In short, anything that can be constrained in a Noir program but needs to be fet Just like in The Matrix, Oracles are powerful. But with great power, comes great responsibility. Just because you're using them in a Noir program doesn't mean they're true. Noir has no superpowers. If you want to prove that Portugal won the Euro Cup 2016, you're still relying on potentially untrusted information. -To give a concrete example, Alice wants to login to the [NounsDAO](https://nouns.wtf/) forum with her username "noir_nouner" by proving she owns a noun without revealing her ethereum address. Her Noir program could have a oracle call like this: +To give a concrete example, Alice wants to login to the [NounsDAO](https://nouns.wtf/) forum with her username "noir_nouner" by proving she owns a noun without revealing her ethereum address. Her Noir program could have an oracle call like this: ```rust #[oracle(getNoun)] @@ -52,6 +52,6 @@ If you don't constrain the return of your oracle, you could be clearly opening a On CLI, Nargo resolves oracles by making JSON RPC calls, which means it would require an RPC node to be running. -In JavaScript, NoirJS accepts and resolves arbitrary call handlers (that is, not limited to JSON) as long as they matches the expected types the developer defines. Refer to [Foreign Call Handler](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) to learn more about NoirJS's call handling. +In JavaScript, NoirJS accepts and resolves arbitrary call handlers (that is, not limited to JSON) as long as they match the expected types the developer defines. Refer to [Foreign Call Handler](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) to learn more about NoirJS's call handling. If you want to build using oracles, follow through to the [oracle guide](../how_to/how-to-oracles.md) for a simple example on how to do that. 
diff --git a/docs/docs/getting_started/hello_noir/project_breakdown.md b/docs/docs/getting_started/hello_noir/project_breakdown.md index 29688df148f..525b8dabdd8 100644 --- a/docs/docs/getting_started/hello_noir/project_breakdown.md +++ b/docs/docs/getting_started/hello_noir/project_breakdown.md @@ -67,6 +67,7 @@ The package section defines a number of fields including: - `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) - `backend` (optional) - `license` (optional) +- `expression_width` (optional) - Sets the default backend expression width. This field will override the default backend expression width specified by the Noir compiler (currently set to width 4). #### Dependencies section diff --git a/docs/docs/how_to/how-to-oracles.md b/docs/docs/how_to/how-to-oracles.md index df41276cfe1..2f69902062c 100644 --- a/docs/docs/how_to/how-to-oracles.md +++ b/docs/docs/how_to/how-to-oracles.md @@ -46,7 +46,7 @@ unconstrained fn get_sqrt(number: Field) -> Field { } ``` -In this example, we're wrapping our oracle function in a unconstrained method, and decorating it with `oracle(getSqrt)`. We can then call the unconstrained function as we would call any other function: +In this example, we're wrapping our oracle function in an unconstrained method, and decorating it with `oracle(getSqrt)`. We can then call the unconstrained function as we would call any other function: ```rust fn main(input: Field) { @@ -234,7 +234,7 @@ const client = new JSONRPCClient((jsonRPCRequest) => { // declaring a function that takes the name of the foreign call (getSqrt) and the inputs const foreignCallHandler = async (name, input) => { // notice that the "inputs" parameter contains *all* the inputs - // in this case we to make the RPC request with the first parameter "numbers", which would be input[0] + // in this case we make the RPC request with the first parameter "numbers", which would be input[0] const oracleReturn = await client.request(name, [ input[0].map((i) => i.toString("hex")), ]); diff --git a/docs/docs/how_to/how-to-recursion.md b/docs/docs/how_to/how-to-recursion.md index aac84e29fac..71f02fa5435 100644 --- a/docs/docs/how_to/how-to-recursion.md +++ b/docs/docs/how_to/how-to-recursion.md @@ -47,7 +47,7 @@ In a standard recursive app, you're also dealing with at least two circuits. For - `main`: a circuit of type `assert(x != y)`, where `main` is marked with a `#[recursive]` attribute. This attribute states that the backend should generate proofs that are friendly for verification within another circuit. - `recursive`: a circuit that verifies `main` -For a full example on how recursive proofs work, please refer to the [noir-examples](https://github.com/noir-lang/noir-examples) repository. We will *not* be using it as a reference for this guide. +For a full example of how recursive proofs work, please refer to the [noir-examples](https://github.com/noir-lang/noir-examples) repository. We will *not* be using it as a reference for this guide. ## Step 1: Setup diff --git a/docs/docs/how_to/how-to-solidity-verifier.md b/docs/docs/how_to/how-to-solidity-verifier.md index e6ed9abaec6..c800d91ac69 100644 --- a/docs/docs/how_to/how-to-solidity-verifier.md +++ b/docs/docs/how_to/how-to-solidity-verifier.md @@ -40,7 +40,7 @@ Generating a Solidity Verifier contract is actually a one-command process. Howev ## Step 1 - Generate a contract -This is by far the most straight-forward step. 
Just run: +This is by far the most straightforward step. Just run: ```sh nargo compile @@ -99,7 +99,7 @@ This time we will see a warning about an unused function parameter. This is expe ## Step 3 - Deploying -At this point we should have a compiled contract read to deploy. If we navigate to the deploy section in Remix, we will see many different environments we can deploy to. The steps to deploy on each environment would be out-of-scope for this guide, so we will just use the default Remix VM. +At this point we should have a compiled contract ready to deploy. If we navigate to the deploy section in Remix, we will see many different environments we can deploy to. The steps to deploy on each environment would be out-of-scope for this guide, so we will just use the default Remix VM. Looking closely, we will notice that our "Solidity Verifier" is actually three contracts working together: @@ -111,7 +111,7 @@ Remix will take care of the dependencies for us so we can simply deploy the Ultr ![Deploying UltraVerifier](@site/static/img/how-tos/solidity_verifier_5.png) -A contract will show up in the "Deployed Contracts" section, where we can retrieve the Verification Key Hash. This is particularly useful for double-checking the deployer contract is the correct one. +A contract will show up in the "Deployed Contracts" section, where we can retrieve the Verification Key Hash. This is particularly useful for double-checking that the deployer contract is the correct one. :::note diff --git a/docs/docs/noir/concepts/data_bus.md b/docs/docs/noir/concepts/data_bus.mdx similarity index 88% rename from docs/docs/noir/concepts/data_bus.md rename to docs/docs/noir/concepts/data_bus.mdx index e54fc861257..e55e58622ce 100644 --- a/docs/docs/noir/concepts/data_bus.md +++ b/docs/docs/noir/concepts/data_bus.mdx @@ -2,7 +2,9 @@ title: Data Bus sidebar_position: 13 --- -**Disclaimer** this feature is experimental, do not use it! +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + The data bus is an optimization that the backend can use to make recursion more efficient. In order to use it, you must define some inputs of the program entry points (usually the `main()` diff --git a/docs/docs/noir/concepts/generics.md b/docs/docs/noir/concepts/generics.md index 0c1c27a2221..3e416eee093 100644 --- a/docs/docs/noir/concepts/generics.md +++ b/docs/docs/noir/concepts/generics.md @@ -104,3 +104,60 @@ impl Eq for MyStruct { ``` You can find more details on traits and trait implementations on the [traits page](../concepts/traits.md). + +## Manually Specifying Generics with the Turbofish Operator + +There are times when the compiler cannot reasonably infer what type should be used for a generic, or when the developer themselves may want to manually distinguish generic type parameters. This is where the `::<>` turbofish operator comes into play. + +The `::<>` operator can follow a variable or path and can be used to manually specify generic arguments within the angle brackets. +The name "turbofish" comes from that `::<>` looks like a little fish. 
+ +Examples: +```rust +fn main() { + let mut slice = []; + slice = slice.push_back(1); + slice = slice.push_back(2); + // Without turbofish a type annotation would be needed on the left hand side + let array = slice.as_array::<2>(); +} +``` +```rust +fn double() -> u32 { + N * 2 +} +fn example() { + assert(double::<9>() == 18); + assert(double::<7 + 8>() == 30); +} +``` +```rust +trait MyTrait { + fn ten() -> Self; +} + +impl MyTrait for Field { + fn ten() -> Self { 10 } +} + +struct Foo { + inner: T +} + +impl Foo { + fn generic_method(_self: Self) -> U where U: MyTrait { + U::ten() + } +} + +fn example() { + let foo: Foo = Foo { inner: 1 }; + // Using a type other than `Field` here (e.g. u32) would fail as + // there is no matching impl for `u32: MyTrait`. + // + // Substituting the `10` on the left hand side of this assert + // with `10 as u32` would also fail with a type mismatch as we + // are expecting a `Field` from the right hand side. + assert(10 as u32 == foo.generic_method::()); +} +``` diff --git a/docs/docs/noir/concepts/oracles.md b/docs/docs/noir/concepts/oracles.mdx similarity index 78% rename from docs/docs/noir/concepts/oracles.md rename to docs/docs/noir/concepts/oracles.mdx index aa380b5f7b8..77a2ac1550a 100644 --- a/docs/docs/noir/concepts/oracles.md +++ b/docs/docs/noir/concepts/oracles.mdx @@ -11,11 +11,9 @@ keywords: sidebar_position: 6 --- -:::note +import Experimental from '@site/src/components/Notes/_experimental.mdx'; -This is an experimental feature that is not fully documented. If you notice any outdated information or potential improvements to this page, pull request contributions are very welcome: https://github.com/noir-lang/noir - -::: + Noir has support for Oracles via RPC calls. This means Noir will make an RPC call and use the return value for proof generation. diff --git a/docs/docs/noir/modules_packages_crates/modules.md b/docs/docs/noir/modules_packages_crates/modules.md index 9fffd925b7b..16b6307d2fd 100644 --- a/docs/docs/noir/modules_packages_crates/modules.md +++ b/docs/docs/noir/modules_packages_crates/modules.md @@ -148,4 +148,38 @@ Filename : `src/foo/bar/mod.nr` ```rust fn from_bar() {} +``` + +### Referencing a parent module + +Given a submodule, you can refer to its parent module using the `super` keyword. + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; + +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +// Same as bar::from_foo +use super::from_foo; + +fn from_bar() { + from_foo(); // invokes super::from_foo(), which is bar::from_foo() + super::from_foo(); // also invokes bar::from_foo() +} ``` \ No newline at end of file diff --git a/docs/src/components/Notes/_experimental.mdx b/docs/src/components/Notes/_experimental.mdx index da1b0826aa1..1c0b2448ad3 100644 --- a/docs/src/components/Notes/_experimental.mdx +++ b/docs/src/components/Notes/_experimental.mdx @@ -1,6 +1,7 @@ -:::caution +:::caution Experimental Feature -This feature is experimental. You should expect it to change in future versions, -cause unexpected behavior, or simply not work at all. +This feature is experimental. The documentation may be incomplete or out of date, which means it could change in future versions, potentially causing unexpected behavior or not working as expected. + +**Contributions Welcome:** If you notice any inaccuracies or potential improvements, please consider contributing. 
Visit our GitHub repository to make your contributions: [Contribute Here](https://github.com/noir-lang/noir). ::: diff --git a/docs/versioned_docs/version-v0.32.0/explainers/explainer-oracle.md b/docs/versioned_docs/version-v0.32.0/explainers/explainer-oracle.md new file mode 100644 index 00000000000..821e1f95c04 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/explainers/explainer-oracle.md @@ -0,0 +1,57 @@ +--- +title: Oracles +description: This guide provides an in-depth understanding of how Oracles work in Noir programming. Learn how to use outside calculations in your programs, constrain oracles, and understand their uses and limitations. +keywords: + - Noir Programming + - Oracles + - JSON-RPC + - Foreign Call Handlers + - Constrained Functions + - Blockchain Programming +sidebar_position: 1 +--- + +If you've seen "The Matrix" you may recall "The Oracle" as Gloria Foster smoking cigarettes and baking cookies. While she appears to "know things", she is actually providing a calculation of a pre-determined future. Noir Oracles are similar, in a way. They don't calculate the future (yet), but they allow you to use outside calculations in your programs. + +![matrix oracle prediction](@site/static/img/memes/matrix_oracle.jpeg) + +A Noir program is usually self-contained. You can pass certain inputs to it, and it will generate a deterministic output for those inputs. But what if you wanted to defer some calculation to an outside process or source? + +Oracles are functions that provide this feature. + +## Use cases + +An example usage for Oracles is proving something on-chain. For example, proving that the ETH-USDC quote was below a certain target at a certain block time. Or even making more complex proofs like proving the ownership of an NFT as an anonymous login method. + +Another interesting use case is to defer expensive calculations to be made outside of the Noir program, and then constraining the result; similar to the use of [unconstrained functions](../noir/concepts//unconstrained.md). + +In short, anything that can be constrained in a Noir program but needs to be fetched from an external source is a great candidate to be used in oracles. + +## Constraining oracles + +Just like in The Matrix, Oracles are powerful. But with great power, comes great responsibility. Just because you're using them in a Noir program doesn't mean they're true. Noir has no superpowers. If you want to prove that Portugal won the Euro Cup 2016, you're still relying on potentially untrusted information. + +To give a concrete example, Alice wants to login to the [NounsDAO](https://nouns.wtf/) forum with her username "noir_nouner" by proving she owns a noun without revealing her ethereum address. Her Noir program could have an oracle call like this: + +```rust +#[oracle(getNoun)] +unconstrained fn get_noun(address: Field) -> Field +``` + +This oracle could naively resolve with the number of Nouns she possesses. However, it is useless as a trusted source, as the oracle could resolve to anything Alice wants. In order to make this oracle call actually useful, Alice would need to constrain the response from the oracle, by proving her address and the noun count belongs to the state tree of the contract. + +In short, **Oracles don't prove anything. Your Noir program does.** + +:::danger + +If you don't constrain the return of your oracle, you could be clearly opening an attack vector on your Noir program. Make double-triple sure that the return of an oracle call is constrained! 
+ +::: + +## How to use Oracles + +On CLI, Nargo resolves oracles by making JSON RPC calls, which means it would require an RPC node to be running. + +In JavaScript, NoirJS accepts and resolves arbitrary call handlers (that is, not limited to JSON) as long as they match the expected types the developer defines. Refer to [Foreign Call Handler](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) to learn more about NoirJS's call handling. + +If you want to build using oracles, follow through to the [oracle guide](../how_to/how-to-oracles.md) for a simple example on how to do that. diff --git a/docs/versioned_docs/version-v0.32.0/explainers/explainer-recursion.md b/docs/versioned_docs/version-v0.32.0/explainers/explainer-recursion.md new file mode 100644 index 00000000000..18846176ca7 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/explainers/explainer-recursion.md @@ -0,0 +1,176 @@ +--- +title: Recursive proofs +description: Explore the concept of recursive proofs in Zero-Knowledge programming. Understand how recursion works in Noir, a language for writing smart contracts on the EVM blockchain. Learn through practical examples like Alice and Bob's guessing game, Charlie's recursive merkle tree, and Daniel's reusable components. Discover how to use recursive proofs to optimize computational resources and improve efficiency. + +keywords: + [ + "Recursive Proofs", + "Zero-Knowledge Programming", + "Noir", + "EVM Blockchain", + "Smart Contracts", + "Recursion in Noir", + "Alice and Bob Guessing Game", + "Recursive Merkle Tree", + "Reusable Components", + "Optimizing Computational Resources", + "Improving Efficiency", + "Verification Key", + "Aggregation", + "Recursive zkSNARK schemes", + "PLONK", + "Proving and Verification Keys" + ] +sidebar_position: 1 +pagination_next: how_to/how-to-recursion +--- + +In programming, we tend to think of recursion as something calling itself. A classic example would be the calculation of the factorial of a number: + +```js +function factorial(n) { + if (n === 0 || n === 1) { + return 1; + } else { + return n * factorial(n - 1); + } +} +``` + +In this case, while `n` is not `1`, this function will keep calling itself until it hits the base case, bubbling up the result on the call stack: + +```md + Is `n` 1? <--------- + /\ / + / \ n = n -1 + / \ / + Yes No -------- +``` + +In Zero-Knowledge, recursion has some similarities. + +It is not a Noir function calling itself, but a proof being used as an input to another circuit. In short, you verify one proof *inside* another proof, returning the proof that both proofs are valid. + +This means that, given enough computational resources, you can prove the correctness of any arbitrary number of proofs in a single proof. This could be useful to design state channels (for which a common example would be [Bitcoin's Lightning Network](https://en.wikipedia.org/wiki/Lightning_Network)), to save on gas costs by settling one proof on-chain, or simply to make business logic less dependent on a consensus mechanism. + +## Examples + +Let us look at some of these examples + +### Alice and Bob - Guessing game + +Alice and Bob are friends, and they like guessing games. They want to play a guessing game online, but for that, they need a trusted third-party that knows both of their secrets and finishes the game once someone wins. + +So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob will generate a ZK proof stating whether she succeeded or failed. 
+ +This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. + +As a solution, Alice proposes the following: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". + +She can then generate a proof that she verified his proof, and so on. + +```md + Did you fail? <-------------------------- + / \ / + / \ n = n -1 + / \ / + Yes No / + | | / + | | / + | You win / + | / + | / +Generate proof of that / + + / + my own guess ---------------- +``` + +### Charlie - Recursive merkle tree + +Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! + +If the vote collector puts all of the votes into a [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree), everyone can prove the verification of two proofs within one proof, as such: + +```md + abcd + __________|______________ + | | + ab cd + _____|_____ ______|______ + | | | | + alice bob charlie daniel +``` + +Doing this recursively allows us to arrive on a final proof `abcd` which if true, verifies the correctness of all the votes. + +### Daniel - Reusable components + +Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit. + +He might find it more efficient to generate a proof for that setup phase separately, and verify that proof recursively in the actual business logic section of his circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. + +## What params do I need + +As you can see in the [recursion reference](noir/standard_library/recursion.md), a simple recursive proof requires: + +- The proof to verify +- The Verification Key of the circuit that generated the proof +- A hash of this verification key, as it's needed for some backends +- The public inputs for the proof + +:::info + +Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather an aggregation object is built over the public inputs. + +So, taking the example of Alice and Bob and their guessing game: + +- Alice makes her guess. Her proof is *not* recursive: it doesn't verify any proof within it! It's just a standard `assert(x != y)` circuit +- Bob verifies Alice's proof and makes his own guess. In this circuit, he doesn't exactly *prove* the verification of Alice's proof. Instead, he *aggregates* his proof to Alice's proof. The actual verification is done when the full proof is verified, for example when using `nargo verify` or through the verifier smart contract. + +We can imagine recursive proofs a [relay race](https://en.wikipedia.org/wiki/Relay_race). The first runner doesn't have to receive the baton from anyone else, as he/she already starts with it. But when his/her turn is over, the next runner needs to receive it, run a bit more, and pass it along. Even though every runner could theoretically verify the baton mid-run (why not? 🏃🔍), only at the end of the race does the referee verify that the whole race is valid. 
+ +::: + +## Some architecture + +As with everything in computer science, there's no one-size-fits all. But there are some patterns that could help understanding and implementing them. To give three examples: + +### Adding some logic to a proof verification + +This would be an approach for something like our guessing game, where proofs are sent back and forth and are verified by each opponent. This circuit would be divided in two sections: + +- A `recursive verification` section, which would be just the call to `std::verify_proof`, and that would be skipped on the first move (since there's no proof to verify) +- A `guessing` section, which is basically the logic part where the actual guessing happens + +In such a situation, and assuming Alice is first, she would skip the first part and try to guess Bob's number. Bob would then verify her proof on the first section of his run, and try to guess Alice's number on the second part, and so on. + +### Aggregating proofs + +In some one-way interaction situations, recursion would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere. + +To give a practical example, a barman wouldn't need to verify a "proof-of-age" on-chain every time he serves alcohol to a customer. Instead, the architecture would comprise two circuits: + +- A `main`, non-recursive circuit with some logic +- A `recursive` circuit meant to verify two proofs in one proof + +The customer's proofs would be intermediate, and made on their phones, and the barman could just verify them locally. He would then aggregate them into a final proof sent on-chain (or elsewhere) at the end of the day. + +### Recursively verifying different circuits + +Nothing prevents you from verifying different circuits in a recursive proof, for example: + +- A `circuit1` circuit +- A `circuit2` circuit +- A `recursive` circuit + +In this example, a regulator could verify that taxes were paid for a specific purchase by aggregating both a `payer` circuit (proving that a purchase was made and taxes were paid), and a `receipt` circuit (proving that the payment was received) + +## How fast is it + +At the time of writing, verifying recursive proofs is surprisingly fast. This is because most of the time is spent on generating the verification key that will be used to generate the next proof. So you are able to cache the verification key and reuse it later. + +Currently, Noir JS packages don't expose the functionality of loading proving and verification keys, but that feature exists in the underlying `bb.js` package. + +## How can I try it + +Learn more about using recursion in Nargo and NoirJS in the [how-to guide](../how_to/how-to-recursion.md) and see a full example in [noir-examples](https://github.com/noir-lang/noir-examples). 
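+
+As a parting illustration, here is a rough sketch of what the "recursive verification section" described above can look like in Noir, combined with the game's own `assert(x != y)` logic. Treat it as illustrative only: the exact signature of `std::verify_proof`, and the verification key and proof lengths (114 and 93 field elements are assumed here for the Barretenberg backend), depend on your Noir version and backend, so check the [recursion reference](noir/standard_library/recursion.md) for the canonical form.
+
+```rust
+// Illustrative sketch only: the parameter sizes and the exact shape of
+// `std::verify_proof` are assumptions; see the recursion reference.
+fn main(
+    verification_key: [Field; 114], // verification key of the inner circuit
+    proof: [Field; 93],             // the inner proof being aggregated
+    public_inputs: [Field; 1],      // public inputs of the inner proof
+    key_hash: Field,                // hash of the verification key
+    x: Field,                       // this turn's private guess
+    y: pub Field                    // the public value being guessed against
+) {
+    // Recursive verification section: aggregate the inner proof.
+    std::verify_proof(
+        verification_key.as_slice(),
+        proof.as_slice(),
+        public_inputs.as_slice(),
+        key_hash
+    );
+
+    // Guessing section: the circuit's own logic.
+    assert(x != y);
+}
+```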
diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/_category_.json b/docs/versioned_docs/version-v0.32.0/getting_started/_category_.json
new file mode 100644
index 00000000000..5d694210bbf
--- /dev/null
+++ b/docs/versioned_docs/version-v0.32.0/getting_started/_category_.json
@@ -0,0 +1,5 @@
+{
+  "position": 0,
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/_category_.json b/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/_category_.json
new file mode 100644
index 00000000000..27a8e89228d
--- /dev/null
+++ b/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/_category_.json
@@ -0,0 +1,6 @@
+{
+  "position": 1,
+  "label": "Install Barretenberg",
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/index.md b/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/index.md
new file mode 100644
index 00000000000..0102c86770b
--- /dev/null
+++ b/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/index.md
@@ -0,0 +1,47 @@
+---
+title: Barretenberg Installation
+description: bb is a command line tool for interacting with Aztec's proving backend Barretenberg. This page is a quick guide on how to install `bb`
+keywords: [
+  Barretenberg
+  bb
+  Installation
+  Terminal Commands
+  Version Check
+  Nightlies
+  Specific Versions
+  Branches
+]
+pagination_next: getting_started/hello_noir/index
+---
+
+`bb` is the CLI tool for generating and verifying proofs for Noir programs using the Barretenberg proving library. It also allows generating Solidity verifier contracts with which you can verify proofs that were constructed using `bb`.
+
+## Installing `bb`
+
+Open a terminal on your machine, and write:
+
+##### macOS (Apple Silicon)
+
+```bash
+curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash
+source ~/.zshrc
+bbup -v 0.41.0
+```
+
+##### macOS (Intel)
+
+```bash
+curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash
+source ~/.zshrc
+bbup -v 0.41.0
+```
+
+##### Linux (Bash)
+
+```bash
+curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash
+source ~/.bashrc
+bbup -v 0.41.0
+```
+
+Now we're ready to start working on [our first Noir program!](../hello_noir/index.md)
diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/_category_.json b/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/_category_.json
new file mode 100644
index 00000000000..976a2325de0
--- /dev/null
+++ b/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/_category_.json
@@ -0,0 +1,5 @@
+{
+  "position": 2,
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/index.md b/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/index.md
new file mode 100644
index 00000000000..1ade3f09ae3
--- /dev/null
+++ b/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/index.md
@@ -0,0 +1,145 @@
+---
+title: Creating a Project
+description:
+  Learn how to create and verify your first Noir program using Nargo, a programming language for
+  zero-knowledge proofs.
+keywords: + [ + Nargo, + Noir, + zero-knowledge proofs, + programming language, + create Noir program, + verify Noir program, + step-by-step guide, + ] +sidebar_position: 1 + +--- + +Now that we have installed Nargo, it is time to make our first hello world program! + +## Create a Project Directory + +Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home +directory to house our Noir programs. + +For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by +running: + +```sh +mkdir ~/projects +cd ~/projects +``` + +## Create Our First Nargo Project + +Now that we are in the projects directory, create a new Nargo project by running: + +```sh +nargo new hello_world +``` + +> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for +> demonstration. +> +> In production, the common practice is to name the project folder as `circuits` for better +> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, +> `test`). + +A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and +_Nargo.toml_ which contain the source code and environmental options of your Noir program +respectively. + +### Intro to Noir Syntax + +Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +The first line of the program specifies the program's inputs: + +```rust +x : Field, y : pub Field +``` + +Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the +keyword `pub` (e.g. `y`). To learn more about private and public values, check the +[Data Types](../../noir/concepts/data_types/index.md) section. + +The next line of the program specifies its body: + +```rust +assert(x != y); +``` + +The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. + +For more Noir syntax, check the [Language Concepts](../../noir/concepts/comments.md) chapter. + +## Build In/Output Files + +Change directory into _hello_world_ and build in/output files for your Noir program by running: + +```sh +cd hello_world +nargo check +``` + +A _Prover.toml_ file will be generated in your project directory, to allow specifying input values to the program. + +## Execute Our Noir Program + +Now that the project is set up, we can execute our Noir program. + +Fill in input values for execution in the _Prover.toml_ file. For example: + +```toml +x = "1" +y = "2" +``` + +Execute your Noir program: + +```sh +nargo execute witness-name +``` + +The witness corresponding to this execution will then be written to the file `./target/witness-name.gz`. + +## Prove Our Noir Program + +:::info + +Nargo no longer handles communicating with backends in order to generate proofs. In order to prove/verify your Noir programs, you'll need an installation of [bb](../barretenberg/index.md). + +::: + +Prove the valid execution of your Noir program using `bb`: + +```sh +bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./proof +``` + +A new file called `proof` will be generated in your project directory, containing the generated proof for your program. + +## Verify Our Noir Program + +Once a proof is generated, we can verify correct execution of our Noir program by verifying the proof file. 
+ +Verify your proof by running: + +```sh +bb write_vk -b ./target/hello_world.json -o ./target/vk +bb verify -k ./target/vk -p ./proof +``` + +The verification will complete in silence if it is successful. If it fails, it will log the corresponding error instead. + +Congratulations, you have now created and verified a proof for your very first Noir program! + +In the [next section](./project_breakdown.md), we will go into more detail on each step performed. diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/project_breakdown.md b/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/project_breakdown.md new file mode 100644 index 00000000000..525b8dabdd8 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/project_breakdown.md @@ -0,0 +1,160 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover TOML + file, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, proof verification, private asset transfer] +sidebar_position: 2 +--- + +This section breaks down our hello world program from the previous section. We elaborate on the project +structure and what the `prove` and `verify` commands did. + +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. + +### Nargo.toml + +_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. + +Example Nargo.toml: + +```toml +[package] +name = "noir_starter" +type = "bin" +authors = ["Alice"] +compiler_version = "0.9.0" +description = "Getting started with Noir" +entry = "circuit/main.nr" +license = "MIT" + +[dependencies] +ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} +``` + +Nargo.toml for a [workspace](../../noir/modules_packages_crates/workspaces.md) will look a bit different. For example: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +#### Package section + +The package section defines a number of fields including: + +- `name` (**required**) - the name of the package +- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether its a binary, library or Aztec contract +- `authors` (optional) - authors of the project +- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follow's [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. 
For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) +- `description` (optional) +- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) +- `backend` (optional) +- `license` (optional) +- `expression_width` (optional) - Sets the default backend expression width. This field will override the default backend expression width specified by the Noir compiler (currently set to width 4). + +#### Dependencies section + +This is where you will specify any dependencies for your project. See the [Dependencies page](../../noir/modules_packages_crates/dependencies.md) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. + +### main.nr + +The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is constrained by the proof of the execution of said program (i.e. if the condition was not met, the verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is a file which the prover uses to supply the inputs to the Noir program (both private and public). + +In our hello world program the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo execute` is executed, nargo will execute the Noir program using the inputs specified in `Prover.toml`, aborting if it finds that these do not satisfy the constraints defined by `main`. In this example, `x` and `y` must satisfy the inequality constraint `assert(x != y)`. + +If an output name is specified such as `nargo execute foo`, the witness generated by this execution will be written to `./target/foo.gz`. This can then be used to generate a proof of the execution. + +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. + +```rust +// main.nr +struct Foo { + bar: Field, + baz: Field, +} + +fn main(foos: [Foo; 3]) -> pub Field { + foos[2].bar + foos[2].baz +} +``` + +Prover.toml: + +```toml +[[foos]] # foos[0] +bar = 0 +baz = 0 + +[[foos]] # foos[1] +bar = 0 +baz = 0 + +[[foos]] # foos[2] +bar = 1 +baz = 2 +``` + +#### Custom toml files + +You can specify a `toml` file with a different name to use for execution by using the `--prover-name` or `-p` flags. + +This command looks for proof inputs in the default **Prover.toml** and generates the witness and saves it at `./target/foo.gz`: + +```bash +nargo execute foo +``` + +This command looks for proof inputs in the custom **OtherProver.toml** and generates the witness and saves it at `./target/bar.gz`: + +```bash +nargo execute -p OtherProver bar +``` + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. 
diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/installation/_category_.json b/docs/versioned_docs/version-v0.32.0/getting_started/installation/_category_.json new file mode 100644 index 00000000000..0c02fb5d4d7 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/getting_started/installation/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 0, + "label": "Install Nargo", + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/installation/index.md b/docs/versioned_docs/version-v0.32.0/getting_started/installation/index.md new file mode 100644 index 00000000000..4ef86aa5914 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/getting_started/installation/index.md @@ -0,0 +1,48 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo through the most common and easy method, noirup +keywords: [ + Nargo + Noir + Rust + Cargo + Noirup + Installation + Terminal Commands + Version Check + Nightlies + Specific Versions + Branches + Noirup Repository +] +pagination_next: getting_started/hello_noir/index +--- + +`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. + +With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. + +Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. + +## Installing Noirup + +Open a terminal on your machine, and write: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done. That's it. You should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches. Check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. + +Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/installation/other_install_methods.md b/docs/versioned_docs/version-v0.32.0/getting_started/installation/other_install_methods.md new file mode 100644 index 00000000000..3634723562b --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/getting_started/installation/other_install_methods.md @@ -0,0 +1,102 @@ +--- +title: Alternative Installations +description: There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains how to specify which version to install when using noirup, and using WSL for windows. +keywords: [ + Installation + Nargo + Noirup + Binaries + Compiling from Source + WSL for Windows + macOS + Linux + Nix + Direnv + Uninstalling Nargo + ] +sidebar_position: 1 +--- + +## Encouraged Installation Method: Noirup + +Noirup is the endorsed method for installing Nargo, streamlining the process of fetching binaries or compiling from source. It supports a range of options to cater to your specific needs, from nightly builds and specific versions to compiling from various sources. 
+
+### Installing Noirup
+
+First, ensure you have `noirup` installed:
+
+```sh
+curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash
+```
+
+### Fetching Binaries
+
+With `noirup`, you can easily switch between different Nargo versions, including nightly builds:
+
+- **Nightly Version**: Install the latest nightly build.
+
+  ```sh
+  noirup --version nightly
+  ```
+
+- **Specific Version**: Install a specific version of Nargo.
+  ```sh
+  noirup --version <version>
+  ```
+
+### Compiling from Source
+
+`noirup` also enables compiling Nargo from various sources:
+
+- **From a Specific Branch**: Install from the latest commit on a branch.
+
+  ```sh
+  noirup --branch <branch_name>
+  ```
+
+- **From a Fork**: Install from the main branch of a fork.
+
+  ```sh
+  noirup --repo <repo_name>
+  ```
+
+- **From a Specific Branch in a Fork**: Install from a specific branch in a fork.
+
+  ```sh
+  noirup --repo <repo_name> --branch <branch_name>
+  ```
+
+- **From a Specific Pull Request**: Install from a specific PR.
+
+  ```sh
+  noirup --pr <pr_number>
+  ```
+
+- **From a Specific Commit**: Install from a specific commit.
+
+  ```sh
+  noirup -C <commit_hash>
+  ```
+
+- **From Local Source**: Compile and install from a local directory.
+  ```sh
+  noirup --path ./path/to/local/source
+  ```
+
+## Installation on Windows
+
+The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL).
+
+Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL.
+
+Step 2: Follow the [Noirup instructions](#encouraged-installation-method-noirup).
+
+## Uninstalling Nargo
+
+If you installed Nargo with `noirup`, you can uninstall Nargo by removing the files in `~/.nargo`, `~/nargo`, and `~/noir_cache`. This ensures that all installed binaries, configurations, and cache related to Nargo are fully removed from your system.
+
+```bash
+rm -r ~/.nargo
+rm -r ~/nargo
+rm -r ~/noir_cache
+```
diff --git a/docs/versioned_docs/version-v0.32.0/getting_started/tooling/noir_codegen.md b/docs/versioned_docs/version-v0.32.0/getting_started/tooling/noir_codegen.md
new file mode 100644
index 00000000000..f7505bef7ab
--- /dev/null
+++ b/docs/versioned_docs/version-v0.32.0/getting_started/tooling/noir_codegen.md
@@ -0,0 +1,114 @@
+---
+title: Noir Codegen for TypeScript
+description: Learn how to use Noir codegen to generate TypeScript bindings
+keywords: [Nargo, Noir, compile, TypeScript]
+sidebar_position: 3
+---
+
+When using TypeScript, it is extra work to interpret Noir program outputs in a type-safe way. Third party libraries may exist for popular Noir programs, but they are either hard to find or unmaintained.
+
+Now you can generate TypeScript bindings for your Noir programs in two steps:
+1. Exporting Noir functions using `nargo export`
+2. Using the TypeScript module `noir_codegen` to generate TypeScript bindings
+
+**Note:** you can only export functions from a Noir *library* (not binary or contract program types).
+
+## Installation
+
+### Your TypeScript project
+
+If you don't already have a TypeScript project you can add the module with `yarn` (or `npm`), then initialize it:
+
+```bash
+yarn add typescript -D
+npx tsc --init
+```
+
+### Add TypeScript module - `noir_codegen`
+
+The following command will add the module to your project's devDependencies:
+
+```bash
+yarn add @noir-lang/noir_codegen -D
+```
+
+### Nargo library
+Make sure you have Nargo, v0.25.0 or greater, installed.
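+You can check which version is currently installed by running:
+
+```bash
+nargo --version
+```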
If you don't, follow the [installation guide](../installation/index.md). + +If you're in a new project, make a `circuits` folder and create a new Noir library: + +```bash +mkdir circuits && cd circuits +nargo new --lib myNoirLib +``` + +## Usage + +### Export ABI of specified functions + +First go to the `.nr` files in your Noir library, and add the `#[export]` macro to each function that you want to use in TypeScript. + +```rust +#[export] +fn your_function(... +``` + +From your Noir library (where `Nargo.toml` is), run the following command: + +```bash +nargo export +``` + +You will now have an `export` directory with a .json file per exported function. + +You can also specify the directory of Noir programs using `--program-dir`, for example: + +```bash +nargo export --program-dir=./circuits/myNoirLib +``` + +### Generate TypeScript bindings from exported functions + +To use the `noir-codegen` package we added to the TypeScript project: + +```bash +yarn noir-codegen ./export/your_function.json +``` + +This creates an `exports` directory with an `index.ts` file containing all exported functions. + +**Note:** adding `--out-dir` allows you to specify an output dir for your TypeScript bindings to go. Eg: + +```bash +yarn noir-codegen ./export/*.json --out-dir ./path/to/output/dir +``` + +## Example .nr function to .ts output + +Consider a Noir library with this function: + +```rust +#[export] +fn not_equal(x: Field, y: Field) -> bool { + x != y +} +``` + +After the export and codegen steps, you should have an `index.ts` like: + +```typescript +export type Field = string; + + +export const is_equal_circuit: CompiledCircuit = +{"abi":{"parameters":[{"name":"x","type":{"kind":"field"},"visibility":"private"},{"name":"y","type":{"kind":"field"},"visibility":"private"}],"return_type":{"abi_type":{"kind":"boolean"},"visibility":"private"}},"bytecode":"H4sIAAAAAAAA/7WUMQ7DIAxFQ0Krrr2JjSGYLVcpKrn/CaqqDQN12WK+hPBgmWd/wEyHbF1SS923uhOs3pfoChI+wKXMAXzIKyNj4PB0TFTYc0w5RUjoqeAeEu1wqK0F54RGkWvW44LPzExnlkbMEs4JNZmN8PxS42uHv82T8a3Jeyn2Ks+VLPcO558HmyLMCDOXAXXtpPt4R/Rt9T36ss6dS9HGPx/eG17nGegKBQAA"}; + +export async function is_equal(x: Field, y: Field, foreignCallHandler?: ForeignCallHandler): Promise { + const program = new Noir(is_equal_circuit); + const args: InputMap = { x, y }; + const { returnValue } = await program.execute(args, foreignCallHandler); + return returnValue as boolean; +} +``` + +Now the `is_equal()` function and relevant types are readily available for use in TypeScript. 
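+
+For illustration, a minimal sketch of calling this generated binding from your own TypeScript code might look like the following (the import path assumes the default `./exports` output directory used above):
+
+```typescript
+// Hypothetical usage of the generated `is_equal` binding shown above.
+import { is_equal } from "./exports";
+
+async function main() {
+  // Field values are passed as strings, matching the generated `Field` type.
+  const result = await is_equal("1", "2");
+  console.log(result); // true, since 1 != 2
+}
+
+main();
+```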
diff --git a/docs/versioned_docs/version-v0.32.0/how_to/_category_.json b/docs/versioned_docs/version-v0.32.0/how_to/_category_.json new file mode 100644 index 00000000000..23b560f610b --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/how_to/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.32.0/how_to/debugger/_category_.json b/docs/versioned_docs/version-v0.32.0/how_to/debugger/_category_.json new file mode 100644 index 00000000000..cc2cbb1c253 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/how_to/debugger/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Debugging", + "position": 5, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.32.0/how_to/debugger/debugging_with_the_repl.md b/docs/versioned_docs/version-v0.32.0/how_to/debugger/debugging_with_the_repl.md new file mode 100644 index 00000000000..09e5bae68ad --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/how_to/debugger/debugging_with_the_repl.md @@ -0,0 +1,164 @@ +--- +title: Using the REPL Debugger +description: + Step by step guide on how to debug your Noir circuits with the REPL Debugger. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + REPL, + ] +sidebar_position: 1 +--- + +#### Pre-requisites + +In order to use the REPL debugger, first you need to install recent enough versions of Nargo and vscode-noir. + +## Debugging a simple circuit + +Let's debug a simple circuit: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +To start the REPL debugger, using a terminal, go to a Noir circuit's home directory. Then: + +`$ nargo debug` + +You should be seeing this in your terminal: + +``` +[main] Starting debugger +At ~/noir-examples/recursion/circuits/main/src/main.nr:1:9 + 1 -> fn main(x : Field, y : pub Field) { + 2 assert(x != y); + 3 } +> +``` + +The debugger displays the current Noir code location, and it is now waiting for us to drive it. + +Let's first take a look at the available commands. For that we'll use the `help` command. + +``` +> help +Available commands: + + opcodes display ACIR opcodes + into step into to the next opcode + next step until a new source location is reached + out step until a new source location is reached + and the current stack frame is finished + break LOCATION:OpcodeLocation add a breakpoint at an opcode location + over step until a new source location is reached + without diving into function calls + restart restart the debugging session + delete LOCATION:OpcodeLocation delete breakpoint at an opcode location + witness show witness map + witness index:u32 display a single witness from the witness map + witness index:u32 value:String update a witness with the given value + memset index:usize value:String update a memory cell with the given + value + continue continue execution until the end of the + program + vars show variable values available at this point + in execution + stacktrace display the current stack trace + memory show memory (valid when executing unconstrained code) + step step to the next ACIR opcode + +Other commands: + + help Show this help message + quit Quit repl + +``` + +Some commands operate only for unconstrained functions, such as `memory` and `memset`. 
If you try to use them while execution is paused at an ACIR opcode, the debugger will simply inform you that you are not executing unconstrained code: + +``` +> memory +Unconstrained VM memory not available +> +``` + +Before continuing, we can take a look at the initial witness map: + +``` +> witness +_0 = 1 +_1 = 2 +> +``` + +Cool, since `x==1`, `y==2`, and we want to check that `x != y`, our circuit should succeed. At this point we could intervene and use the witness setter command to change one of the witnesses. Let's set `y=3`, then back to 2, so we don't affect the expected result: + +``` +> witness +_0 = 1 +_1 = 2 +> witness 1 3 +_1 = 3 +> witness +_0 = 1 +_1 = 3 +> witness 1 2 +_1 = 2 +> witness +_0 = 1 +_1 = 2 +> +``` + +Now we can inspect the current state of local variables. For that we use the `vars` command. + +``` +> vars +> +``` + +We currently have no vars in context, since we are at the entry point of the program. Let's use `next` to execute until the next point in the program. + +``` +> vars +> next +At ~/noir-examples/recursion/circuits/main/src/main.nr:1:20 + 1 -> fn main(x : Field, y : pub Field) { + 2 assert(x != y); + 3 } +> vars +x:Field = 0x01 +``` + +As a result of stepping, the variable `x`, whose initial value comes from the witness map, is now in context and returned by `vars`. + +``` +> next + 1 fn main(x : Field, y : pub Field) { + 2 -> assert(x != y); + 3 } +> vars +y:Field = 0x02 +x:Field = 0x01 +``` + +Stepping again we can finally see both variables and their values. And now we can see that the next assertion should succeed. + +Let's continue to the end: + +``` +> continue +(Continuing execution...) +Finished execution +> q +[main] Circuit witness successfully solved +``` + +Upon quitting the debugger after a solved circuit, the resulting circuit witness gets saved, equivalent to what would happen if we had run the same circuit with `nargo execute`. + +We just went through the basics of debugging using Noir REPL debugger. For a comprehensive reference, check out [the reference page](../../reference/debugger/debugger_repl.md). diff --git a/docs/versioned_docs/version-v0.32.0/how_to/debugger/debugging_with_vs_code.md b/docs/versioned_docs/version-v0.32.0/how_to/debugger/debugging_with_vs_code.md new file mode 100644 index 00000000000..a5858c1a5eb --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/how_to/debugger/debugging_with_vs_code.md @@ -0,0 +1,68 @@ +--- +title: Using the VS Code Debugger +description: + Step by step guide on how to debug your Noir circuits with the VS Code Debugger configuration and features. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + VS Code, + IDE, + ] +sidebar_position: 0 +--- + +This guide will show you how to use VS Code with the vscode-noir extension to debug a Noir project. + +#### Pre-requisites + +- Nargo +- vscode-noir +- A Noir project with a `Nargo.toml`, `Prover.toml` and at least one Noir (`.nr`) containing an entry point function (typically `main`). + +## Running the debugger + +The easiest way to start debugging is to open the file you want to debug, and press `F5`. This will cause the debugger to launch, using your `Prover.toml` file as input. + +You should see something like this: + +![Debugger launched](@site/static/img/debugger/1-started.png) + +Let's inspect the state of the program. For that, we open VS Code's _Debug pane_. Look for this icon: + +![Debug pane icon](@site/static/img/debugger/2-icon.png) + +You will now see two categories of variables: Locals and Witness Map. 
+ +![Debug pane expanded](@site/static/img/debugger/3-debug-pane.png) + +1. **Locals**: variables of your program. At this point in execution this section is empty, but as we step through the code it will get populated by `x`, `result`, `digest`, etc. + +2. **Witness map**: these are initially populated from your project's `Prover.toml` file. In this example, they will be used to populate `x` and `result` at the beginning of the `main` function. + +Most of the time you will probably be focusing mostly on locals, as they represent the high level state of your program. + +You might be interested in inspecting the witness map in case you are trying to solve a really low level issue in the compiler or runtime itself, so this concerns mostly advanced or niche users. + +Let's step through the program, by using the debugger buttons or their corresponding keyboard shortcuts. + +![Debugger buttons](@site/static/img/debugger/4-debugger-buttons.png) + +Now we can see in the variables pane that there's values for `digest`, `result` and `x`. + +![Inspecting locals](@site/static/img/debugger/5-assert.png) + +We can also inspect the values of variables by directly hovering on them on the code. + +![Hover locals](@site/static/img/debugger/6-hover.png) + +Let's set a break point at the `keccak256` function, so we can continue execution up to the point when it's first invoked without having to go one step at a time. + +We just need to click the to the right of the line number 18. Once the breakpoint appears, we can click the `continue` button or use its corresponding keyboard shortcut (`F5` by default). + +![Breakpoint](@site/static/img/debugger/7-break.png) + +Now we are debugging the `keccak256` function, notice the _Call Stack pane_ at the lower right. This lets us inspect the current call stack of our process. + +That covers most of the current debugger functionalities. Check out [the reference](../../reference/debugger/debugger_vscode.md) for more details on how to configure the debugger. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/how_to/how-to-oracles.md b/docs/versioned_docs/version-v0.32.0/how_to/how-to-oracles.md new file mode 100644 index 00000000000..2f69902062c --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/how_to/how-to-oracles.md @@ -0,0 +1,273 @@ +--- +title: How to use Oracles +description: Learn how to use oracles in your Noir program with examples in both Nargo and NoirJS. This guide also covers writing a JSON RPC server and providing custom foreign call handlers for NoirJS. +keywords: + - Noir Programming + - Oracles + - Nargo + - NoirJS + - JSON RPC Server + - Foreign Call Handlers +sidebar_position: 1 +--- + +This guide shows you how to use oracles in your Noir program. For the sake of clarity, it assumes that: + +- You have read the [explainer on Oracles](../explainers/explainer-oracle.md) and are comfortable with the concept. +- You have a Noir program to add oracles to. You can create one using the [vite-hardhat starter](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) as a boilerplate. +- You understand the concept of a JSON-RPC server. Visit the [JSON-RPC website](https://www.jsonrpc.org/) if you need a refresher. +- You are comfortable with server-side JavaScript (e.g. Node.js, managing packages, etc.). + +For reference, you can find the snippets used in this tutorial on the [Aztec DevRel Repository](https://github.com/AztecProtocol/dev-rel/tree/main/code-snippets/how-to-oracles). 
+ +## Rundown + +This guide has 3 major steps: + +1. How to modify our Noir program to make use of oracle calls as unconstrained functions +2. How to write a JSON RPC Server to resolve these oracle calls with Nargo +3. How to use them in Nargo and how to provide a custom resolver in NoirJS + +## Step 1 - Modify your Noir program + +An oracle is defined in a Noir program by defining two methods: + +- An unconstrained method - This tells the compiler that it is executing an [unconstrained functions](../noir/concepts//unconstrained.md). +- A decorated oracle method - This tells the compiler that this method is an RPC call. + +An example of an oracle that returns a `Field` would be: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(number: Field) -> Field { } + +unconstrained fn get_sqrt(number: Field) -> Field { + sqrt(number) +} +``` + +In this example, we're wrapping our oracle function in an unconstrained method, and decorating it with `oracle(getSqrt)`. We can then call the unconstrained function as we would call any other function: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); +} +``` + +In the next section, we will make this `getSqrt` (defined on the `sqrt` decorator) be a method of the RPC server Noir will use. + +:::danger + +As explained in the [Oracle Explainer](../explainers/explainer-oracle.md), this `main` function is unsafe unless you constrain its return value. For example: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); + assert(sqrt.pow_32(2) as u64 == input as u64); // <---- constrain the return of an oracle! +} +``` + +::: + +:::info + +Currently, oracles only work with single params or array params. For example: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt([Field; 2]) -> [Field; 2] { } +``` + +::: + +## Step 2 - Write an RPC server + +Brillig will call *one* RPC server. Most likely you will have to write your own, and you can do it in whatever language you prefer. In this guide, we will do it in Javascript. + +Let's use the above example of an oracle that consumes an array with two `Field` and returns their square roots: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(input: [Field; 2]) -> [Field; 2] { } + +unconstrained fn get_sqrt(input: [Field; 2]) -> [Field; 2] { + sqrt(input) +} + +fn main(input: [Field; 2]) { + let sqrt = get_sqrt(input); + assert(sqrt[0].pow_32(2) as u64 == input[0] as u64); + assert(sqrt[1].pow_32(2) as u64 == input[1] as u64); +} +``` + +:::info + +Why square root? + +In general, computing square roots is computationally more expensive than multiplications, which takes a toll when speaking about ZK applications. In this case, instead of calculating the square root in Noir, we are using our oracle to offload that computation to be made in plain. In our circuit we can simply multiply the two values. 
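+As a sketch (assuming, as in the example above, that the inputs are perfect squares), the in-circuit check boils down to a couple of multiplications:
+
+```rust
+// Squaring the oracle's answers in-circuit and comparing them against the
+// original inputs, similar to the `pow_32(2)` asserts in the example above.
+assert(sqrt[0] * sqrt[0] == input[0]);
+assert(sqrt[1] * sqrt[1] == input[1]);
+```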
+
+:::
+
+Now, we should write the corresponding RPC server, starting with the [default JSON-RPC 2.0 boilerplate](https://www.npmjs.com/package/json-rpc-2.0#example):
+
+```js
+import { JSONRPCServer } from "json-rpc-2.0";
+import express from "express";
+import bodyParser from "body-parser";
+
+const app = express();
+app.use(bodyParser.json());
+
+const server = new JSONRPCServer();
+app.post("/", (req, res) => {
+  const jsonRPCRequest = req.body;
+  server.receive(jsonRPCRequest).then((jsonRPCResponse) => {
+    if (jsonRPCResponse) {
+      res.json(jsonRPCResponse);
+    } else {
+      res.sendStatus(204);
+    }
+  });
+});
+
+app.listen(5555);
+```
+
+Now, we will add our `getSqrt` method, as expected by the `#[oracle(getSqrt)]` decorator in our Noir code. It maps through the params array and returns their square roots:
+
+```js
+server.addMethod("resolve_function_call", async (params) => {
+  if (params.function !== "getSqrt") {
+    throw Error("Unexpected foreign call");
+  }
+  const values = params.inputs[0].Array.map((field) => {
+    return `${Math.sqrt(parseInt(field, 16))}`;
+  });
+  return { values: [{ Array: values }] };
+});
+```
+
+If you're using TypeScript, the following types may be helpful in understanding the expected return value and making sure they're easy to follow:
+
+```js
+interface SingleForeignCallParam {
+  Single: string,
+}
+
+interface ArrayForeignCallParam {
+  Array: string[],
+}
+
+type ForeignCallParam = SingleForeignCallParam | ArrayForeignCallParam;
+
+interface ForeignCallResult {
+  values: ForeignCallParam[],
+}
+```
+
+:::note[Multidimensional Arrays]
+
+If the Oracle function is returning an array containing other arrays, such as `[['1','2'],['3','4']]`, you need to provide the values in JSON as flattened values. In the previous example, it would be `['1', '2', '3', '4']`. In the Noir program, the Oracle signature can use a nested type; the flattened values will be automatically converted to the nested type.
+
+:::
+
+## Step 3 - Usage with Nargo
+
+Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test` and `nargo execute` commands by passing a value to `--oracle-resolver`. For example:
+
+```bash
+nargo test --oracle-resolver http://localhost:5555
+```
+
+This tells `nargo` to use your RPC Server URL whenever it finds an oracle decorator.
+
+## Step 4 - Usage with NoirJS
+
+In a JS environment, an RPC server is not strictly necessary, as you may want to resolve your oracles without needing any JSON call at all. NoirJS simply expects that you pass a callback function when you generate proofs, and that callback function can be anything.
+
+For example, if your Noir program expects the host machine to provide CPU pseudo-randomness, you could simply pass it as the `foreignCallHandler`. You don't strictly need to create an RPC server to serve pseudo-randomness, as you may as well get it directly in your app:
+
+```js
+const foreignCallHandler = (name, inputs) => crypto.randomBytes(16) // etc
+
+await noir.execute(inputs, foreignCallHandler)
+```
+
+As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) function simply means "a callback function that returns a value of type [`ForeignCallOutput`](../reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md)". It doesn't have to be an RPC call like in the case for Nargo.
+
+:::tip
+
+Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)?
+
+You don't technically have to, but then how would you run `nargo test`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server.
+
+:::
+
+In this case, let's make `foreignCallHandler` call the JSON RPC Server we created in [Step #2](#step-2---write-an-rpc-server), by making it a JSON RPC Client.
+
+For example, using the same `getSqrt` program in [Step #1](#step-1---modify-your-noir-program) (comments in the code):
+
+```js
+import { JSONRPCClient } from "json-rpc-2.0";
+
+// declaring the JSONRPCClient
+const client = new JSONRPCClient((jsonRPCRequest) => {
+// hitting the same JSON RPC Server we coded above
+  return fetch("http://localhost:5555", {
+    method: "POST",
+    headers: {
+      "content-type": "application/json",
+    },
+    body: JSON.stringify(jsonRPCRequest),
+  }).then((response) => {
+    if (response.status === 200) {
+      return response
+        .json()
+        .then((jsonRPCResponse) => client.receive(jsonRPCResponse));
+    } else if (jsonRPCRequest.id !== undefined) {
+      return Promise.reject(new Error(response.statusText));
+    }
+  });
+});
+
+// declaring a function that takes the name of the foreign call (getSqrt) and the inputs
+const foreignCallHandler = async (name, input) => {
+  // notice that the "input" parameter contains *all* the inputs
+  // in this case we make the RPC request with the program's first parameter "input", which would be input[0]
+  const oracleReturn = await client.request(name, [
+    input[0].map((i) => i.toString("hex")),
+  ]);
+  return { values: oracleReturn };
+};
+
+// the rest of your NoirJS code
+const input = { input: [4, 16] };
+const { witness } = await noir.execute(input, foreignCallHandler);
+```
+
+:::tip
+
+If you're in a NoirJS environment running your RPC server together with a frontend app, you'll probably hit a familiar problem in full-stack development: requests being blocked by [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) policy. For development only, you can simply install and use the [`cors` npm package](https://www.npmjs.com/package/cors) to get around the problem:
+
+```bash
+yarn add cors
+```
+
+and use it as a middleware:
+
+```js
+import cors from "cors";
+
+const app = express();
+app.use(cors())
+```
+
+:::
+
+## Conclusion
+
+Hopefully by the end of this guide, you should be able to:
+
+- Write your own logic around Oracles, including a JSON RPC server that makes them work with your Nargo commands.
+- Provide custom foreign call handlers for NoirJS.
diff --git a/docs/versioned_docs/version-v0.32.0/how_to/how-to-recursion.md b/docs/versioned_docs/version-v0.32.0/how_to/how-to-recursion.md
new file mode 100644
index 00000000000..71f02fa5435
--- /dev/null
+++ b/docs/versioned_docs/version-v0.32.0/how_to/how-to-recursion.md
@@ -0,0 +1,180 @@
+---
+title: How to use recursion on NoirJS
+description: Learn how to implement recursion with NoirJS, a powerful tool for creating smart contracts on the EVM blockchain. This guide assumes familiarity with NoirJS, Solidity verifiers, and the Barretenberg proving backend. Discover how to generate both final and intermediate proofs using `noir_js` and `backend_barretenberg`.
+keywords: + [ + "NoirJS", + "EVM blockchain", + "smart contracts", + "recursion", + "solidity verifiers", + "Barretenberg backend", + "noir_js", + "backend_barretenberg", + "intermediate proofs", + "final proofs", + "nargo compile", + "json import", + "recursive circuit", + "recursive app" + ] +sidebar_position: 1 +--- + +This guide shows you how to use recursive proofs in your NoirJS app. For the sake of clarity, it is assumed that: + +- You already have a NoirJS app. If you don't, please visit the [NoirJS tutorial](../tutorials/noirjs_app.md) and the [reference](../reference/NoirJS/noir_js/index.md). +- You are familiar with what are recursive proofs and you have read the [recursion explainer](../explainers/explainer-recursion.md) +- You already built a recursive circuit following [the reference](../noir/standard_library/recursion.md), and understand how it works. + +It is also assumed that you're not using `noir_wasm` for compilation, and instead you've used [`nargo compile`](../reference/nargo_commands.md) to generate the `json` you're now importing into your project. However, the guide should work just the same if you're using `noir_wasm`. + +:::info + +As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. This means that it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit. + +While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. This means that these proofs need to be created by using the backend directly. + +In short: + +- `noir_js` generates *only* final proofs +- `backend_barretenberg` generates both types of proofs + +::: + +In a standard recursive app, you're also dealing with at least two circuits. For the purpose of this guide, we will assume the following: + +- `main`: a circuit of type `assert(x != y)`, where `main` is marked with a `#[recursive]` attribute. This attribute states that the backend should generate proofs that are friendly for verification within another circuit. +- `recursive`: a circuit that verifies `main` + +For a full example of how recursive proofs work, please refer to the [noir-examples](https://github.com/noir-lang/noir-examples) repository. We will *not* be using it as a reference for this guide. + +## Step 1: Setup + +In a common NoirJS app, you need to instantiate a backend with something like `const backend = new Backend(circuit)`. Then you feed it to the `noir_js` interface. + +For recursion, this doesn't happen, and the only need for `noir_js` is only to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. + +It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: + +```js +const backend = new Backend(circuit, { threads: 8 }) +``` + +:::tip +You can use the [`os.cpus()`](https://nodejs.org/api/os.html#oscpus) object in `nodejs` or [`navigator.hardwareConcurrency`](https://developer.mozilla.org/en-US/docs/Web/API/Navigator/hardwareConcurrency) on the browser to make the most out of those glorious cpu cores +::: + +## Step 2: Generating the witness and the proof for `main` + +After instantiating the backend, you should also instantiate `noir_js`. We will use it to execute the circuit and get the witness. 
+
+```js
+const noir = new Noir(circuit)
+const { witness } = await noir.execute(input)
+```
+
+With this witness, you are now able to generate the intermediate proof for the main circuit:
+
+```js
+const { proof, publicInputs } = await backend.generateProof(witness)
+```
+
+:::warning
+
+Always keep in mind what is actually happening in your development process, otherwise you'll quickly become confused about what circuit we are actually running and why!
+
+In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof.
+
+With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*, so it must be Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain.
+
+:::
+
+## Step 3 - Verification and proof artifacts
+
+Optionally, you are able to verify the intermediate proof:
+
+```js
+const verified = await backend.verifyProof({ proof, publicInputs })
+```
+
+This can be useful to make sure our intermediate proof was correctly generated. But the real goal is to do it within another circuit. For that, we need to generate recursive proof artifacts that will be passed to the circuit that is verifying the proof we just generated. Instead of passing the proof and verification key as a byte array, we pass them as fields which makes it cheaper to verify in a circuit:
+
+```js
+const { proofAsFields, vkAsFields, vkHash } = await backend.generateRecursiveProofArtifacts( { publicInputs, proof }, publicInputsCount)
+```
+
+This call takes the public inputs and the proof, but also the public inputs count. While this is easily retrievable by simply counting the `publicInputs` length, the backend interface doesn't currently abstract it away.
+
+:::info
+
+The `proofAsFields` has a constant size `[Field; 93]` and verification keys in Barretenberg are always `[Field; 114]`.
+
+:::
+
+:::warning
+
+One common mistake is to forget *who* makes this call.
+
+In a situation where Alice is generating the `main` proof, if she generates the proof artifacts and sends them to Bob, who gladly takes them as true, this would mean Alice could prove anything!
+
+Instead, Bob needs to make sure *he* extracts the proof artifacts, using his own instance of the `main` circuit backend. This way, Alice has to provide a valid proof for the correct `main` circuit.
+
+:::
+
+## Step 4 - Recursive proof generation
+
+With the artifacts, generating a recursive proof is no different from a normal proof. You simply use the `backend` (with the recursive circuit) to generate it:
+
+```js
+const recursiveInputs = {
+  verification_key: vkAsFields, // array of length 114
+  proof: proofAsFields, // array of length 93 + size of public inputs
+  publicInputs: [mainInput.y], // using the example above, where `y` is the only public input
+  key_hash: vkHash,
+}
+
+const { witness, returnValue } = await noir.execute(recursiveInputs) // we're executing the recursive circuit now!
+const { proof, publicInputs } = await backend.generateProof(witness)
+const verified = await backend.verifyProof({ proof, publicInputs })
+```
+
+You can obviously chain this proof into another proof. In fact, if you're using recursive proofs, you're probably interested in using them this way!
+
+:::tip
+
+Managing circuits and "who does what" can be confusing. To make sure your naming is consistent, you can keep them in an object.
For example: + +```js +const circuits = { + main: mainJSON, + recursive: recursiveJSON +} +const backends = { + main: new BarretenbergBackend(circuits.main), + recursive: new BarretenbergBackend(circuits.recursive) +} +const noir_programs = { + main: new Noir(circuits.main), + recursive: new Noir(circuits.recursive) +} +``` + +This allows you to neatly call exactly the method you want without conflicting names: + +```js +// Alice runs this 👇 +const { witness: mainWitness } = await noir_programs.main.execute(input) +const proof = await backends.main.generateProof(mainWitness) + +// Bob runs this 👇 +const verified = await backends.main.verifyProof(proof) +const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateRecursiveProofArtifacts( + proof, + numPublicInputs, +); +const { witness: recursiveWitness } = await noir_programs.recursive.execute(recursiveInputs) +const recursiveProof = await backends.recursive.generateProof(recursiveWitness); +``` + +::: diff --git a/docs/versioned_docs/version-v0.32.0/how_to/how-to-solidity-verifier.md b/docs/versioned_docs/version-v0.32.0/how_to/how-to-solidity-verifier.md new file mode 100644 index 00000000000..c800d91ac69 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/how_to/how-to-solidity-verifier.md @@ -0,0 +1,251 @@ +--- +title: Generate a Solidity Verifier +description: + Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier + contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart + contract. Read more to find out +keywords: + [ + solidity verifier, + smart contract, + blockchain, + compiler, + plonk_vk.sol, + EVM blockchain, + verifying Noir programs, + proving backend, + Barretenberg, + ] +sidebar_position: 0 +pagination_next: tutorials/noirjs_app +--- + +Noir has the ability to generate a verifier contract in Solidity, which can be deployed in many EVM-compatible blockchains such as Ethereum. + +This allows for a powerful feature set, as one can make use of the conciseness and the privacy provided by Noir in an immutable ledger. Applications can range from simple P2P guessing games, to complex private DeFi interactions. + +This guide shows you how to generate a Solidity Verifier and deploy it on the [Remix IDE](https://remix.ethereum.org/). It is assumed that: + +- You are comfortable with the Solidity programming language and understand how contracts are deployed on the Ethereum network +- You have Noir installed and you have a Noir program. If you don't, [get started](../getting_started/installation/index.md) with Nargo and the example Hello Noir circuit +- You are comfortable navigating RemixIDE. If you aren't or you need a refresher, you can find some video tutorials [here](https://www.youtube.com/channel/UCjTUPyFEr2xDGN6Cg8nKDaA) that could help you. + +## Rundown + +Generating a Solidity Verifier contract is actually a one-command process. However, compiling it and deploying it can have some caveats. Here's the rundown of this guide: + +1. How to generate a solidity smart contract +2. How to compile the smart contract in the RemixIDE +3. How to deploy it to a testnet + +## Step 1 - Generate a contract + +This is by far the most straightforward step. Just run: + +```sh +nargo compile +``` + +This will compile your source code into a Noir build artifact to be stored in the `./target` directory, you can then generate the smart contract using the commands: + +```sh +# Here we pass the path to the newly generated Noir artifact. 
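+# (As a note: `write_vk` computes the circuit's verification key, and `contract`
+# then uses that key to produce the Solidity verifier.)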
+bb write_vk -b ./target/<noir_artifact_name>.json
+bb contract
+```
+
+replacing `<noir_artifact_name>` with the name of your Noir project. A new `contract` folder would then be generated in your project directory, containing the Solidity
+file `contract.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract.
+
+:::info
+
+It is possible to generate verifier contracts of Noir programs for other smart contract platforms as long as the proving backend supplies an implementation.
+
+Barretenberg, the default proving backend for Nargo, supports generation of verifier contracts; for the time being these are only in Solidity.
+:::
+
+## Step 2 - Compiling
+
+We will mostly skip the details of RemixIDE, as the UI can change from version to version. For now, we can just open
+Remix and create a blank workspace.
+
+![Create Workspace](@site/static/img/how-tos/solidity_verifier_1.png)
+
+We will create a new file to contain the contract Nargo generated, and copy-paste its content.
+
+:::warning
+
+You'll likely see a warning advising you to not trust pasted code. While it is an important warning, it is irrelevant in the context of this guide and can be ignored. We will not be deploying anywhere near a mainnet.
+
+:::
+
+To compile the verifier, we can navigate to the compilation tab:
+
+![Compilation Tab](@site/static/img/how-tos/solidity_verifier_2.png)
+
+Remix should automatically match a suitable compiler version. However, hitting the "Compile" button will most likely generate a "Stack too deep" error:
+
+![Stack too deep](@site/static/img/how-tos/solidity_verifier_3.png)
+
+This is due to the verify function needing to put many variables on the stack, but enabling the optimizer resolves the issue. To do this, let's open the "Advanced Configurations" tab and enable optimization. The default 200 runs will suffice.
+
+:::info
+
+This time we will see a warning about an unused function parameter. This is expected, as the `verify` function doesn't use the `_proof` parameter inside a Solidity block; it is loaded from calldata and used in assembly.
+
+:::
+
+![Compilation success](@site/static/img/how-tos/solidity_verifier_4.png)
+
+## Step 3 - Deploying
+
+At this point we should have a compiled contract ready to deploy. If we navigate to the deploy section in Remix, we will see many different environments we can deploy to. The steps to deploy on each environment would be out-of-scope for this guide, so we will just use the default Remix VM.
+
+Looking closely, we will notice that our "Solidity Verifier" is actually three contracts working together:
+
+- An `UltraVerificationKey` library which simply stores the verification key for our circuit.
+- An abstract contract `BaseUltraVerifier` containing most of the verifying logic.
+- A main `UltraVerifier` contract that inherits from the Base and uses the Key contract.
+
+Remix will take care of the dependencies for us so we can simply deploy the UltraVerifier contract by selecting it and hitting "deploy":
+
+![Deploying UltraVerifier](@site/static/img/how-tos/solidity_verifier_5.png)
+
+A contract will show up in the "Deployed Contracts" section, where we can retrieve the Verification Key Hash. This is particularly useful for double-checking that the deployed contract is the correct one.
+
+:::note
+
+Why "UltraVerifier"?
+
+To be precise, the Noir compiler (`nargo`) doesn't generate the verifier contract directly. It compiles the Noir code into an intermediate language (ACIR), which is then executed by the backend.
So it is the backend that returns the verifier smart contract, not Noir. + +In this case, the Barretenberg Backend uses the UltraPlonk proving system, hence the "UltraVerifier" name. + +::: + +## Step 4 - Verifying + +To verify a proof using the Solidity verifier contract, we call the `verify` function in this extended contract: + +```solidity +function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) +``` + +When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. Note that the public inputs must be passed in separately to the rest of the proof so we must split the proof as returned from `bb`. + +First generate a proof with `bb` at the location `./proof` using the steps in [get started](../getting_started/hello_noir/index.md), this proof is in a binary format but we want to convert it into a hex string to pass into Remix, this can be done with the + +```bash +# This value must be changed to match the number of public inputs (including return values!) in your program. +NUM_PUBLIC_INPUTS=1 +PUBLIC_INPUT_BYTES=32*NUM_PUBLIC_INPUTS +HEX_PUBLIC_INPUTS=$(head -c $PUBLIC_INPUT_BYTES ./proof | od -An -v -t x1 | tr -d $' \n') +HEX_PROOF=$(tail -c +$(($PUBLIC_INPUT_BYTES + 1)) ./proof | od -An -v -t x1 | tr -d $' \n') + +echo "Public inputs:" +echo $HEX_PUBLIC_INPUTS + +echo "Proof:" +echo "0x$HEX_PROOF" +``` + +Remix expects that the public inputs will be split into an array of `bytes32` values so `HEX_PUBLIC_INPUTS` needs to be split up into 32 byte chunks which are prefixed with `0x` accordingly. + +A programmatic example of how the `verify` function is called can be seen in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): + +```solidity +function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 nullifierHash) public returns (bool) { + // ... + bytes32[] memory publicInputs = new bytes32[](4); + publicInputs[0] = merkleRoot; + publicInputs[1] = bytes32(proposalId); + publicInputs[2] = bytes32(vote); + publicInputs[3] = nullifierHash; + require(verifier.verify(proof, publicInputs), "Invalid proof"); +``` + +:::info[Return Values] + +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in Noir. + +Under the hood, the return value is passed as an input to the circuit and is checked at the end of the circuit program. + +For example, if you have Noir program like this: + +```rust +fn main( + // Public inputs + pubkey_x: pub Field, + pubkey_y: pub Field, + // Private inputs + priv_key: Field, +) -> pub Field +``` + +the `verify` function will expect the public inputs array (second function parameter) to be of length 3, the two inputs and the return value. + +Passing only two inputs will result in an error such as `PUBLIC_INPUT_COUNT_INVALID(3, 2)`. + +In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return`. + +::: + +:::tip[Structs] + +You can pass structs to the verifier contract. They will be flattened so that the array of inputs is 1-dimensional array. 
+ +For example, consider the following program: + +```rust +struct Type1 { + val1: Field, + val2: Field, +} + +struct Nested { + t1: Type1, + is_true: bool, +} + +fn main(x: pub Field, nested: pub Nested, y: pub Field) { + //... +} +``` + +The order of these inputs would be flattened to: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]` + +::: + +The other function you can call is our entrypoint `verify` function, as defined above. + +:::tip + +It's worth noticing that the `verify` function is actually a `view` function. A `view` function does not alter the blockchain state, so it doesn't need to be distributed (i.e. it will run only on the executing node), and therefore doesn't cost any gas. + +This can be particularly useful in some situations. If Alice generated a proof and wants Bob to verify its correctness, Bob doesn't need to run Nargo, NoirJS, or any Noir specific infrastructure. He can simply make a call to the blockchain with the proof and verify it is correct without paying any gas. + +It would be incorrect to say that a Noir proof verification costs any gas at all. However, most of the time the result of `verify` is used to modify state (for example, to update a balance, a game state, etc). In that case the whole network needs to execute it, which does incur gas costs (calldata and execution, but not storage). + +::: + +## A Note on EVM chains + +Noir proof verification requires the ecMul, ecAdd and ecPairing precompiles. Not all EVM chains support EC Pairings, notably some of the ZK-EVMs. This means that you won't be able to use the verifier contract in all of them. You can find an incomplete list of which EVM chains support these precompiles [here](https://www.evmdiff.com/features?feature=precompiles). + +For example, chains like `zkSync ERA` and `Polygon zkEVM` do not currently support these precompiles, so proof verification via Solidity verifier contracts won't work. Here's a quick list of EVM chains that have been tested and are known to work: + +- Optimism +- Arbitrum +- Polygon PoS +- Scroll +- Celo + +If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. + +## What's next + +Now that you know how to call a Noir Solidity Verifier on a smart contract using Remix, you should be comfortable with using it with some programmatic frameworks, such as [hardhat](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) and [foundry](https://github.com/noir-lang/noir-starter/tree/main/with-foundry). + +You can find other tools, examples, boilerplates and libraries in the [awesome-noir](https://github.com/noir-lang/awesome-noir) repository. + +You should also be ready to write and deploy your first NoirJS app and start generating proofs on websites, phones, and NodeJS environments! Head on to the [NoirJS tutorial](../tutorials/noirjs_app.md) to learn how to do that. diff --git a/docs/versioned_docs/version-v0.32.0/how_to/merkle-proof.mdx b/docs/versioned_docs/version-v0.32.0/how_to/merkle-proof.mdx new file mode 100644 index 00000000000..0a128adb2de --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/how_to/merkle-proof.mdx @@ -0,0 +1,48 @@ +--- +title: Prove Merkle Tree Membership +description: + Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a + merkle tree with a specified root, at a given index. 
+keywords: + [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +sidebar_position: 4 +--- + +Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is +in a merkle tree. + +```rust + +fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { + let leaf = std::hash::hash_to_field(message.as_slice()); + let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); + assert(merkle_root == root); +} + +``` + +The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen +by the backend. The only requirement is that this hash function can heuristically be used as a +random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` +instead. + +```rust +let leaf = std::hash::hash_to_field(message.as_slice()); +``` + +The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. + +```rust +let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); +assert (merkle_root == root); +``` + +> **Note:** It is possible to re-implement the merkle tree implementation without standard library. +> However, for most usecases, it is enough. In general, the standard library will always opt to be +> as conservative as possible, while striking a balance with efficiency. + +An example, the merkle membership proof, only requires a hash function that has collision +resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient +than the even more conservative sha256. + +[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.32.0/how_to/using-devcontainers.mdx b/docs/versioned_docs/version-v0.32.0/how_to/using-devcontainers.mdx new file mode 100644 index 00000000000..727ec6ca667 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/how_to/using-devcontainers.mdx @@ -0,0 +1,110 @@ +--- +title: Developer Containers and Codespaces +description: "Learn how to set up a devcontainer in your GitHub repository for a seamless coding experience with Codespaces. Follow our easy 8-step guide to create your own Noir environment without installing Nargo locally." +keywords: ["Devcontainer", "Codespaces", "GitHub", "Noir Environment", "Docker Image", "Development Environment", "Remote Coding", "GitHub Codespaces", "Noir Programming", "Nargo", "VSCode Extensions", "Noirup"] +sidebar_position: 1 +--- + +Adding a developer container configuration file to your Noir project is one of the easiest way to unlock coding in browser. + +## What's a devcontainer after all? + +A [Developer Container](https://containers.dev/) (devcontainer for short) is a Docker image that comes preloaded with tools, extensions, and other tools you need to quickly get started or continue a project, without having to install Nargo locally. Think of it as a development environment in a box. + +There are many advantages to this: + +- It's platform and architecture agnostic +- You don't need to have an IDE installed, or Nargo, or use a terminal at all +- It's safer for using on a public machine or public network + +One of the best ways of using devcontainers is... not using your machine at all, for maximum control, performance, and ease of use. 
+Enter Codespaces. + +## Codespaces + +If a devcontainer is just a Docker image, then what stops you from provisioning a `p3dn.24xlarge` AWS EC2 instance with 92 vCPUs and 768 GiB RAM and using it to prove your 10-gate SNARK proof? + +Nothing! Except perhaps the 30-40$ per hour it will cost you. + +The problem is that provisioning takes time, and I bet you don't want to see the AWS console every time you want to code something real quick. + +Fortunately, there's an easy and free way to get a decent remote machine ready and loaded in less than 2 minutes: Codespaces. [Codespaces is a Github feature](https://github.com/features/codespaces) that allows you to code in a remote machine by using devcontainers, and it's pretty cool: + +- You can start coding Noir in less than a minute +- It uses the resources of a remote machine, so you can code on your grandma's phone if needed be +- It makes it easy to share work with your frens +- It's fully reusable, you can stop and restart whenever you need to + +:::info + +Don't take out your wallet just yet. Free GitHub accounts get about [15-60 hours of coding](https://github.com/features/codespaces) for free per month, depending on the size of your provisioned machine. + +::: + +## Tell me it's _actually_ easy + +It is! + +Github comes with a default codespace and you can use it to code your own devcontainer. That's exactly what we will be doing in this guide. + + + +8 simple steps: + +#### 1. Create a new repository on GitHub. + +#### 2. Click "Start coding with Codespaces". This will use the default image. + +#### 3. Create a folder called `.devcontainer` in the root of your repository. + +#### 4. Create a Dockerfile in that folder, and paste the following code: + +```docker +FROM --platform=linux/amd64 node:lts-bookworm-slim +SHELL ["/bin/bash", "-c"] +RUN apt update && apt install -y curl bash git tar gzip libc++-dev +RUN curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +ENV PATH="/root/.nargo/bin:$PATH" +RUN noirup +ENTRYPOINT ["nargo"] +``` +#### 5. Create a file called `devcontainer.json` in the same folder, and paste the following code: + +```json +{ + "name": "Noir on Codespaces", + "build": { + "context": ".", + "dockerfile": "Dockerfile" + }, + "customizations": { + "vscode": { + "extensions": ["noir-lang.vscode-noir"] + } + } +} +``` +#### 6. Commit and push your changes + +This will pull the new image and build it, so it could take a minute or so + +#### 8. Done! +Just wait for the build to finish, and there's your easy Noir environment. + + +Refer to [noir-starter](https://github.com/noir-lang/noir-starter/) as an example of how devcontainers can be used together with codespaces. + + + +## How do I use it? + +Using the codespace is obviously much easier than setting it up. +Just navigate to your repository and click "Code" -> "Open with Codespaces". It should take a few seconds to load, and you're ready to go. + +:::info + +If you really like the experience, you can add a badge to your readme, links to existing codespaces, and more. +Check out the [official docs](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/facilitating-quick-creation-and-resumption-of-codespaces) for more info. 
diff --git a/docs/versioned_docs/version-v0.32.0/index.mdx b/docs/versioned_docs/version-v0.32.0/index.mdx new file mode 100644 index 00000000000..a6bd306f91d --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/index.mdx @@ -0,0 +1,67 @@ +--- +title: Noir Lang +hide_title: true +description: + Learn about the public alpha release of Noir, a domain specific language heavily influenced by Rust that compiles to + an intermediate language which can be compiled to an arithmetic circuit or a rank-1 constraint system. +keywords: + [Noir, + Domain Specific Language, + Rust, + Intermediate Language, + Arithmetic Circuit, + Rank-1 Constraint System, + Ethereum Developers, + Protocol Developers, + Blockchain Developers, + Proving System, + Smart Contract Language] +sidebar_position: 0 +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +Noir Logo + +Noir is an open-source Domain-Specific Language for safe and seamless construction of privacy-preserving Zero-Knowledge programs, requiring no previous knowledge on the underlying mathematics or cryptography. + +ZK programs are programs that can generate short proofs of statements without revealing all inputs to the statements. You can read more about Zero-Knowledge Proofs [here](https://dev.to/spalladino/a-beginners-intro-to-coding-zero-knowledge-proofs-c56). + +## What's new about Noir? + +Noir works differently from most ZK languages by taking a two-pronged path. First, it compiles the program to an adaptable intermediate language known as ACIR. From there, depending on a given project's needs, ACIR can be further compiled into an arithmetic circuit for integration with the proving backend. + +:::info + +Noir is backend agnostic, which means it makes no assumptions on which proving backend powers the ZK proof. Being the language that powers [Aztec Contracts](https://docs.aztec.network/developers/contracts/main), it defaults to Aztec's Barretenberg proving backend. + +However, the ACIR output can be transformed to be compatible with other PLONK-based backends, or into a [rank-1 constraint system](https://www.rareskills.io/post/rank-1-constraint-system) suitable for backends such as Arkwork's Marlin. + +::: + +## Who is Noir for? + +Noir can be used both in complex cloud-based backends and in user's smartphones, requiring no knowledge on the underlying math or cryptography. From authorization systems that keep a password in the user's device, to complex on-chain verification of recursive proofs, Noir is designed to abstract away complexity without any significant overhead. Here are some examples of situations where Noir can be used: + + + + Noir Logo + + Aztec Contracts leverage Noir to allow for the storage and execution of private information. Writing an Aztec Contract is as easy as writing Noir, and Aztec developers can easily interact with the network storage and execution through the [Aztec.nr](https://docs.aztec.network/developers/contracts/main) library. + + + Soliditry Verifier Example + Noir can auto-generate Solidity verifier contracts that verify Noir proofs. This allows for non-interactive verification of proofs containing private information in an immutable system. This feature powers a multitude of use-case scenarios, from P2P chess tournaments, to [Aztec Layer-2 Blockchain](https://docs.aztec.network/) + + + Aztec Labs developed NoirJS, an easy interface to generate and verify Noir proofs in a Javascript environment. 
This allows for Noir to be used in webpages, mobile apps, games, and any other environment supporting JS execution in a standalone manner. + + + + +## Libraries + +Noir is meant to be easy to extend by simply importing Noir libraries just like in Rust. +The [awesome-noir repo](https://github.com/noir-lang/awesome-noir#libraries) is a collection of libraries developed by the Noir community. +Writing a new library is easy and makes code be composable and easy to reuse. See the section on [dependencies](noir/modules_packages_crates/dependencies.md) for more information. diff --git a/docs/versioned_docs/version-v0.32.0/migration_notes.md b/docs/versioned_docs/version-v0.32.0/migration_notes.md new file mode 100644 index 00000000000..6bd740024e5 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/migration_notes.md @@ -0,0 +1,105 @@ +--- +title: Migration notes +description: Read about migration notes from previous versions, which could solve problems while updating +keywords: [Noir, notes, migration, updating, upgrading] +--- + +Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built. + +### `backend encountered an error: libc++.so.1` + +Depending on your OS, you may encounter the following error when running `nargo prove` for the first time: + +```text +The backend encountered an error: "/home/codespace/.nargo/backends/acvm-backend-barretenberg/backend_binary: error while loading shared libraries: libc++.so.1: cannot open shared object file: No such file or directory\n" +``` + +Install the `libc++-dev` library with: + +```bash +sudo apt install libc++-dev +``` + +## ≥0.19 + +### Enforcing `compiler_version` + +From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use. + +To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`. + +## ≥0.14 + +The index of the [for loops](noir/concepts/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: + +```rust +for i in 0..10 { + let i = i as Field; +} +``` + +## ≥v0.11.0 and Nargo backend + +From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading to the versions per usual steps might lead to: + +### `backend encountered an error` + +This is likely due to the existing locally installed version of proving backend (e.g. barretenberg) is incompatible with the version of Nargo in use. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo prove +``` + +with your Noir program. + +This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. + +### `backend encountered an error: illegal instruction` + +On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions. + +To fix the issue: + +1. 
Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz +``` + +This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. + +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. + +Then run: + +``` +DESIRED_BINARY_VERSION=0.8.1 nargo info +``` + +This overrides the bb native binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than native binary. + +0.8.1 indicates bb.js version 0.8.1, so if you change that it will update to a different version or the default version in the script if none was supplied. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/_category_.json b/docs/versioned_docs/version-v0.32.0/noir/concepts/_category_.json new file mode 100644 index 00000000000..7da08f8a8c5 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Concepts", + "position": 0, + "collapsible": true, + "collapsed": true +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/assert.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/assert.md new file mode 100644 index 00000000000..2132de42072 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/assert.md @@ -0,0 +1,78 @@ +--- +title: Assert Function +description: + Learn about the `assert` and `static_assert` functions in Noir, which can be used to explicitly + constrain the predicate or comparison expression that follows to be true, and what happens if + the expression is false at runtime or compile-time, respectively. +keywords: [Noir programming language, assert statement, predicate expression, comparison expression] +sidebar_position: 4 +--- + +Noir includes a special `assert` function which will explicitly constrain the predicate/comparison +expression that follows to be true. If this expression is false at runtime, the program will fail to +be proven. Example: + +```rust +fn main(x : Field, y : Field) { + assert(x == y); +} +``` + +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. + +You can optionally provide a message to be logged when the assertion fails: + +```rust +assert(x == y, "x and y are not equal"); +``` + +Aside string literals, the optional message can be a format string or any other type supported as input for Noir's [print](../standard_library/logging.md) functions. 
This feature lets you incorporate runtime variables into your failed assertion logs: + +```rust +assert(x == y, f"Expected x == y, but got {x} == {y}"); +``` + +Using a variable as an assertion message directly: + +```rust +struct myStruct { + myField: Field +} + +let s = myStruct { myField: y }; +assert(s.myField == x, s); +``` + +There is also a special `static_assert` function that behaves like `assert`, +but that runs at compile-time. + +```rust +fn main(xs: [Field; 3]) { + let x = 2 + 2; + let y = 4; + static_assert(x == y, "expected 2 + 2 to equal 4"); + + // This passes since the length of `xs` is known at compile-time + static_assert(xs.len() == 3, "expected the input to have 3 elements"); +} +``` + +This function fails when passed a dynamic (run-time) argument: + +```rust +fn main(x : Field, y : Field) { + // this fails because `x` is not known at compile-time + static_assert(x == 2, "expected x to be known at compile-time and equal to 2"); + + let mut example_slice = &[]; + if y == 4 { + example_slice = example_slice.push_back(0); + } + + // This fails because the length of `example_slice` is not known at + // compile-time + let error_message = "expected an empty slice, known at compile-time"; + static_assert(example_slice.len() == 0, error_message); +} +``` + diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/comments.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/comments.md new file mode 100644 index 00000000000..b51a85f5c94 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/comments.md @@ -0,0 +1,33 @@ +--- +title: Comments +description: + Learn how to write comments in Noir programming language. A comment is a line of code that is + ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments + are supported in Noir. +keywords: [Noir programming language, comments, single-line comments, multi-line comments] +sidebar_position: 10 +--- + +A comment is a line in your codebase which the compiler ignores, however it can be read by +programmers. + +Here is a single line comment: + +```rust +// This is a comment and is ignored +``` + +`//` is used to tell the compiler to ignore the rest of the line. + +Noir also supports multi-line block comments. Start a block comment with `/*` and end the block with `*/`. + +Noir does not natively support doc comments. You may be able to use [Rust doc comments](https://doc.rust-lang.org/reference/comments.html) in your code to leverage some Rust documentation build tools with Noir code. + +```rust +/* + This is a block comment describing a complex function. +*/ +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/control_flow.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/control_flow.md new file mode 100644 index 00000000000..045d3c3a5f5 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/control_flow.md @@ -0,0 +1,77 @@ +--- +title: Control Flow +description: + Learn how to use loops and if expressions in the Noir programming language. Discover the syntax + and examples for for loops and if-else statements. +keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] +sidebar_position: 2 +--- + +## If Expressions + +Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required +for the statement's conditional to be surrounded by parentheses. 
+ +```rust +let a = 0; +let mut x: u32 = 0; + +if a == 0 { + if a != 0 { + x = 6; + } else { + x = 2; + } +} else { + x = 5; + assert(x == 5); +} +assert(x == 2); +``` + +## Loops + +Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple +times. + +The following block of code between the braces is run 10 times. + +```rust +for i in 0..10 { + // do something +} +``` + +The index for loops is of type `u64`. + +### Break and Continue + +In unconstrained code, `break` and `continue` are also allowed in `for` loops. These are only allowed +in unconstrained code since normal constrained code requires that Noir knows exactly how many iterations +a loop may have. `break` and `continue` can be used like so: + +```rust +for i in 0 .. 10 { + println("Iteration start") + + if i == 2 { + continue; + } + + if i == 5 { + break; + } + + println(i); +} +println("Loop end") +``` + +When used, `break` will end the current loop early and jump to the statement after the for loop. In the example +above, the `break` will stop the loop and jump to the `println("Loop end")`. + +`continue` will stop the current iteration of the loop, and jump to the start of the next iteration. In the example +above, `continue` will jump to `println("Iteration start")` when used. Note that the loop continues as normal after this. +The iteration variable `i` is still increased by one as normal when `continue` is used. + +`break` and `continue` cannot currently be used to jump out of more than a single loop at a time. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_bus.mdx b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_bus.mdx new file mode 100644 index 00000000000..e55e58622ce --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_bus.mdx @@ -0,0 +1,23 @@ +--- +title: Data Bus +sidebar_position: 13 +--- +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +The data bus is an optimization that the backend can use to make recursion more efficient. +In order to use it, you must define some inputs of the program entry points (usually the `main()` +function) with the `call_data` modifier, and the return values with the `return_data` modifier. +These modifiers are incompatible with `pub` and `mut` modifiers. + +## Example + +```rust +fn main(mut x: u32, y: call_data u32, z: call_data [u32;4] ) -> return_data u32 { + let a = z[x]; + a+y +} +``` + +As a result, both call_data and return_data will be treated as private inputs and encapsulated into a read-only array each, for the backend to process. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/_category_.json b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/arrays.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/arrays.md new file mode 100644 index 00000000000..9a4ab5d3c1f --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/arrays.md @@ -0,0 +1,253 @@ +--- +title: Arrays +description: + Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code. 
+keywords: + [ + noir, + array type, + methods, + examples, + indexing, + ] +sidebar_position: 4 +--- + +An array is one way of grouping together values into one compound type. Array types can be inferred +or explicitly specified via the syntax `[; ]`: + +```rust +fn main(x : Field, y : Field) { + let my_arr = [x, y]; + let your_arr: [Field; 2] = [x, y]; +} +``` + +Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. + +Array elements can be accessed using indexing: + +```rust +fn main() { + let a = [1, 2, 3, 4, 5]; + + let first = a[0]; + let second = a[1]; +} +``` + +All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group +a `Field` value and a `u8` value together for example. + +You can write mutable arrays, like: + +```rust +fn main() { + let mut arr = [1, 2, 3, 4, 5]; + assert(arr[0] == 1); + + arr[0] = 42; + assert(arr[0] == 42); +} +``` + +You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0. + +```rust +let array: [Field; 32] = [0; 32]; +``` + +Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./slices.mdx), you can just call `as_slice` on your array: + +```rust +let array: [Field; 32] = [0; 32]; +let sl = array.as_slice() +``` + +You can define multidimensional arrays: + +```rust +let array : [[Field; 2]; 2]; +let element = array[0][0]; +``` + +However, multidimensional slices are not supported. For example, the following code will error at compile time: + +```rust +let slice : [[Field]] = &[]; +``` + +## Types + +You can create arrays of primitive types or structs. There is not yet support for nested arrays +(arrays of arrays) or arrays of structs that contain arrays. + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for arrays. +Each of these functions are located within the generic impl `impl [T; N] {`. +So anywhere `self` appears, it refers to the variable `self: [T; N]`. + +### len + +Returns the length of an array + +```rust +fn len(self) -> Field +``` + +example + +```rust +fn main() { + let array = [42, 42]; + assert(array.len() == 2); +} +``` + +### sort + +Returns a new sorted array. The original array remains untouched. Notice that this function will +only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting +logic it uses internally is optimized specifically for these values. If you need a sort function to +sort any type, you should use the function `sort_via` described below. + +```rust +fn sort(self) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32]; + let sorted = arr.sort(); + assert(sorted == [32, 42]); +} +``` + +### sort_via + +Sorts the array with a custom comparison function + +```rust +fn sort_via(self, ordering: fn(T, T) -> bool) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32] + let sorted_ascending = arr.sort_via(|a, b| a < b); + assert(sorted_ascending == [32, 42]); // verifies + + let sorted_descending = arr.sort_via(|a, b| a > b); + assert(sorted_descending == [32, 42]); // does not verify +} +``` + +### map + +Applies a function to each element of the array, returning a new array containing the mapped elements. 
+ +```rust +fn map(self, f: fn(T) -> U) -> [U; N] +``` + +example + +```rust +let a = [1, 2, 3]; +let b = a.map(|a| a * 2); // b is now [2, 4, 6] +``` + +### fold + +Applies a function to each element of the array, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(self, mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the array, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = [1]; +let a2 = [1, 2]; +let a3 = [1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let arr = [2, 2, 2, 2, 2]; + let folded = arr.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as the starting element. + +```rust +fn reduce(self, f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let reduced = arr.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let all = arr.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 5]; + let any = arr.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/booleans.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/booleans.md new file mode 100644 index 00000000000..2507af710e7 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/booleans.md @@ -0,0 +1,28 @@ +--- +title: Booleans +description: + Delve into the Boolean data type in Noir. Understand its methods, practical examples, and best practices for using Booleans in your Noir programs. +keywords: + [ + noir, + boolean type, + methods, + examples, + logical operations, + ] +sidebar_position: 2 +--- + + +The `bool` type in Noir has two possible values: `true` and `false`: + +```rust +fn main() { + let t = true; + let f: bool = false; +} +``` + +The boolean type is most commonly used in conditionals like `if` expressions and `assert` +statements. More about conditionals is covered in the [Control Flow](../control_flow.md) and +[Assert Function](../assert.md) sections. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/fields.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/fields.md new file mode 100644 index 00000000000..a10a4810788 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/fields.md @@ -0,0 +1,192 @@ +--- +title: Fields +description: + Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. +keywords: + [ + noir, + field type, + methods, + examples, + best practices, + ] +sidebar_position: 0 +--- + +The field type corresponds to the native field type of the proving backend. + +The size of a Noir field depends on the elliptic curve's finite field for the proving backend +adopted. 
For example, a field would be a 254-bit integer when paired with the default backend that +spans the Grumpkin curve. + +Fields support integer arithmetic and are often used as the default numeric type in Noir: + +```rust +fn main(x : Field, y : Field) { + let z = x + y; +} +``` + +`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new +private value `z` constrained to be equal to `x + y`. + +If proving efficiency is of priority, fields should be used as a default for solving problems. +Smaller integer types (e.g. `u64`) incur extra range constraints. + +## Methods + +After declaring a Field, you can use these common methods on it: + +### to_le_bits + +Transforms the field into an array of bits, Little Endian. + +```rust +fn to_le_bits(_x : Field, _bit_size: u32) -> [u1] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_le_bits(32); +} +``` + +### to_be_bits + +Transforms the field into an array of bits, Big Endian. + +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### assert_max_bit_size + +Adds a constraint to specify that the field can be represented with `bit_size` number of bits + +```rust +fn assert_max_bit_size(self, bit_size: u32) +``` + +example: + +```rust +fn main() { + let field = 2 + field.assert_max_bit_size(32); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. + +```rust +fn sgn0(self) -> u1 +``` + + +### lt + +Returns true if the field is less than the other field + +```rust +pub fn lt(self, another: Field) -> bool +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/function_types.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/function_types.md new file mode 100644 index 00000000000..f6121af17e2 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/function_types.md @@ -0,0 +1,26 @@ +--- +title: Function types +sidebar_position: 10 +--- + +Noir supports higher-order functions. The syntax for a function type is as follows: + +```rust +fn(arg1_type, arg2_type, ...) 
-> return_type +``` + +Example: + +```rust +fn assert_returns_100(f: fn() -> Field) { // f takes no args and returns a Field + assert(f() == 100); +} + +fn main() { + assert_returns_100(|| 100); // ok + assert_returns_100(|| 150); // fails +} +``` + +A function type also has an optional capture environment - this is necessary to support closures. +See [Lambdas](../lambdas.md) for more details. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/index.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/index.md new file mode 100644 index 00000000000..3eadb2dc8a4 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/index.md @@ -0,0 +1,118 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. +keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. + +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. + +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. + +> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different +> meaning than when applied to a function (e.g. `pub fn foo() {}`). +> +> The former is a visibility modifier for the Prover to interpret if a value should be made known to +> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a +> function should be made accessible to external Noir programs like in other languages. + +### pub Modifier + +All data types in Noir are private by default. Types are explicitly declared as public using the +`pub` modifier: + +```rust +fn main(x : Field, y : pub Field) -> pub Field { + x + y +} +``` + +In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note +that visibility is handled **per variable**, so it is perfectly valid to have one input that is +private and another that is public. + +> **Note:** Public types can only be declared through parameters on `main`. 
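+
+As a minimal sketch of the note above (the function and variable names are just for illustration): visibility markers appear only on `main`'s signature, while helper functions take plain values, which are private by default.
+
+```rust
+// Only `main` declares visibility: `announced` and the returned value are public.
+fn main(secret: Field, announced: pub Field) -> pub Field {
+    double(secret) + announced
+}
+
+// Helper functions use plain types; they simply operate on whatever `main` passes them.
+fn double(x: Field) -> Field {
+    x + x
+}
+```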
+ +## Type Aliases + +A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: + +```rust +type Id = u8; + +fn main() { + let id: Id = 1; + let zero: u8 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can also be used with [generics](../generics.md): + +```rust +type Id = Size; + +fn main() { + let id: Id = 1; + let zero: u32 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can even refer to other aliases. An error will be issued if they form a cycle: + +```rust +// Ok! +type A = B; +type B = Field; + +type Bad1 = Bad2; + +// error: Dependency cycle found +type Bad2 = Bad1; +// ^^^^^^^^^^^ 'Bad2' recursively depends on itself: Bad2 -> Bad1 -> Bad2 +``` + +## Wildcard Type +Noir can usually infer the type of the variable from the context, so specifying the type of a variable is only required when it cannot be inferred. However, specifying a complex type can be tedious, especially when it has multiple generic arguments. Often some of the generic types can be inferred from the context, and Noir only needs a hint to properly infer the other types. We can partially specify a variable's type by using `_` as a marker, indicating where we still want the compiler to infer the type. + +```rust +let a: [_; 4] = foo(b); +``` + + +### BigInt + +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/integers.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/integers.md new file mode 100644 index 00000000000..a1d59bf3166 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/integers.md @@ -0,0 +1,156 @@ +--- +title: Integers +description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code. +keywords: [noir, integer types, methods, examples, arithmetic] +sidebar_position: 1 +--- + +An integer type is a range constrained field type. +The Noir frontend supports both unsigned and signed integer types. +The allowed sizes are 1, 8, 16, 32 and 64 bits. + +:::info + +When an integer is defined in Noir without a specific type, it will default to `Field`. + +The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible. + +::: + +## Unsigned Integers + +An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: u8 = 1; + let y: u8 = 1; + let z = x + y; + assert (z == 2); +} +``` + +The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $\\2^{8}-1\\$). + +## Signed Integers + +A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: i8 = -1; + let y: i8 = -1; + let z = x + y; + assert (z == -2); +} +``` + +The bit size determines the maximum and minimum range of value the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $\\-2^{7}\\$ to $\\2^{7}-1\\$). + +## 128 bits Unsigned Integers + +The built-in structure `U128` allows you to use 128-bit unsigned integers almost like a native integer type. 
However, there are some differences to keep in mind:
+- You cannot cast between a native integer and `U128`
+- There is a higher performance cost when using `U128`, compared to a native type.
+
+Conversions between unsigned integer types and `U128` are done through the `from_integer` and `to_integer` functions. `from_integer` also accepts the `Field` type as input.
+
+```rust
+fn main() {
+    let x = U128::from_integer(23);
+    let y = U128::from_hex("0x7");
+    let z = x + y;
+    assert(z.to_integer() == 30);
+}
+```
+
+`U128` is implemented with two 64-bit limbs, representing the low and high bits, which explains the performance cost. You should expect `U128` to be roughly twice as costly for addition and four times as costly for multiplication.
+You can construct a U128 from its limbs:
+```rust
+fn main(x: u64, y: u64) {
+    let z = U128::from_u64s_be(x, y);
+    assert(z.hi == x as Field);
+    assert(z.lo == y as Field);
+}
+```
+
+Note that the limbs are stored as Field elements in order to avoid unnecessary conversions.
+Apart from this, most operations will work as usual:
+
+```rust
+fn main(x: U128, y: U128) {
+    // multiplication
+    let c = x * y;
+    // addition and subtraction
+    let c = c - x + y;
+    // division
+    let c = x / y;
+    // bit operations
+    let c = x & y | y;
+    // bit shift
+    let c = x << y;
+    // comparisons
+    let c = x < y;
+    let c = x == y;
+}
+```
+
+## Overflows
+
+Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove:
+
+```rust
+fn main(x: u8, y: u8) {
+    let z = x + y;
+}
+```
+
+With:
+
+```toml
+x = "255"
+y = "1"
+```
+
+Would result in:
+
+```
+$ nargo execute
+error: Assertion failed: 'attempt to add with overflow'
+┌─ ~/src/main.nr:9:13
+│
+│     let z = x + y;
+│             -----
+│
+= Call stack:
+  ...
+```
+
+A similar error would happen with signed integers:
+
+```rust
+fn main() {
+    let x: i8 = -118;
+    let y: i8 = -11;
+    let z = x + y;
+}
+```
+
+### Wrapping methods
+
+Although integer overflow is expected to error, some use cases rely on wrapping. For these use cases, the standard library provides `wrapping` variants of certain common operations:
+
+```rust
+fn wrapping_add<T>(x: T, y: T) -> T;
+fn wrapping_sub<T>(x: T, y: T) -> T;
+fn wrapping_mul<T>(x: T, y: T) -> T;
+```
+
+Example of how it is used:
+
+```rust
+fn main(x: u8, y: u8) -> pub u8 {
+    std::wrapping_add(x, y)
+}
+```
diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/references.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/references.md
new file mode 100644
index 00000000000..a5293d11cfb
--- /dev/null
+++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/references.md
@@ -0,0 +1,23 @@
+---
+title: References
+sidebar_position: 9
+---
+
+Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it.
+ +Example: + +```rust +fn main() { + let mut x = 2; + + // you can reference x as &mut and pass it to multiplyBy2 + multiplyBy2(&mut x); +} + +// you can access &mut here +fn multiplyBy2(x: &mut Field) { + // and dereference it with * + *x = *x * 2; +} +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/slices.mdx b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/slices.mdx new file mode 100644 index 00000000000..95da2030843 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/slices.mdx @@ -0,0 +1,358 @@ +--- +title: Slices +description: Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs. +keywords: [noir, slice type, methods, examples, subarrays] +sidebar_position: 5 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. + +```rust +fn main() -> pub u32 { + let mut slice: [Field] = &[0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +To write a slice literal, use a preceeding ampersand as in: `&[0; 2]` or +`&[1, 2, 3]`. + +It is important to note that slices are not references to arrays. In Noir, +`&[..]` is more similar to an immutable, growable vector. + +View the corresponding test file [here][test-file]. + +[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for slices: + +### push_back + +Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. + +```rust +fn push_back(_self: [T], _elem: T) -> [T] +``` + +example: + +```rust +fn main() -> pub Field { + let mut slice: [Field] = &[0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +### push_front + +Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. + +```rust +fn push_front(_self: Self, _elem: T) -> Self +``` + +Example: + +```rust +let mut new_slice: [Field] = &[]; +new_slice = new_slice.push_front(20); +assert(new_slice[0] == 20); // returns true +``` + +View the corresponding test file [here][test-file]. + +### pop_front + +Returns a tuple of two items, the first element of the array and the rest of the array. + +```rust +fn pop_front(_self: Self) -> (T, Self) +``` + +Example: + +```rust +let (first_elem, rest_of_slice) = slice.pop_front(); +``` + +View the corresponding test file [here][test-file]. + +### pop_back + +Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. + +```rust +fn pop_back(_self: Self) -> (Self, T) +``` + +Example: + +```rust +let (popped_slice, last_elem) = slice.pop_back(); +``` + +View the corresponding test file [here][test-file]. + +### append + +Loops over a slice and adds it to the end of another. 
+ +```rust +fn append(mut self, other: Self) -> Self +``` + +Example: + +```rust +let append = &[1, 2].append(&[3, 4, 5]); +``` + +### insert + +Inserts an element at a specified index and shifts all following elements by 1. + +```rust +fn insert(_self: Self, _index: Field, _elem: T) -> Self +``` + +Example: + +```rust +new_slice = rest_of_slice.insert(2, 100); +assert(new_slice[2] == 100); +``` + +View the corresponding test file [here][test-file]. + +### remove + +Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. + +```rust +fn remove(_self: Self, _index: Field) -> (Self, T) +``` + +Example: + +```rust +let (remove_slice, removed_elem) = slice.remove(3); +``` + +### len + +Returns the length of a slice + +```rust +fn len(self) -> Field +``` + +Example: + +```rust +fn main() { + let slice = &[42, 42]; + assert(slice.len() == 2); +} +``` + +### as_array + +Converts this slice into an array. + +Make sure to specify the size of the resulting array. +Panics if the resulting array length is different than the slice's length. + +```rust +fn as_array(self) -> [T; N] +``` + +Example: + +```rust +fn main() { + let slice = &[5, 6]; + + // Always specify the length of the resulting array! + let array: [Field; 2] = slice.as_array(); + + assert(array[0] == slice[0]); + assert(array[1] == slice[1]); +} +``` + +### map + +Applies a function to each element of the slice, returning a new slice containing the mapped elements. + +```rust +fn map(self, f: fn[Env](T) -> U) -> [U] +``` + +example + +```rust +let a = &[1, 2, 3]; +let b = a.map(|a| a * 2); // b is now &[2, 4, 6] +``` + +### fold + +Applies a function to each element of the slice, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(self, mut accumulator: U, f: fn[Env](U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the slice, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = &[1]; +let a2 = &[1, 2]; +let a3 = &[1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let slice = &[2, 2, 2, 2, 2]; + let folded = slice.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as the starting element. + +```rust +fn reduce(self, f: fn[Env](T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let slice = &[2, 2, 2, 2, 2]; + let reduced = slice.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### filter + +Returns a new slice containing only elements for which the given predicate returns true. + +```rust +fn filter(self, f: fn[Env](T) -> bool) -> Self +``` + +example: + +```rust +fn main() { + let slice = &[1, 2, 3, 4, 5]; + let odds = slice.filter(|x| x % 2 == 1); + assert_eq(odds, &[1, 3, 5]); +} +``` + +### join + +Flatten each element in the slice into one value, separated by `separator`. + +Note that although slices implement `Append`, `join` cannot be used on slice +elements since nested slices are prohibited. 
+ +```rust +fn join(self, separator: T) -> T where T: Append +``` + +example: + +```rust +struct Accumulator { + total: Field, +} + +// "Append" two accumulators by adding them +impl Append for Accumulator { + fn empty() -> Self { + Self { total: 0 } + } + + fn append(self, other: Self) -> Self { + Self { total: self.total + other.total } + } +} + +fn main() { + let slice = &[1, 2, 3, 4, 5].map(|total| Accumulator { total }); + + let result = slice.join(Accumulator::empty()); + assert_eq(result, Accumulator { total: 15 }); + + // We can use a non-empty separator to insert additional elements to sum: + let separator = Accumulator { total: 10 }; + let result = slice.join(separator); + assert_eq(result, Accumulator { total: 55 }); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(self, predicate: fn[Env](T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let slice = &[2, 2, 2, 2, 2]; + let all = slice.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(self, predicate: fn[Env](T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let slice = &[2, 2, 2, 2, 5]; + let any = slice.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/strings.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/strings.md new file mode 100644 index 00000000000..1fdee42425e --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/strings.md @@ -0,0 +1,79 @@ +--- +title: Strings +description: + Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. +keywords: + [ + noir, + string type, + methods, + examples, + concatenation, + ] +sidebar_position: 3 +--- + + +The string type is a fixed length value defined with `str`. + +You can use strings in `assert()` functions or print them with +`println()`. See more about [Logging](../../standard_library/logging.md). + +```rust + +fn main(message : pub str<11>, hex_as_string : str<4>) { + println(message); + assert(message == "hello world"); + assert(hex_as_string == "0x41"); +} +``` + +You can convert a `str` to a byte array by calling `as_bytes()` +or a vector by calling `as_bytes_vec()`. + +```rust +fn main() { + let message = "hello world"; + let message_bytes = message.as_bytes(); + let mut message_vec = message.as_bytes_vec(); + assert(message_bytes.len() == 11); + assert(message_bytes[0] == 104); + assert(message_bytes[0] == message_vec.get(0)); +} +``` + +## Escape characters + +You can use escape characters for your strings: + +| Escape Sequence | Description | +|-----------------|-----------------| +| `\r` | Carriage Return | +| `\n` | Newline | +| `\t` | Tab | +| `\0` | Null Character | +| `\"` | Double Quote | +| `\\` | Backslash | + +Example: + +```rust +let s = "Hello \"world" // prints "Hello "world" +let s = "hey \tyou"; // prints "hey you" +``` + +## Raw strings + +A raw string begins with the letter `r` and is optionally delimited by a number of hashes `#`. + +Escape characters are *not* processed within raw strings. All contents are interpreted literally. 
+ +Example: + +```rust +let s = r"Hello world"; +let s = r#"Simon says "hello world""#; + +// Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes +let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####; +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/structs.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/structs.md new file mode 100644 index 00000000000..dbf68c99813 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/structs.md @@ -0,0 +1,70 @@ +--- +title: Structs +description: + Explore the Struct data type in Noir. Learn about its methods, see real-world examples, and grasp how to effectively define and use Structs in your Noir programs. +keywords: + [ + noir, + struct type, + methods, + examples, + data structures, + ] +sidebar_position: 8 +--- + +A struct also allows for grouping multiple values of different types. Unlike tuples, we can also +name each field. + +> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the +> field type of Noir. + +Defining a struct requires giving it a name and listing each field within as `: ` pairs: + +```rust +struct Animal { + hands: Field, + legs: Field, + eyes: u8, +} +``` + +An instance of a struct can then be created with actual values in `: ` pairs in any +order. Struct fields are accessible using their given names: + +```rust +fn main() { + let legs = 4; + + let dog = Animal { + eyes: 2, + hands: 0, + legs, + }; + + let zero = dog.hands; +} +``` + +Structs can also be destructured in a pattern, binding each field to a new variable: + +```rust +fn main() { + let Animal { hands, legs: feet, eyes } = get_octopus(); + + let ten = hands + feet + eyes as u8; +} + +fn get_octopus() -> Animal { + let octopus = Animal { + hands: 0, + legs: 8, + eyes: 2, + }; + + octopus +} +``` + +The new variables can be bound with names different from the original struct field names, as +showcased in the `legs --> feet` binding in the example above. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/tuples.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/tuples.md new file mode 100644 index 00000000000..2ec5c9c4113 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/data_types/tuples.md @@ -0,0 +1,48 @@ +--- +title: Tuples +description: + Dive into the Tuple data type in Noir. Understand its methods, practical examples, and best practices for efficiently using Tuples in your Noir code. +keywords: + [ + noir, + tuple type, + methods, + examples, + multi-value containers, + ] +sidebar_position: 7 +--- + +A tuple collects multiple values like an array, but with the added ability to collect values of +different types: + +```rust +fn main() { + let tup: (u8, u64, Field) = (255, 500, 1000); +} +``` + +One way to access tuple elements is via destructuring using pattern matching: + +```rust +fn main() { + let tup = (1, 2); + + let (one, two) = tup; + + let three = one + two; +} +``` + +Another way to access tuple elements is via direct member access, using a period (`.`) followed by +the index of the element we want to access. 
Index `0` corresponds to the first tuple element, `1` to +the second and so on: + +```rust +fn main() { + let tup = (5, 6, 7, 8); + + let five = tup.0; + let eight = tup.3; +} +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/functions.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/functions.md new file mode 100644 index 00000000000..f656cdfd97a --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/functions.md @@ -0,0 +1,226 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. +keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +sidebar_position: 1 +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: + +```rust +fn main(x : Field) // this is fine: passing a Field +fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time +fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 +fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 + +fn main(x : Vec) // can't compile, has variable size +fn main(x : [Field]) // can't compile, has variable size +fn main(....// i think you got it by now +``` + +Keep in mind [tests](../../tooling/testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: + +```rust +fn main(x : [Field]) { + assert(x[0] == 1); +} + +#[test] +fn test_one() { + main(&[1, 2]); +} +``` + +```bash +$ nargo test +[testing] Running 1 test functions +[testing] Testing test_one... ok +[testing] All tests passed + +$ nargo check +The application panicked (crashed). +Message: Cannot have variable sized arrays as a parameter to main +``` + +## Call Expressions + +Calling a function in Noir is executed by using the function name and passing in the necessary +arguments. 
+ +Below we show how to call the `foo` function from the `main` function using a call expression: + +```rust +fn main(x : Field, y : Field) { + let z = foo(x); +} + +fn foo(x : Field) -> Field { + x + x +} +``` + +## Methods + +You can define methods in Noir on any struct type in scope. + +```rust +struct MyStruct { + foo: Field, + bar: Field, +} + +impl MyStruct { + fn new(foo: Field) -> MyStruct { + MyStruct { + foo, + bar: 2, + } + } + + fn sum(self) -> Field { + self.foo + self.bar + } +} + +fn main() { + let s = MyStruct::new(40); + assert(s.sum() == 42); +} +``` + +Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as +follows: + +```rust +assert(MyStruct::sum(s) == 42); +``` + +It is also possible to specialize which method is chosen depending on the [generic](./generics.md) type that is used. In this example, the `foo` function returns different values depending on its type: + +```rust +struct Foo {} + +impl Foo { + fn foo(self) -> Field { 1 } +} + +impl Foo { + fn foo(self) -> Field { 2 } +} + +fn main() { + let f1: Foo = Foo{}; + let f2: Foo = Foo{}; + assert(f1.foo() + f2.foo() == 3); +} +``` + +Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo` and `Foo` like we do above, we cannot also define `foo` in an `impl Foo` since it would be ambiguous which version of `foo` to choose. + +```rust +// Including this impl in the same project as the above snippet would +// cause an overlapping impls error +impl Foo { + fn foo(self) -> Field { 3 } +} +``` + +## Lambdas + +Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +See [Lambdas](./lambdas.md) for more details. + +## Attributes + +Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`. + +Supported attributes include: + +- **builtin**: the function is implemented by the compiler, for efficiency purposes. +- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` +- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details +- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](../../reference/NoirJS/noir_js/index.md) for more details. +- **test**: mark the function as unit tests. See [Tests](../../tooling/testing.md) for more details + +### Field Attribute + +The field attribute defines which field the function is compatible for. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field. +The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form. +As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve. + +Example: we define the function `foo()` three times below. 
Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve. + +```rust +#[field(bn254)] +fn foo() -> u32 { + 1 +} + +#[field(23)] +fn foo() -> u32 { + 2 +} + +// This commented code would not compile as foo would be defined twice because it is the same field as bn254 +// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)] +// fn foo() -> u32 { +// 2 +// } + +#[field(bls12_381)] +fn foo() -> u32 { + 3 +} +``` + +If the field name is not known to Noir, it will discard the function. Field names are case insensitive. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/generics.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/generics.md new file mode 100644 index 00000000000..0c1c27a2221 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/generics.md @@ -0,0 +1,106 @@ +--- +title: Generics +description: Learn how to use Generics in Noir +keywords: [Noir, Rust, generics, functions, structs] +sidebar_position: 7 +--- + +Generics allow you to use the same functions with multiple different concrete data types. You can +read more about the concept of generics in the Rust documentation +[here](https://doc.rust-lang.org/book/ch10-01-syntax.html). + +Here is a trivial example showing the identity function that supports any type. In Rust, it is +common to refer to the most general type as `T`. We follow the same convention in Noir. + +```rust +fn id<T>(x: T) -> T { + x +} +``` + +## In Structs + +Generics are useful for specifying types in structs. For example, we can specify that a field in a +struct will be of a certain generic type. In this case `value` is of type `T`. + +```rust +struct RepeatedValue<T> { + value: T, + count: Field, +} + +impl<T> RepeatedValue<T> { + fn print(self) { + for _i in 0 .. self.count { + println(self.value); + } + } +} + +fn main() { + let repeated = RepeatedValue { value: "Hello!", count: 2 }; + repeated.print(); +} +``` + +The `print` function will print `Hello!` as many times as `count` specifies - twice in this case. + +If we want to be generic over array lengths (which are type-level integers), we can use numeric +generics. Using these looks just like using regular generics, but these generics can resolve to +integers at compile-time, rather than resolving to types. Here's an example of a struct that is +generic over the size of the array it contains internally: + +```rust +struct BigInt<N> { + limbs: [u32; N], +} + +impl<N> BigInt<N> { + // `N` is in scope of all methods in the impl + fn first(first: BigInt<N>, second: BigInt<N>) -> Self { + assert(first.limbs != second.limbs); + first + } + + fn second(first: BigInt<N>, second: Self) -> Self { + assert(first.limbs != second.limbs); + second + } +} +``` + +## Calling functions on generic parameters + +Since a generic type `T` can represent any type, how can we call functions on the underlying type? +In other words, how can we go from "any type `T`" to "any type `T` that has certain methods available?" + +This is what [traits](../concepts/traits.md) are for in Noir.
Here's an example of a function generic over +any type `T` that implements the `Eq` trait for equality: + +```rust +fn first_element_is_equal<T, N>(array1: [T; N], array2: [T; N]) -> bool + where T: Eq +{ + if (array1.len() == 0) | (array2.len() == 0) { + true + } else { + array1[0] == array2[0] + } +} + +fn main() { + assert(first_element_is_equal([1, 2, 3], [1, 5, 6])); + + // We can use first_element_is_equal for arrays of any type + // as long as we have an Eq impl for the types we pass in + let array = [MyStruct::new(), MyStruct::new()]; + assert(first_element_is_equal(array, array)); +} + +impl Eq for MyStruct { + fn eq(self, other: MyStruct) -> bool { + self.foo == other.foo + } +} +``` + +You can find more details on traits and trait implementations on the [traits page](../concepts/traits.md). diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/globals.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/globals.md new file mode 100644 index 00000000000..063a3d89248 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/globals.md @@ -0,0 +1,72 @@ +--- +title: Global Variables +description: + Learn about global variables in Noir. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, globals, global variables, constants] +sidebar_position: 8 +--- + +## Globals + + +Noir supports global variables. A global's type can be inferred entirely by the compiler: + +```rust +global N = 5; // Same as `global N: Field = 5` + +global TUPLE = (3, 2); + +fn main() { + assert(N == 5); + assert(N == TUPLE.0 + TUPLE.1); +} +``` + +:::info + +Globals can be defined as any expression, so long as they don't depend on themselves - otherwise there would be a dependency cycle! For example: + +```rust +global T = foo(T); // dependency error +``` + +::: + + +If they are initialized to a literal integer, globals can be used to specify an array's length: + +```rust +global N: Field = 2; + +fn main(y : [Field; N]) { + assert(y[0] == y[1]) +} +``` + +A global from another module can be imported or referenced externally like any other name: + +```rust +global N = 20; + +fn main() { + assert(my_submodule::N != N); +} + +mod my_submodule { + global N: Field = 10; +} +``` + +When a global is used, Noir replaces the name with its definition on each occurrence. +This means globals defined using function calls will repeat the call each time they're used: + +```rust +global RESULT = foo(); + +fn foo() -> [Field; 100] { ... } +``` + +This is usually fine since Noir will generally optimize any function call that does not +refer to a program input into a constant. Keep in mind, however, that if the called +function performs side effects like `println`, these will still occur on each use. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/lambdas.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/lambdas.md new file mode 100644 index 00000000000..be3c7e0b5ca --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/lambdas.md @@ -0,0 +1,81 @@ +--- +title: Lambdas +description: Learn how to use anonymous functions in Noir programming language. +keywords: [Noir programming language, lambda, closure, function, anonymous function] +sidebar_position: 9 +--- + +## Introduction + +Lambdas are anonymous functions. The syntax is `|arg1, arg2, ..., argN| return_expression`.
+ +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +A block can be used as the body of a lambda, allowing you to declare local variables inside it: + +```rust +let cool = || { + let x = 100; + let y = 100; + x + y +} + +assert(cool() == 200); +``` + +## Closures + +Inside the body of a lambda, you can use variables defined in the enclosing function. Such lambdas are called **closures**. In this example `x` is defined inside `main` and is accessed from within the lambda: + +```rust +fn main() { + let x = 100; + let closure = || x + 150; + assert(closure() == 250); +} +``` + +## Passing closures to higher-order functions + +It may catch you by surprise that the following code fails to compile: + +```rust +fn foo(f: fn () -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // error :( +} +``` + +The reason is that the closure's capture environment affects its type - we have a closure that captures two Fields and `foo` +expects a regular function as an argument - those are incompatible. +:::note + +Variables contained within the `||` are the closure's parameters, and the expression that follows it is the closure's body. The capture environment is comprised of any variables used in the closure's body that are not parameters. + +E.g. in |x| x + y, y would be a captured variable, but x would not be, since it is a parameter of the closure. + +::: +The syntax for the type of a closure is `fn[env](args) -> ret_type`, where `env` is the capture environment of the closure - +in this example that's `(Field, Field)`. + +The best solution in our case is to make `foo` generic over the environment type of its parameter, so that it can be called +with closures with any environment, as well as with regular functions: + +```rust +fn foo(f: fn[Env]() -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // compiles fine + assert(foo(|| 60) == 60); // compiles fine +} +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/mutability.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/mutability.md new file mode 100644 index 00000000000..fdeef6a87c5 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/mutability.md @@ -0,0 +1,121 @@ +--- +title: Mutability +description: + Learn about mutable variables in Noir. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, mutability in noir, mutable variables] +sidebar_position: 8 +--- + +Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +let y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc. +let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. 
+ +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: Field) { + x = 4; +} +``` + +## Non-local mutability + +Non-local mutability can be achieved through the mutable reference type `&mut T`: + +```rust +fn set_to_zero(x: &mut Field) { + *x = 0; +} + +fn main() { + let mut y = 42; + set_to_zero(&mut y); + assert(y == 0); +} +``` + +When creating a mutable reference, the original variable being referred to (`y` in this +example) must also be mutable. Since mutable references are a reference type, they must +be explicitly dereferenced via `*` to retrieve the underlying value. Note that this yields +a copy of the value, so mutating this copy will not change the original value behind the +reference: + +```rust +fn main() { + let mut x = 1; + let x_ref = &mut x; + + let mut y = *x_ref; + let y_ref = &mut y; + + x = 2; + *x_ref = 3; + + y = 4; + *y_ref = 5; + + assert(x == 3); + assert(*x_ref == 3); + assert(y == 5); + assert(*y_ref == 5); +} +``` + +Note that types in Noir are actually deeply immutable so the copy that occurs when +dereferencing is only a conceptual copy - no additional constraints will occur. + +Mutable references can also be stored within structs. Note that there is also +no lifetime parameter on these unlike Rust. This is because the allocated memory +always lasts the entire program - as if it were an array of one element. + +```rust +struct Foo { + x: &mut Field +} + +impl Foo { + fn incr(mut self) { + *self.x += 1; + } +} + +fn main() { + let foo = Foo { x: &mut 0 }; + foo.incr(); + assert(*foo.x == 1); +} +``` + +In general, you should avoid non-local & shared mutability unless it is needed. Sticking +to only local mutability will improve readability and potentially improve compiler optimizations as well. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/ops.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/ops.md new file mode 100644 index 00000000000..c35c36c38a9 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/ops.md @@ -0,0 +1,98 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +sidebar_position: 3 +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer, shift must be u8 | +| >> | Right shift an integer by another integer amount | Types must be integer, shift must be u8 | +| !
| Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. + +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/oracles.mdx b/docs/versioned_docs/version-v0.32.0/noir/concepts/oracles.mdx new file mode 100644 index 00000000000..77a2ac1550a --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/oracles.mdx @@ -0,0 +1,29 @@ +--- +title: Oracles +description: Dive into how Noir supports Oracles via RPC calls, and learn how to declare an Oracle in Noir with our comprehensive guide. +keywords: + - Noir + - Oracles + - RPC Calls + - Unconstrained Functions + - Programming + - Blockchain +sidebar_position: 6 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +Noir has support for Oracles via RPC calls. This means Noir will make an RPC call and use the return value for proof generation. + +Since Oracles are not resolved by Noir, they are [`unconstrained` functions](./unconstrained.md) + +You can declare an Oracle through the `#[oracle()]` flag. Example: + +```rust +#[oracle(get_number_sequence)] +unconstrained fn get_number_sequence(_size: Field) -> [Field] {} +``` + +The timeout for when using an external RPC oracle resolver can be set with the `NARGO_FOREIGN_CALL_TIMEOUT` environment variable. This timeout is in units of milliseconds. 
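+ +For example (this exact invocation is an illustrative assumption rather than something prescribed by this page - any `nargo` command that triggers oracle calls respects the variable), the timeout could be raised to 30 seconds like so: + +```bash +# Hypothetical example: allow each oracle RPC call up to 30000 ms (30 s) before timing out +NARGO_FOREIGN_CALL_TIMEOUT=30000 nargo test +```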
diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/shadowing.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/shadowing.md new file mode 100644 index 00000000000..5ce6130d201 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/shadowing.md @@ -0,0 +1,44 @@ +--- +title: Shadowing +sidebar_position: 12 +--- + +Noir allows for inheriting variables' values and re-declaring them with the same name similar to Rust, known as shadowing. + +For example, the following function is valid in Noir: + +```rust +fn main() { + let x = 5; + + { + let x = x * 2; + assert (x == 10); + } + + assert (x == 5); +} +``` + +In this example, a variable x is first defined with the value 5. + +The local scope that follows shadows the original x, i.e. creates a local mutable x based on the value of the original x. It is given a value of 2 times the original x. + +When we return to the main scope, x once again refers to just the original x, which stays at the value of 5. + +## Temporal mutability + +One way that shadowing is useful, in addition to ergonomics across scopes, is for temporarily mutating variables. + +```rust +fn main() { + let age = 30; + // age = age + 5; // Would error as `age` is immutable by default. + + let mut age = age + 5; // Temporarily mutates `age` with a new value. + + let age = age; // Locks `age`'s mutability again. + + assert (age == 35); +} +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/traits.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/traits.md new file mode 100644 index 00000000000..51305b38c16 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/traits.md @@ -0,0 +1,405 @@ +--- +title: Traits +description: + Traits in Noir can be used to abstract out a common interface for functions across + several data types. +keywords: [noir programming language, traits, interfaces, generic, protocol] +sidebar_position: 14 +--- + +## Overview + +Traits in Noir are a useful abstraction similar to interfaces or protocols in other languages. Each trait defines +the interface of several methods contained within the trait. Types can then implement this trait by providing +implementations for these methods. For example in the program: + +```rust +struct Rectangle { + width: Field, + height: Field, +} + +impl Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +fn log_area(r: Rectangle) { + println(r.area()); +} +``` + +We have a function `log_area` to log the area of a `Rectangle`. Now how should we change the program if we want this +function to work on `Triangle`s as well?: + +```rust +struct Triangle { + width: Field, + height: Field, +} + +impl Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Making `log_area` generic over all types `T` would be invalid since not all types have an `area` method. Instead, we can +introduce a new `Area` trait and make `log_area` generic over all types `T` that implement `Area`: + +```rust +trait Area { + fn area(self) -> Field; +} + +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +We also need to explicitly implement `Area` for `Rectangle` and `Triangle`. We can do that by changing their existing +impls slightly. Note that the parameter types and return type of each of our `area` methods must match those defined +by the `Area` trait. 
+ +```rust +impl Area for Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +impl Area for Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Now we have a working program that is generic over any type of shape that is used! Others can even use this program +as a library with their own types - such as `Circle` - as long as they also implement `Area` for these types. + +## Where Clauses + +As seen in `log_area` above, when we want to create a function or method that is generic over any type that implements +a trait, we can add a where clause to the generic function. + +```rust +fn log_area<T>(shape: T) where T: Area { + println(shape.area()); +} +``` + +It is also possible to apply multiple trait constraints on the same variable at once by combining traits with the `+` +operator. Similarly, we can have multiple trait constraints by separating each with a comma: + +```rust +fn foo<T, U>(elements: [T], thing: U) where + T: Default + Add + Eq, + U: Bar, +{ + let mut sum = T::default(); + + for element in elements { + sum += element; + } + + if sum == T::default() { + thing.bar(); + } +} +``` + +## Generic Implementations + +You can add generics to a trait implementation by adding the generic list after the `impl` keyword: + +```rust +trait Second { + fn second(self) -> Field; +} + +impl<T> Second for (T, Field) { + fn second(self) -> Field { + self.1 + } +} +``` + +You can also implement a trait for every type this way: + +```rust +trait Debug { + fn debug(self); +} + +impl<T> Debug for T { + fn debug(self) { + println(self); + } +} + +fn main() { + 1.debug(); +} +``` + +### Generic Trait Implementations With Where Clauses + +Where clauses can be placed on trait implementations themselves to restrict generics in a similar way. +For example, while `impl<T> Foo for T` implements the trait `Foo` for every type, `impl<T> Foo for T where T: Bar` +will implement `Foo` only for types that also implement `Bar`. This is often used for implementing generic types. +For example, here is the implementation for array equality: + +```rust +impl<T, N> Eq for [T; N] where T: Eq { + // Test if two arrays have the same elements. + // Because both arrays must have length N, we know their lengths already match. + fn eq(self, other: Self) -> bool { + let mut result = true; + + for i in 0 .. self.len() { + // The T: Eq constraint is needed to call == on the array elements here + result &= self[i] == other[i]; + } + + result + } +} +``` + +Where clauses can also be placed on struct implementations. +For example, here is a method utilizing a generic type that implements the equality trait: + +```rust +struct Foo<T> { + a: u32, + b: T, +} + +impl<T> Foo<T> where T: Eq { + fn eq(self, other: Self) -> bool { + (self.a == other.a) & self.b.eq(other.b) + } +} +``` + +## Generic Traits + +Traits themselves can also be generic by placing the generic arguments after the trait name. These generics are in +scope of every item within the trait. + +```rust +trait Into<T> { + // Convert `self` to type `T` + fn into(self) -> T; +} +``` + +When implementing generic traits the generic arguments of the trait must be specified. This is also true any time +a generic trait is referenced (e.g. in a `where` clause).
+ +```rust +struct MyStruct { + array: [Field; 2], +} + +impl Into<[Field; 2]> for MyStruct { + fn into(self) -> [Field; 2] { + self.array + } +} + +fn as_array<T>(x: T) -> [Field; 2] + where T: Into<[Field; 2]> +{ + x.into() +} + +fn main() { + let array = [1, 2]; + let my_struct = MyStruct { array }; + + assert_eq(as_array(my_struct), array); +} +``` + +## Trait Methods With No `self` + +A trait can contain any number of methods, each of which has access to the `Self` type which represents each type +that eventually implements the trait. Similarly, the `self` variable is available as well but is not required to be used. +For example, we can define a trait to create a default value for a type. This trait will need to return the `Self` type +but doesn't need to take any parameters: + +```rust +trait Default { + fn default() -> Self; +} +``` + +Implementing this trait can be done similarly to any other trait: + +```rust +impl Default for Field { + fn default() -> Field { + 0 + } +} + +struct MyType {} + +impl Default for MyType { + fn default() -> Self { + MyType {} + } +} +``` + +However, since there is no `self` parameter, we cannot call it via the method call syntax `object.method()`. +Instead, we'll need to refer to the function directly. This can be done either by referring to the +specific impl `MyType::default()` or referring to the trait itself `Default::default()`. In the latter +case, type inference determines the impl that is selected. + +```rust +let my_type = MyType::default(); + +let x: Field = Default::default(); +let result = x + Default::default(); +``` + +:::warning + +```rust +let _ = Default::default(); +``` + +If type inference cannot select which impl to use because of an ambiguous `Self` type, an impl will be +arbitrarily selected. This occurs most often when the result of a trait function call with no parameters +is unused. To avoid this, when calling a trait function with no `self` or `Self` parameters or return type, +always refer to it via the implementation type's namespace - e.g. `MyType::default()`. +This is set to change to an error in future Noir versions. + +::: + +## Default Method Implementations + +A trait can also have default implementations of its methods by giving a body to the desired functions. +Note that this body must be valid for all types that may implement the trait. As a result, the only +valid operations on `self` will be operations valid for any type or other operations on the trait itself. + +```rust +trait Numeric { + fn add(self, other: Self) -> Self; + + // Default implementation of double is (self + self) + fn double(self) -> Self { + self.add(self) + } +} +``` + +When implementing a trait with default functions, a type may choose to implement only the required functions: + +```rust +impl Numeric for Field { + fn add(self, other: Field) -> Field { + self + other + } +} +``` + +Or it may implement the optional methods as well: + +```rust +impl Numeric for u32 { + fn add(self, other: u32) -> u32 { + self + other + } + + fn double(self) -> u32 { + self * 2 + } +} +``` + +## Impl Specialization + +When implementing traits for a generic type, it is possible to implement the trait for only a certain combination +of generics. This can be either as an optimization or because those specific generics are required to implement the trait.
+ +```rust +trait Sub { + fn sub(self, other: Self) -> Self; +} + +struct NonZero<T> { + value: T, +} + +impl Sub for NonZero<Field> { + fn sub(self, other: Self) -> Self { + let value = self.value - other.value; + assert(value != 0); + NonZero { value } + } +} +``` + +## Overlapping Implementations + +Overlapping implementations are disallowed by Noir to ensure Noir's decision on which impl to select is never ambiguous. +This means if a trait `Foo` is already implemented +by a type `Bar<T>` for all `T`, then we cannot also have a separate impl for `Bar<Field>` (or any other +type argument). Similarly, if there is an impl for all `T` such as `impl<T> Debug for T`, we cannot create +any more impls of `Debug` for other types since it would be ambiguous which impl to choose for any given +method call. + +```rust +trait Trait {} + +// Previous impl defined here +impl<A, B> Trait for (A, B) {} + +// error: Impl for type `(Field, Field)` overlaps with existing impl +impl Trait for (Field, Field) {} +``` + +## Trait Coherence + +Another restriction on trait implementations is coherence. This restriction ensures other crates cannot create +impls that may overlap with other impls, even if several unrelated crates are used as dependencies in the same +program. + +The coherence restriction is: to implement a trait, either the trait itself or the object type must be declared +in the crate the impl is in. + +In practice this often comes up when using types provided by libraries. If a library provides a type `Foo` that does +not implement a trait in the standard library such as `Default`, you may not `impl Default for Foo` in your own crate. +While restrictive, this prevents later issues or silent changes in the program if the `Foo` library later added its +own impl for `Default`. If you are a user of the `Foo` library in this scenario and need a trait not implemented by the +library, your choices are to either submit a patch to the library or use the newtype pattern. + +### The Newtype Pattern + +The newtype pattern gets around the coherence restriction by creating a new wrapper type around the library type +that we cannot create `impl`s for. Since the new wrapper type is defined in our current crate, we can create +impls for any trait we need on it. + +```rust +struct Wrapper { + foo: some_library::Foo, +} + +impl Default for Wrapper { + fn default() -> Wrapper { + Wrapper { + foo: some_library::Foo::new(), + } + } +} +``` + +Since we have an impl for our own type, the behavior of this code will not change even if `some_library` is updated +to provide its own `impl Default for Foo`. The downside of this pattern is that it requires extra wrapping and +unwrapping of values when converting to and from the `Wrapper` and `Foo` types. diff --git a/docs/versioned_docs/version-v0.32.0/noir/concepts/unconstrained.md b/docs/versioned_docs/version-v0.32.0/noir/concepts/unconstrained.md new file mode 100644 index 00000000000..96f824c5e42 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/concepts/unconstrained.md @@ -0,0 +1,99 @@ +--- +title: Unconstrained Functions +description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." + +keywords: [Noir programming language, unconstrained, open] +sidebar_position: 5 +--- + +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. + +## Why?
+ +Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP-complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. + +Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. + +Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. + +A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. + +## Example + +An in-depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. + +Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 +Backend circuit size: 3619 +``` + +A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However, we can save a bunch of gates by casting to `u8` a bit earlier. This automatically truncates the bit-shifted value to fit in a `u8` which allows us to remove the AND against 0xff. This saves us ~480 gates in total. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75 +Backend circuit size: 3143 +``` + +Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating `out` using `num`, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in. + +It turns out that truncating a `u72` into a `u8` is hard to do inside a snark; each time we do `as u8` we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate `num` from `out` than the other way around. All we need to do is multiply each element of `out` by a constant and add them all together, both relatively easy operations inside a snark. + +We can then run `u72_to_u8` as unconstrained brillig code in order to calculate `out`, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back `num`.
This looks a little like the below: + +```rust +fn main(num: u72) -> pub [u8; 8] { + let out = u72_to_u8(num); + + let mut reconstructed_num: u72 = 0; + for i in 0..8 { + reconstructed_num += (out[i] as u72 << (56 - (8 * i))); + } + assert(num == reconstructed_num); + out +} + +unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 +Backend circuit size: 2902 +``` + +This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates). + +Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. + +## Break and Continue + +In addition to loops over runtime bounds, `break` and `continue` are also available in unconstrained code. See [break and continue](../concepts/control_flow.md#break-and-continue) diff --git a/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/_category_.json b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/_category_.json new file mode 100644 index 00000000000..1debcfe7675 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Modules, Packages and Crates", + "position": 2, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/crates_and_packages.md new file mode 100644 index 00000000000..95ee9f52ab2 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/crates_and_packages.md @@ -0,0 +1,43 @@ +--- +title: Crates and Packages +description: Learn how to use Crates and Packages in your Noir project +keywords: [Nargo, dependencies, package management, crates, package] +sidebar_position: 0 +--- + +## Crates + +A crate is the smallest amount of code that the Noir compiler considers at a time. +Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. + +### Crate Types + +A Noir crate can come in several forms: binaries, libraries or contracts. + +#### Binaries + +_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. + +#### Libraries + +_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. + +#### Contracts + +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). 
You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/noir-projects/noir-contracts/contracts). + +### Crate Root + +Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. + +## Packages + +A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. + +A package _must_ contain either a library or a binary crate, but not both. + +### Differences from Cargo Packages + +One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. + +In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/dependencies.md new file mode 100644 index 00000000000..24e02de08fe --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/dependencies.md @@ -0,0 +1,124 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. +keywords: [Nargo, dependencies, GitHub, package management, versioning] +sidebar_position: 1 +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag to a specific commit and the git url to the url containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag` , there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "noir-contracts/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine. + +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── lib_a + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +lib_a = { path = "../lib_a" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. 
For example, to import the +ecrecover-noir library and local lib_a referenced above: + +```rust +use ecrecover; +use lib_a; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use std::hash::sha256; +use std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives.md#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces.md) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use phy_vector; + +fn main(x : Field, y : pub Field) { + //... + let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/modules.md new file mode 100644 index 00000000000..16b6307d2fd --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/modules.md @@ -0,0 +1,185 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module system. Examples included. +keywords: [Noir, Rust, modules, organizing files, sub-modules] +sidebar_position: 2 +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. 
+ +## Purpose of Modules + +Modules are used to organize files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +The module filename may also be the name of the module as a directory with the contents in a +file named `mod.nr` within that directory. The above example can alternatively be expressed like this: + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo/mod.nr` + +```rust +fn from_foo() {} +``` + +Note that it's an error to have both files `src/foo.nr` and `src/foo/mod.nr` in the filesystem. + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. + +### Sub-modules + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +fn from_bar() {} +``` + +In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule +of `foo` hence we declare bar in `foo.nr` with `mod bar`. Since `foo` is not the crate root, the +compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` + +Visually the module hierarchy looks as follows: + +``` +crate + ├── main + │ + └── foo + ├── from_foo + └── bar + └── from_bar +``` + +Similar to importing a module in the crate root, modules can be placed in a `mod.nr` file, like this: + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo/mod.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar/mod.nr` + +```rust +fn from_bar() {} +``` + +### Referencing a parent module + +Given a submodule, you can refer to its parent module using the `super` keyword. 
+ +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; + +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +// Same as foo::from_foo +use super::from_foo; + +fn from_bar() { + from_foo(); // invokes super::from_foo(), which is foo::from_foo() + super::from_foo(); // also invokes foo::from_foo() +} +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/workspaces.md b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/workspaces.md new file mode 100644 index 00000000000..513497f12bf --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/modules_packages_crates/workspaces.md @@ -0,0 +1,42 @@ +--- +title: Workspaces +sidebar_position: 3 +--- + +Workspaces are a feature of nargo that allows you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations. + +Each Noir project (with its own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`. + +For a project with the following structure: + +```tree +├── crates +│ ├── a +│ │ ├── Nargo.toml +│ │ └── Prover.toml +│ │ └── src +│ │ └── main.nr +│ └── b +│ ├── Nargo.toml +│ └── Prover.toml +│ └── src +│ └── main.nr +│ +└── Nargo.toml +``` + +You can define a workspace in Nargo.toml like so: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. + +`default-member` indicates which package various commands process by default. + +Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/_category_.json b/docs/versioned_docs/version-v0.32.0/noir/standard_library/_category_.json new file mode 100644 index 00000000000..af04c0933fd --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Standard Library", + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/bigint.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/bigint.md new file mode 100644 index 00000000000..2bfdeec6631 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/bigint.md @@ -0,0 +1,122 @@ +--- +title: Big Integers +description: How to use big integers from Noir standard library +keywords: + [ + Big Integer, + Noir programming language, + Noir libraries, + ] +--- + +The BigInt module in the standard library exposes some classes of integers which do not fit (well) into a Noir native field. It implements modular arithmetic, modulo a 'big' prime number. + +:::note + +The module can currently be considered as `Field`s with fixed modulo sizes used by a set of elliptic curves, in addition to just the native curve.
[More work](https://github.com/noir-lang/noir/issues/510) is needed to achieve arbitrarily sized big integers. + +::: + +Currently 6 classes of integers (i.e. 'big' prime numbers) are available in the module, namely: + +- BN254 Fq: Bn254Fq +- BN254 Fr: Bn254Fr +- Secp256k1 Fq: Secpk1Fq +- Secp256k1 Fr: Secpk1Fr +- Secp256r1 Fr: Secpr1Fr +- Secp256r1 Fq: Secpr1Fq + +Here, XXX Fq and XXX Fr denote respectively the order of the base field and the scalar field of the (usual) elliptic curve XXX. +For instance, the big integer 'Secpk1Fq' in the standard library refers to integers modulo $2^{256}-2^{32}-977$. + +Feel free to explore the source code for the other primes: + +```rust title="big_int_definition" showLineNumbers +struct BigInt { + pointer: u32, + modulus: u32, +} +``` +> Source code: noir_stdlib/src/bigint.nr#L14-L19 + + +## Example usage + +A common use case is constructing a big integer from its byte representation and performing arithmetic operations on it: + +```rust title="big_int_example" showLineNumbers +fn big_int_example(x: u8, y: u8) { + let a = Secpk1Fq::from_le_bytes(&[x, y, 0, 45, 2]); + let b = Secpk1Fq::from_le_bytes(&[y, x, 9]); + let c = (a + b) * b / a; + let d = c.to_le_bytes(); + println(d[0]); +} +``` +> Source code: test_programs/execution_success/bigint/src/main.nr#L70-L78 + + +## Methods + +The available operations for each big integer are: + +### from_le_bytes + +Construct a big integer from its little-endian bytes representation. Example: + +```rust + // Construct a big integer from a slice of bytes + let a = Secpk1Fq::from_le_bytes(&[x, y, 0, 45, 2]); + // Construct a big integer from an array of 32 bytes + let a = Secpk1Fq::from_le_bytes_32([1;32]); + ``` + +### to_le_bytes + +Return the little-endian bytes representation of a big integer. Example: + +```rust +let bytes = a.to_le_bytes(); +``` + +### add + +Add two big integers. Example: + +```rust +let sum = a + b; +``` + +### sub + +Subtract two big integers. Example: + +```rust +let difference = a - b; +``` + +### mul + +Multiply two big integers. Example: + +```rust +let product = a * b; +``` + +### div + +Divide two big integers. Note that division is field division and not Euclidean division. Example: + +```rust +let quotient = a / b; +``` + +### eq + +Compare two big integers. Example: + +```rust +let are_equal = a == b; +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/black_box_fns.md new file mode 100644 index 00000000000..d5694250f05 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/black_box_fns.md @@ -0,0 +1,32 @@ +--- +title: Black Box Functions +description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. +keywords: [noir, black box functions] +--- + +Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-SNARK-unfriendly computations cheaper than if they were implemented in Noir. + +The ACVM spec defines a set of black box functions which backends will be expected to implement. This allows backends to use optimized implementations of these constraints if they have them, however they may also fall back to less efficient naive implementations if not.
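+ +As a rough sketch (the function below and its parameters are illustrative assumptions rather than anything defined by the ACVM spec), here is how ordinary Noir code ends up reaching black box functions: + +```rust +fn main(x: u32, y: u32, preimage: [u8; 32]) -> pub u8 { + // The bitwise `&` on integer types is lowered to the AND black box function. + let masked = x & y; + assert(masked <= x); + + // std::hash::sha256 is backed by the SHA256 black box function, + // so a backend can use a specialized implementation for it. + let digest = std::hash::sha256(preimage); + digest[0] +} +```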
+ +## Function list + +Here is a list of the current black box functions: + +- [AES128](./cryptographic_primitives/ciphers.mdx#aes128) +- [SHA256](./cryptographic_primitives/hashes.mdx#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr.mdx) +- [Blake2s](./cryptographic_primitives/hashes.mdx#blake2s) +- [Blake3](./cryptographic_primitives/hashes.mdx#blake3) +- [Pedersen Hash](./cryptographic_primitives/hashes.mdx#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes.mdx#pedersen_commitment) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification.mdx) +- [Embedded curve operations (MSM, addition, ...)](./cryptographic_primitives/embedded_curve_ops.mdx) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes.mdx#keccak256) +- [Recursive proof verification](./recursion.md) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/bn254.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/bn254.md new file mode 100644 index 00000000000..3294f005dbb --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/bn254.md @@ -0,0 +1,46 @@ +--- +title: Bn254 Field Library +--- + +Noir provides a module in standard library with some optimized functions for bn254 Fr in `std::field::bn254`. + +## decompose + +```rust +fn decompose(x: Field) -> (Field, Field) {} +``` + +Decomposes a single field into two fields, low and high. The low field contains the lower 16 bytes of the input field and the high field contains the upper 16 bytes of the input field. Both field results are range checked to 128 bits. + + +## assert_gt + +```rust +fn assert_gt(a: Field, b: Field) {} +``` + +Asserts that a > b. This will generate less constraints than using `assert(gt(a, b))`. + +## assert_lt + +```rust +fn assert_lt(a: Field, b: Field) {} +``` + +Asserts that a < b. This will generate less constraints than using `assert(lt(a, b))`. + +## gt + +```rust +fn gt(a: Field, b: Field) -> bool {} +``` + +Returns true if a > b. + +## lt + +```rust +fn lt(a: Field, b: Field) -> bool {} +``` + +Returns true if a < b. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/boundedvec.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/boundedvec.md new file mode 100644 index 00000000000..604d84d5ba4 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/boundedvec.md @@ -0,0 +1,419 @@ +--- +title: Bounded Vectors +keywords: [noir, vector, bounded vector, slice] +sidebar_position: 1 +--- + +A `BoundedVec` is a growable storage similar to a `Vec` except that it +is bounded with a maximum possible length. Unlike `Vec`, `BoundedVec` is not implemented +via slices and thus is not subject to the same restrictions slices are (notably, nested +slices - and thus nested vectors as well - are disallowed). + +Since a BoundedVec is backed by a normal array under the hood, growing the BoundedVec by +pushing an additional element is also more efficient - the length only needs to be increased +by one. 
+ +For these reasons `BoundedVec` should generally be preferred over `Vec` when there +is a reasonable maximum bound that can be placed on the vector. + +Example: + +```rust +let mut vector: BoundedVec = BoundedVec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +assert(vector.max_len() == 10); +``` + +## Methods + +### new + +```rust +pub fn new() -> Self +``` + +Creates a new, empty vector of length zero. + +Since this container is backed by an array internally, it still needs an initial value +to give each element. To resolve this, each element is zeroed internally. This value +is guaranteed to be inaccessible unless `get_unchecked` is used. + +Example: + +```rust +let empty_vector: BoundedVec = BoundedVec::new(); +assert(empty_vector.len() == 0); +``` + +Note that whenever calling `new` the maximum length of the vector should always be specified +via a type signature: + +```rust title="new_example" showLineNumbers +fn foo() -> BoundedVec { + // Ok! MaxLen is specified with a type annotation + let v1: BoundedVec = BoundedVec::new(); + let v2 = BoundedVec::new(); + + // Ok! MaxLen is known from the type of foo's return value + v2 +} + +fn bad() { + let mut v3 = BoundedVec::new(); + + // Not Ok! We don't know if v3's MaxLen is at least 1, and the compiler often infers 0 by default. + v3.push(5); +} +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L11-L27 + + +This defaulting of `MaxLen` (and numeric generics in general) to zero may change in future noir versions +but for now make sure to use type annotations when using bounded vectors. Otherwise, you will receive a constraint failure at runtime when the vec is pushed to. + +### get + +```rust +pub fn get(self, index: u64) -> T { +``` + +Retrieves an element from the vector at the given index, starting from zero. + +If the given index is equal to or greater than the length of the vector, this +will issue a constraint failure. + +Example: + +```rust +fn foo(v: BoundedVec) { + let first = v.get(0); + let last = v.get(v.len() - 1); + assert(first != last); +} +``` + +### get_unchecked + +```rust +pub fn get_unchecked(self, index: u64) -> T { +``` + +Retrieves an element from the vector at the given index, starting from zero, without +performing a bounds check. + +Since this function does not perform a bounds check on length before accessing the element, +it is unsafe! Use at your own risk! + +Example: + +```rust title="get_unchecked_example" showLineNumbers +fn sum_of_first_three(v: BoundedVec) -> u32 { + // Always ensure the length is larger than the largest + // index passed to get_unchecked + assert(v.len() > 2); + let first = v.get_unchecked(0); + let second = v.get_unchecked(1); + let third = v.get_unchecked(2); + first + second + third +} +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L54-L64 + + +### set + +```rust +pub fn set(&mut self: Self, index: u64, value: T) { +``` + +Writes an element to the vector at the given index, starting from zero. + +If the given index is equal to or greater than the length of the vector, this will issue a constraint failure. + +Example: + +```rust +fn foo(v: BoundedVec) { + let first = v.get(0); + assert(first != 42); + v.set(0, 42); + let new_first = v.get(0); + assert(new_first == 42); +} +``` + +### set_unchecked + +```rust +pub fn set_unchecked(&mut self: Self, index: u64, value: T) -> T { +``` + +Writes an element to the vector at the given index, starting from zero, without performing a bounds check. 
+ +Since this function does not perform a bounds check on length before accessing the element, it is unsafe! Use at your own risk! + +Example: + +```rust title="set_unchecked_example" showLineNumbers +fn set_unchecked_example() { + let mut vec: BoundedVec = BoundedVec::new(); + vec.extend_from_array([1, 2]); + + // Here we're safely writing within the valid range of `vec` + // `vec` now has the value [42, 2] + vec.set_unchecked(0, 42); + + // We can then safely read this value back out of `vec`. + // Notice that we use the checked version of `get` which would prevent reading unsafe values. + assert_eq(vec.get(0), 42); + + // We've now written past the end of `vec`. + // As this index is still within the maximum potential length of `v`, + // it won't cause a constraint failure. + vec.set_unchecked(2, 42); + println(vec); + + // This will write past the end of the maximum potential length of `vec`, + // it will then trigger a constraint failure. + vec.set_unchecked(5, 42); + println(vec); +} +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L67-L91 + + + +### push + +```rust +pub fn push(&mut self, elem: T) { +``` + +Pushes an element to the end of the vector. This increases the length +of the vector by one. + +Panics if the new length of the vector will be greater than the max length. + +Example: + +```rust title="bounded-vec-push-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + + v.push(1); + v.push(2); + + // Panics with failed assertion "push out of bounds" + v.push(3); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L95-L103 + + +### pop + +```rust +pub fn pop(&mut self) -> T +``` + +Pops the element at the end of the vector. This will decrease the length +of the vector by one. + +Panics if the vector is empty. + +Example: + +```rust title="bounded-vec-pop-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + v.push(1); + v.push(2); + + let two = v.pop(); + let one = v.pop(); + + assert(two == 2); + assert(one == 1); + // error: cannot pop from an empty vector + // let _ = v.pop(); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L108-L120 + + +### len + +```rust +pub fn len(self) -> u64 { +``` + +Returns the current length of this vector + +Example: + +```rust title="bounded-vec-len-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + assert(v.len() == 0); + + v.push(100); + assert(v.len() == 1); + + v.push(200); + v.push(300); + v.push(400); + assert(v.len() == 4); + + let _ = v.pop(); + let _ = v.pop(); + assert(v.len() == 2); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L125-L140 + + +### max_len + +```rust +pub fn max_len(_self: BoundedVec) -> u64 { +``` + +Returns the maximum length of this vector. This is always +equal to the `MaxLen` parameter this vector was initialized with. + +Example: + +```rust title="bounded-vec-max-len-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + + assert(v.max_len() == 5); + v.push(10); + assert(v.max_len() == 5); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L145-L151 + + +### storage + +```rust +pub fn storage(self) -> [T; MaxLen] { +``` + +Returns the internal array within this vector. +Since arrays in Noir are immutable, mutating the returned storage array will not mutate +the storage held internally by this vector. + +Note that uninitialized elements may be zeroed out! 
+ +Example: + +```rust title="bounded-vec-storage-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + + assert(v.storage() == [0, 0, 0, 0, 0]); + + v.push(57); + assert(v.storage() == [57, 0, 0, 0, 0]); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L156-L163 + + +### extend_from_array + +```rust +pub fn extend_from_array(&mut self, array: [T; Len]) +``` + +Pushes each element from the given array to this vector. + +Panics if pushing each element would cause the length of this vector +to exceed the maximum length. + +Example: + +```rust title="bounded-vec-extend-from-array-example" showLineNumbers +let mut vec: BoundedVec = BoundedVec::new(); + vec.extend_from_array([2, 4]); + + assert(vec.len == 2); + assert(vec.get(0) == 2); + assert(vec.get(1) == 4); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L168-L175 + + +### extend_from_bounded_vec + +```rust +pub fn extend_from_bounded_vec(&mut self, vec: BoundedVec) +``` + +Pushes each element from the other vector to this vector. The length of +the other vector is left unchanged. + +Panics if pushing each element would cause the length of this vector +to exceed the maximum length. + +Example: + +```rust title="bounded-vec-extend-from-bounded-vec-example" showLineNumbers +let mut v1: BoundedVec = BoundedVec::new(); + let mut v2: BoundedVec = BoundedVec::new(); + + v2.extend_from_array([1, 2, 3]); + v1.extend_from_bounded_vec(v2); + + assert(v1.storage() == [1, 2, 3, 0, 0]); + assert(v2.storage() == [1, 2, 3, 0, 0, 0, 0]); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L180-L189 + + +### from_array + +```rust +pub fn from_array(array: [T; Len]) -> Self +``` + +Creates a new vector, populating it with values derived from an array input. +The maximum length of the vector is determined based on the type signature. + +Example: +```rust +let bounded_vec: BoundedVec = BoundedVec::from_array([1, 2, 3]) +``` + +### map + +```rust +pub fn map(self, f: fn[Env](T) -> U) -> BoundedVec +``` + +Creates a new vector of equal size by calling a closure on each element in this vector. + +Example: + +```rust title="bounded-vec-map-example" showLineNumbers +let vec: BoundedVec = BoundedVec::from_array([1, 2, 3, 4]); + let result = vec.map(|value| value * 2); +``` +> Source code: noir_stdlib/src/collections/bounded_vec.nr#L205-L208 + + +### any + +```rust +pub fn any(self, predicate: fn[Env](T) -> bool) -> bool +``` + +Returns true if the given predicate returns true for any element +in this vector. + +Example: + +```rust title="bounded-vec-any-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + v.extend_from_array([2, 4, 6]); + + let all_even = !v.any(|elem: u32| elem % 2 != 0); + assert(all_even); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L256-L262 + diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/hashmap.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/hashmap.md new file mode 100644 index 00000000000..8c50c7e774c --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/hashmap.md @@ -0,0 +1,570 @@ +--- +title: HashMap +keywords: [noir, map, hash, hashmap] +sidebar_position: 1 +--- + +`HashMap` is used to efficiently store and look up key-value pairs. + +`HashMap` is a bounded type which can store anywhere from zero to `MaxLen` total elements. 
+Note that due to hash collisions, the actual maximum number of elements stored by any particular +hashmap is likely lower than `MaxLen`. This is true even with cryptographic hash functions since +every hash value will be performed modulo `MaxLen`. + +When creating `HashMap`s, the `MaxLen` generic should always be specified if it is not already +known. Otherwise, the compiler may infer a different value for `MaxLen` (such as zero), which +will likely change the result of the program. This behavior is set to become an error in future +versions instead. + +Example: + +```rust +// Create a mapping from Fields to u32s with a maximum length of 12 +// using a poseidon2 hasher +use std::hash::poseidon2::Poseidon2Hasher; +let mut map: HashMap> = HashMap::default(); + +map.insert(1, 2); +map.insert(3, 4); + +let two = map.get(1).unwrap(); +``` + +## Methods + +### default + +```rust title="default" showLineNumbers +impl Default for HashMap +where + B: BuildHasher + Default, + H: Hasher + Default +{ + fn default() -> Self { +``` +> Source code: noir_stdlib/src/collections/map.nr#L462-L469 + + +Creates a fresh, empty HashMap. + +When using this function, always make sure to specify the maximum size of the hash map. + +This is the same `default` from the `Default` implementation given further below. It is +repeated here for convenience since it is the recommended way to create a hashmap. + +Example: + +```rust title="default_example" showLineNumbers +let hashmap: HashMap> = HashMap::default(); + assert(hashmap.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L201-L204 + + +Because `HashMap` has so many generic arguments that are likely to be the same throughout +your program, it may be helpful to create a type alias: + +```rust title="type_alias" showLineNumbers +type MyMap = HashMap>; +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L195-L197 + + +### with_hasher + +```rust title="with_hasher" showLineNumbers +pub fn with_hasher(_build_hasher: B) -> Self + where + B: BuildHasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L82-L86 + + +Creates a hashmap with an existing `BuildHasher`. This can be used to ensure multiple +hashmaps are created with the same hasher instance. + +Example: + +```rust title="with_hasher_example" showLineNumbers +let my_hasher: BuildHasherDefault = Default::default(); + let hashmap: HashMap> = HashMap::with_hasher(my_hasher); + assert(hashmap.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L206-L210 + + +### get + +```rust title="get" showLineNumbers +pub fn get( + self, + key: K + ) -> Option + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L278-L287 + + +Retrieves a value from the hashmap, returning `Option::none()` if it was not found. + +Example: + +```rust title="get_example" showLineNumbers +fn get_example(map: HashMap>) { + let x = map.get(12); + + if x.is_some() { + assert(x.unwrap() == 42); + } +} +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L298-L306 + + +### insert + +```rust title="insert" showLineNumbers +pub fn insert( + &mut self, + key: K, + value: V + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L313-L323 + + +Inserts a new key-value pair into the map. If the key was already in the map, its +previous value will be overridden with the newly provided one. 
+ +Example: + +```rust title="insert_example" showLineNumbers +let mut map: HashMap> = HashMap::default(); + map.insert(12, 42); + assert(map.len() == 1); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L212-L216 + + +### remove + +```rust title="remove" showLineNumbers +pub fn remove( + &mut self, + key: K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L356-L365 + + +Removes the given key-value pair from the map. If the key was not already present +in the map, this does nothing. + +Example: + +```rust title="remove_example" showLineNumbers +map.remove(12); + assert(map.is_empty()); + + // If a key was not present in the map, remove does nothing + map.remove(12); + assert(map.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L220-L227 + + +### is_empty + +```rust title="is_empty" showLineNumbers +pub fn is_empty(self) -> bool { +``` +> Source code: noir_stdlib/src/collections/map.nr#L115-L117 + + +True if the length of the hash map is empty. + +Example: + +```rust title="is_empty_example" showLineNumbers +assert(map.is_empty()); + + map.insert(1, 2); + assert(!map.is_empty()); + + map.remove(1); + assert(map.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L229-L237 + + +### len + +```rust title="len" showLineNumbers +pub fn len(self) -> u32 { +``` +> Source code: noir_stdlib/src/collections/map.nr#L264-L266 + + +Returns the current length of this hash map. + +Example: + +```rust title="len_example" showLineNumbers +// This is equivalent to checking map.is_empty() + assert(map.len() == 0); + + map.insert(1, 2); + map.insert(3, 4); + map.insert(5, 6); + assert(map.len() == 3); + + // 3 was already present as a key in the hash map, so the length is unchanged + map.insert(3, 7); + assert(map.len() == 3); + + map.remove(1); + assert(map.len() == 2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L239-L254 + + +### capacity + +```rust title="capacity" showLineNumbers +pub fn capacity(_self: Self) -> u32 { +``` +> Source code: noir_stdlib/src/collections/map.nr#L271-L273 + + +Returns the maximum capacity of this hashmap. This is always equal to the capacity +specified in the hashmap's type. + +Unlike hashmaps in general purpose programming languages, hashmaps in Noir have a +static capacity that does not increase as the map grows larger. Thus, this capacity +is also the maximum possible element count that can be inserted into the hashmap. +Due to hash collisions (modulo the hashmap length), it is likely the actual maximum +element count will be lower than the full capacity. + +Example: + +```rust title="capacity_example" showLineNumbers +let empty_map: HashMap> = HashMap::default(); + assert(empty_map.len() == 0); + assert(empty_map.capacity() == 42); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L256-L260 + + +### clear + +```rust title="clear" showLineNumbers +pub fn clear(&mut self) { +``` +> Source code: noir_stdlib/src/collections/map.nr#L93-L95 + + +Clears the hashmap, removing all key-value pairs from it. 
+ +Example: + +```rust title="clear_example" showLineNumbers +assert(!map.is_empty()); + map.clear(); + assert(map.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L262-L266 + + +### contains_key + +```rust title="contains_key" showLineNumbers +pub fn contains_key( + self, + key: K + ) -> bool + where + K: Hash + Eq, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L101-L110 + + +True if the hashmap contains the given key. Unlike `get`, this will not also return +the value associated with the key. + +Example: + +```rust title="contains_key_example" showLineNumbers +if map.contains_key(7) { + let value = map.get(7); + assert(value.is_some()); + } else { + println("No value for key 7!"); + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L268-L275 + + +### entries + +```rust title="entries" showLineNumbers +pub fn entries(self) -> BoundedVec<(K, V), N> { +``` +> Source code: noir_stdlib/src/collections/map.nr#L123-L125 + + +Returns a vector of each key-value pair present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. + +Example: + +```rust title="entries_example" showLineNumbers +let entries = map.entries(); + + // The length of a hashmap may not be compile-time known, so we + // need to loop over its capacity instead + for i in 0..map.capacity() { + if i < entries.len() { + let (key, value) = entries.get(i); + println(f"{key} -> {value}"); + } + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L309-L320 + + +### keys + +```rust title="keys" showLineNumbers +pub fn keys(self) -> BoundedVec { +``` +> Source code: noir_stdlib/src/collections/map.nr#L144-L146 + + +Returns a vector of each key present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. + +Example: + +```rust title="keys_example" showLineNumbers +let keys = map.keys(); + + for i in 0..keys.max_len() { + if i < keys.len() { + let key = keys.get_unchecked(i); + let value = map.get(key).unwrap_unchecked(); + println(f"{key} -> {value}"); + } + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L322-L332 + + +### values + +```rust title="values" showLineNumbers +pub fn values(self) -> BoundedVec { +``` +> Source code: noir_stdlib/src/collections/map.nr#L164-L166 + + +Returns a vector of each value present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. + +Example: + +```rust title="values_example" showLineNumbers +let values = map.values(); + + for i in 0..values.max_len() { + if i < values.len() { + let value = values.get_unchecked(i); + println(f"Found value {value}"); + } + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L334-L343 + + +### iter_mut + +```rust title="iter_mut" showLineNumbers +pub fn iter_mut( + &mut self, + f: fn(K, V) -> (K, V) + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L183-L192 + + +Iterates through each key-value pair of the HashMap, setting each key-value pair to the +result returned from the given function. + +Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated +through. If this is not desired, use `iter_values_mut` if only values need to be mutated, +or `entries` if neither keys nor values need to be mutated. + +The iteration order is left unspecified. 
As a result, if two keys are mutated to become +equal, which of the two values that will be present for the key in the resulting map is also unspecified. + +Example: + +```rust title="iter_mut_example" showLineNumbers +// Add 1 to each key in the map, and double the value associated with that key. + map.iter_mut(|k, v| (k + 1, v * 2)); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L347-L350 + + +### iter_keys_mut + +```rust title="iter_keys_mut" showLineNumbers +pub fn iter_keys_mut( + &mut self, + f: fn(K) -> K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L208-L217 + + +Iterates through the HashMap, mutating each key to the result returned from +the given function. + +Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated +through. If only iteration is desired and the keys are not intended to be mutated, +prefer using `entries` instead. + +The iteration order is left unspecified. As a result, if two keys are mutated to become +equal, which of the two values that will be present for the key in the resulting map is also unspecified. + +Example: + +```rust title="iter_keys_mut_example" showLineNumbers +// Double each key, leaving the value associated with that key untouched + map.iter_keys_mut(|k| k * 2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L352-L355 + + +### iter_values_mut + +```rust title="iter_values_mut" showLineNumbers +pub fn iter_values_mut(&mut self, f: fn(V) -> V) { +``` +> Source code: noir_stdlib/src/collections/map.nr#L233-L235 + + +Iterates through the HashMap, applying the given function to each value and mutating the +value to equal the result. This function is more efficient than `iter_mut` and `iter_keys_mut` +because the keys are untouched and the underlying hashmap thus does not need to be reordered. + +Example: + +```rust title="iter_values_mut_example" showLineNumbers +// Halve each value + map.iter_values_mut(|v| v / 2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L357-L360 + + +### retain + +```rust title="retain" showLineNumbers +pub fn retain(&mut self, f: fn(K, V) -> bool) { +``` +> Source code: noir_stdlib/src/collections/map.nr#L247-L249 + + +Retains only the key-value pairs for which the given function returns true. +Any key-value pairs for which the function returns false will be removed from the map. + +Example: + +```rust title="retain_example" showLineNumbers +map.retain(|k, v| (k != 0) & (v != 0)); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L280-L282 + + +## Trait Implementations + +### default + +```rust title="default" showLineNumbers +impl Default for HashMap +where + B: BuildHasher + Default, + H: Hasher + Default +{ + fn default() -> Self { +``` +> Source code: noir_stdlib/src/collections/map.nr#L462-L469 + + +Constructs an empty HashMap. + +Example: + +```rust title="default_example" showLineNumbers +let hashmap: HashMap> = HashMap::default(); + assert(hashmap.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L201-L204 + + +### eq + +```rust title="eq" showLineNumbers +impl Eq for HashMap +where + K: Eq + Hash, + V: Eq, + B: BuildHasher, + H: Hasher +{ + fn eq(self, other: HashMap) -> bool { +``` +> Source code: noir_stdlib/src/collections/map.nr#L426-L435 + + +Checks if two HashMaps are equal. 
+ +Example: + +```rust title="eq_example" showLineNumbers +let mut map1: HashMap> = HashMap::default(); + let mut map2: HashMap> = HashMap::default(); + + map1.insert(1, 2); + map1.insert(3, 4); + + map2.insert(3, 4); + map2.insert(1, 2); + + assert(map1 == map2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L284-L295 + diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/index.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/index.md new file mode 100644 index 00000000000..ea84c6d5c21 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/index.md @@ -0,0 +1,5 @@ +--- +title: Containers +description: Container types provided by Noir's standard library for storing and retrieving data +keywords: [containers, data types, vec, hashmap] +--- diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/vec.mdx b/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/vec.mdx new file mode 100644 index 00000000000..475011922f8 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/containers/vec.mdx @@ -0,0 +1,170 @@ +--- +title: Vectors +description: Delve into the Vec data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. +keywords: [noir, vector type, methods, examples, dynamic arrays] +sidebar_position: 6 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A vector is a collection type similar to Rust's `Vec` type. In Noir, it is a convenient way to use slices as mutable arrays. + +Example: + +```rust +let mut vector: Vec = Vec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +``` + +## Methods + +### new + +Creates a new, empty vector. + +```rust +pub fn new() -> Self +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### from_slice + +Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. + +```rust +pub fn from_slice(slice: [T]) -> Self +``` + +Example: + +```rust +let slice: [Field] = &[1, 2, 3]; +let vector_from_slice = Vec::from_slice(slice); +assert(vector_from_slice.len() == 3); +``` + +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### get + +Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. + +```rust +pub fn get(self, index: Field) -> T +``` + +Example: + +```rust +let vector: Vec = Vec::from_slice(&[10, 20, 30]); +assert(vector.get(1) == 20); +``` + +### set + +```rust +pub fn set(&mut self: Self, index: u64, value: T) { +``` + +Writes an element to the vector at the given index, starting from zero. + +Panics if the index points beyond the vector's end. + +Example: + +```rust +let vector: Vec = Vec::from_slice(&[10, 20, 30]); +assert(vector.get(1) == 20); +vector.set(1, 42); +assert(vector.get(1) == 42); +``` + +### push + +Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. 
+ +```rust +pub fn push(&mut self, elem: T) +``` + +Example: + +```rust +let mut vector: Vec = Vec::new(); +vector.push(10); +assert(vector.len() == 1); +``` + +### pop + +Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. + +```rust +pub fn pop(&mut self) -> T +``` + +Example: + +```rust +let mut vector = Vec::from_slice(&[10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) +``` + +Example: + +```rust +let mut vector = Vec::from_slice(&[10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. + +```rust +pub fn remove(&mut self, index: Field) -> T +``` + +Example: + +```rust +let mut vector = Vec::from_slice(&[10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/_category_.json b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ciphers.mdx b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ciphers.mdx new file mode 100644 index 00000000000..d75e50d4b89 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ciphers.mdx @@ -0,0 +1,32 @@ +--- +title: Ciphers +description: + Learn about the implemented ciphers ready to use for any Noir project +keywords: + [ciphers, Noir project, aes128, encrypt] +sidebar_position: 0 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## aes128 + +Given a plaintext as an array of bytes, returns the corresponding aes128 ciphertext (CBC mode). Input padding is automatically performed using PKCS#7, so that the output length is `input.len() + (16 - input.len() % 16)`. + +```rust title="aes128" showLineNumbers +pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {} +``` +> Source code: noir_stdlib/src/aes128.nr#L2-L4 + + +```rust +fn main() { + let input: [u8; 4] = [0, 12, 3, 15] // Random bytes, will be padded to 16 bytes. + let iv: [u8; 16] = [0; 16]; // Initialisation vector + let key: [u8; 16] = [0; 16] // AES key + let ciphertext = std::aes128::aes128_encrypt(inputs.as_bytes(), iv.as_bytes(), key.as_bytes()); // In this case, the output length will be 16 bytes. 
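+    // Per the PKCS#7 padding rule described above, the ciphertext length is
+    // input.len() + (16 - input.len() % 16) = 4 + 12 = 16 bytes.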
+} +``` + + + \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ec_primitives.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ec_primitives.md new file mode 100644 index 00000000000..f262d8160d6 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -0,0 +1,102 @@ +--- +title: Elliptic Curve Primitives +keywords: [cryptographic primitives, Noir project] +sidebar_position: 4 +--- + +Data structures and methods on them that allow you to carry out computations involving elliptic +curves over the (mathematical) field corresponding to `Field`. For the field currently at our +disposal, applications would involve a curve embedded in BN254, e.g. the +[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). + +## Data structures + +### Elliptic curve configurations + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. the specific elliptic +curve you want to use, which would be specified using any one of the methods +`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the +defining equation together with a generator point as parameters. You can find more detail in the +comments in +[`noir_stdlib/src/ec/mod.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec/mod.nr), but +the gist of it is that the elliptic curves of interest are usually expressed in one of the standard +forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, +you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly +together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates +requiring more coordinates but allowing for more efficient implementations of elliptic curve +operations). Conversions between all of these forms are provided, and under the hood these +conversions are done whenever an operation is more efficient in a different representation (or a +mixed coordinate representation is employed). + +### Points + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the +elliptic curve. For a curve configuration `c` and a point `p`, it may be checked that `p` +does indeed lie on `c` by calling `c.contains(p1)`. + +## Methods + +(given a choice of curve representation, e.g. use `std::ec::tecurve::affine::Curve` and use +`std::ec::tecurve::affine::Point`) + +- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is + zero by calling `p.is_zero()`. +- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling + `p1.eq(p2)`. +- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two + points is accomplished by calling `c.add(p1,p2)`. +- **Negation**: For a point `p: Point`, `p.negate()` is its negation. +- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by + calling `c.subtract(p1,p2)`. +- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`, + scalar multiplication is given by `c.mul(n,p)`. If instead `n :: [u1; N]`, i.e. 
`n` is a bit + array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)` +- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`, + multi-scalar multiplication is given by `c.msm(n,p)`. +- **Coordinate representation conversions**: The `into_group` method converts a point or curve + configuration in the affine representation to one in the CurveGroup representation, and + `into_affine` goes in the other direction. +- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent + and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their + configurations or points. `swcurve` is more general and a curve c of one of the other two types + may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying + on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling + `c.map_into_swcurve(p)`. +- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a + `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. For all of + the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where + `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to + satisfy are specified in the comments + [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec/mod.nr)). + +## Examples + +The +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) +illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more +interesting examples in Noir would be: + +Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key +from the private key. This is a matter of using scalar multiplication. In the case of Baby Jubjub, +for example, this code would do: + +```rust +use std::ec::tecurve::affine::{Curve, Point}; + +fn bjj_pub_key(priv_key: Field) -> Point +{ + + let bjj = Curve::new(168700, 168696, G::new(995203441582195749578291179787384436505546430278305826713579947235728471134,5472060717959818805561601436314318772137091100104008585924551046643952123905)); + + let base_pt = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203); + + bjj.mul(priv_key,base_pt) +} +``` + +This would come in handy in a Merkle proof. + +- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash + function. See + [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for + the case of Baby Jubjub and the Poseidon hash function. 
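+
+As a further, minimal sketch (reusing the Baby Jubjub parameters from the public-key example above and only the methods listed in the Methods section), the zero element, point addition, equality and the membership check can be exercised like so:
+
+```rust
+use std::ec::tecurve::affine::{Curve, Point};
+
+fn main() {
+    // Baby Jubjub in Twisted Edwards affine form: coefficients a = 168700, d = 168696,
+    // with the same generator as in the public-key example above.
+    let bjj = Curve::new(
+        168700,
+        168696,
+        Point::new(
+            995203441582195749578291179787384436505546430278305826713579947235728471134,
+            5472060717959818805561601436314318772137091100104008585924551046643952123905
+        )
+    );
+
+    let base_pt = Point::new(
+        5299619240641551281634865583518297030282874472190772894086521144482721001553,
+        16950150798460657717958625567821834550301663161624707787222815936182638968203
+    );
+
+    // The base point lies on the curve, and adding the zero element leaves it unchanged.
+    assert(bjj.contains(base_pt));
+    let sum = bjj.add(base_pt, Point::zero());
+    assert(sum.eq(base_pt));
+}
+```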
diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx new file mode 100644 index 00000000000..8520071e95f --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx @@ -0,0 +1,98 @@ +--- +title: ECDSA Signature Verification +description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves +keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] +sidebar_position: 3 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. + +## ecdsa_secp256k1::verify_signature + +Verifier for ECDSA Secp256k1 signatures. +See ecdsa_secp256k1::verify_signature_slice for a version that accepts slices directly. + +```rust title="ecdsa_secp256k1" showLineNumbers +pub fn verify_signature( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8; N] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256k1.nr#L2-L9 + + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + + +## ecdsa_secp256k1::verify_signature_slice + +Verifier for ECDSA Secp256k1 signatures where the message is a slice. + +```rust title="ecdsa_secp256k1_slice" showLineNumbers +pub fn verify_signature_slice( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256k1.nr#L13-L20 + + + + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures. +See ecdsa_secp256r1::verify_signature_slice for a version that accepts slices directly. + +```rust title="ecdsa_secp256r1" showLineNumbers +pub fn verify_signature( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8; N] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256r1.nr#L2-L9 + + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures where the message is a slice. 
+ +```rust title="ecdsa_secp256r1_slice" showLineNumbers +pub fn verify_signature_slice( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256r1.nr#L13-L20 + + + diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/eddsa.mdx b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/eddsa.mdx new file mode 100644 index 00000000000..1ad42a5ac96 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/eddsa.mdx @@ -0,0 +1,37 @@ +--- +title: EdDSA Verification +description: Learn about the cryptographic primitives regarding EdDSA +keywords: [cryptographic primitives, Noir project, eddsa, signatures] +sidebar_position: 5 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## eddsa::eddsa_poseidon_verify + +Verifier for EdDSA signatures + +```rust +fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool +``` + +It is also possible to specify the hash algorithm used for the signature by using the `eddsa_verify` function by passing a type implementing the Hasher trait with the turbofish operator. +For instance, if you want to use Poseidon2 instead, you can do the following: +```rust +use std::hash::poseidon2::Poseidon2Hasher; + +eddsa_verify::(pub_key_a.x, pub_key_a.y, s_a, r8_a.x, r8_a.y, msg); +``` + + + +## eddsa::eddsa_to_pub + +Private to public key conversion. + +Returns `(pub_key_x, pub_key_y)` + +```rust +fn eddsa_to_pub(secret : Field) -> (Field, Field) +``` + diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/embedded_curve_ops.mdx b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/embedded_curve_ops.mdx new file mode 100644 index 00000000000..0230f6a8ab9 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/embedded_curve_ops.mdx @@ -0,0 +1,98 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplication in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +sidebar_position: 1 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +The following functions perform operations over the embedded curve whose coordinates are defined by the configured noir field. +For the BN254 scalar field, this is BabyJubJub or Grumpkin. + +:::note +Suffixes `_low` and `_high` denote low and high limbs of a scalar. +::: + +## embedded_curve_ops::multi_scalar_mul + +Performs multi scalar multiplication over the embedded curve. +The function accepts arbitrary amount of point-scalar pairs on the input, it multiplies the individual pairs over +the curve and returns a sum of the resulting points. + +Points represented as x and y coordinates [x1, y1, x2, y2, ...], scalars as low and high limbs [low1, high1, low2, high2, ...]. 
+ +```rust title="multi_scalar_mul" showLineNumbers +pub fn multi_scalar_mul( + points: [EmbeddedCurvePoint; N], + scalars: [EmbeddedCurveScalar; N] +) -> [Field; 3] +``` +> Source code: noir_stdlib/src/embedded_curve_ops.nr#L92-L97 + + +example + +```rust +fn main(point_x: Field, point_y: Field, scalar_low: Field, scalar_high: Field) { + let point = std::embedded_curve_ops::multi_scalar_mul([point_x, point_y], [scalar_low, scalar_high]); + println(point); +} +``` + +## embedded_curve_ops::fixed_base_scalar_mul + +Performs fixed base scalar multiplication over the embedded curve (multiplies input scalar with a generator point). +The function accepts a single scalar on the input represented as 2 fields. + +```rust title="fixed_base_scalar_mul" showLineNumbers +pub fn fixed_base_scalar_mul( + scalar_low: Field, + scalar_high: Field +) -> [Field; 3] +``` +> Source code: noir_stdlib/src/embedded_curve_ops.nr#L103-L108 + + +example + +```rust +fn main(scalar_low: Field, scalar_high: Field) { + let point = std::embedded_curve_ops::fixed_base_scalar_mul(scalar_low, scalar_high); + println(point); +} +``` + +## embedded_curve_ops::embedded_curve_add + +Adds two points on the embedded curve. +This function takes two `EmbeddedCurvePoint` structures as parameters, representing points on the curve, and returns a new `EmbeddedCurvePoint` structure that represents their sum. + +### Parameters: +- `point1` (`EmbeddedCurvePoint`): The first point to add. +- `point2` (`EmbeddedCurvePoint`): The second point to add. + +### Returns: +- `EmbeddedCurvePoint`: The resulting point after the addition of `point1` and `point2`. + +```rust title="embedded_curve_add" showLineNumbers +fn embedded_curve_add( + point1: EmbeddedCurvePoint, + point2: EmbeddedCurvePoint +) -> EmbeddedCurvePoint +``` +> Source code: noir_stdlib/src/embedded_curve_ops.nr#L117-L122 + + +example + +```rust +fn main() { + let point1 = EmbeddedCurvePoint { x: 1, y: 2 }; + let point2 = EmbeddedCurvePoint { x: 3, y: 4 }; + let result = std::embedded_curve_ops::embedded_curve_add(point1, point2); + println!("Resulting Point: ({}, {})", result.x, result.y); +} +``` + + diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/hashes.mdx b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/hashes.mdx new file mode 100644 index 00000000000..dadff87bb69 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -0,0 +1,253 @@ +--- +title: Hash methods +description: + Learn about the cryptographic primitives ready to use for any Noir project, including sha256, + blake2s, pedersen, mimc_bn254 and mimc +keywords: + [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] +sidebar_position: 0 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## sha256 + +Given an array of bytes, returns the resulting sha256 hash. +Specify a message_size to hash only the first `message_size` bytes of the input. 
+ +```rust title="sha256" showLineNumbers +pub fn sha256(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash/mod.nr#L13-L15 + + +example: +```rust title="sha256_var" showLineNumbers +let digest = std::hash::sha256_var([x as u8], 1); +``` +> Source code: test_programs/execution_success/sha256/src/main.nr#L16-L18 + + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::sha256::sha256_var(x, 4); +} +``` + + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust title="blake2s" showLineNumbers +pub fn blake2s(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash/mod.nr#L19-L21 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## blake3 + +Given an array of bytes, returns an array with the Blake3 hash + +```rust title="blake3" showLineNumbers +pub fn blake3(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash/mod.nr#L25-L27 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake3(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. + +```rust title="pedersen_hash" showLineNumbers +pub fn pedersen_hash(input: [Field; N]) -> Field +``` +> Source code: noir_stdlib/src/hash/mod.nr#L61-L63 + + +example: + +```rust title="pedersen-hash" showLineNumbers +fn main(x: Field, y: Field, expected_hash: Field) { + let hash = std::hash::pedersen_hash([x, y]); + assert_eq(hash, expected_hash); +} +``` +> Source code: test_programs/execution_success/pedersen_hash/src/main.nr#L1-L7 + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. + +```rust title="pedersen_commitment" showLineNumbers +pub fn pedersen_commitment(input: [Field; N]) -> EmbeddedCurvePoint { +``` +> Source code: noir_stdlib/src/hash/mod.nr#L30-L32 + + +example: + +```rust title="pedersen-commitment" showLineNumbers +fn main(x: Field, y: Field, expected_commitment: std::embedded_curve_ops::EmbeddedCurvePoint) { + let commitment = std::hash::pedersen_commitment([x, y]); + assert_eq(commitment.x, expected_commitment.x); + assert_eq(commitment.y, expected_commitment.y); +} +``` +> Source code: test_programs/execution_success/pedersen_commitment/src/main.nr#L1-L8 + + + + +## keccak256 + +Given an array of bytes (`u8`), returns the resulting keccak hash as an array of +32 bytes (`[u8; 32]`). Specify a message_size to hash only the first +`message_size` bytes of the input. 
+ +```rust title="keccak256" showLineNumbers +pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash/mod.nr#L104-L106 + + +example: + +```rust title="keccak256" showLineNumbers +fn main(x: Field, result: [u8; 32]) { + // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field + // The padding is taken care of by the program + let digest = std::hash::keccak256([x as u8], 1); + assert(digest == result); + + //#1399: variable message size + let message_size = 4; + let hash_a = std::hash::keccak256([1, 2, 3, 4], message_size); + let hash_b = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size); + + assert(hash_a == hash_b); + + let message_size_big = 8; + let hash_c = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size_big); + + assert(hash_a != hash_c); +} +``` +> Source code: test_programs/execution_success/keccak256/src/main.nr#L1-L21 + + + + +## poseidon + +Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify +how many inputs are there to your Poseidon function. + +```rust +// example for hash_1, hash_2 accepts an array of length 2, etc +fn hash_1(input: [Field; 1]) -> Field +``` + +example: + +```rust title="poseidon" showLineNumbers +use std::hash::poseidon; + +fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { + let hash1 = poseidon::bn254::hash_2(x1); + assert(hash1 == y1); + + let hash2 = poseidon::bn254::hash_4(x2); + assert(hash2 == y2); +} +``` +> Source code: test_programs/execution_success/poseidon_bn254_hash/src/main.nr#L1-L11 + + +## poseidon 2 + +Given an array of Fields, returns a new Field with the Poseidon2 Hash. Contrary to the Poseidon +function, there is only one hash and you can specify a message_size to hash only the first +`message_size` bytes of the input, + +```rust +// example for hashing the first three elements of the input +Poseidon2::hash(input, 3); +``` + +example: + +```rust title="poseidon2" showLineNumbers +use std::hash::poseidon2; + +fn main(inputs: [Field; 4], expected_hash: Field) { + let hash = poseidon2::Poseidon2::hash(inputs, inputs.len()); + assert_eq(hash, expected_hash); +} +``` +> Source code: test_programs/execution_success/poseidon2/src/main.nr#L1-L8 + + +## mimc_bn254 and mimc + +`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by +providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if +you're willing to input your own constants: + +```rust +fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field +``` + +otherwise, use the `mimc_bn254` method: + +```rust +fn mimc_bn254(array: [Field; N]) -> Field +``` + +example: + +```rust + +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::mimc::mimc_bn254(x); +} +``` + +## hash_to_field + +```rust +fn hash_to_field(_input : [Field]) -> Field {} +``` + +Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return +a value which can be represented as a `Field`. 
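+
+example (a minimal sketch, assuming the function is exposed as `std::hash::hash_to_field`):
+
+```rust
+fn main() {
+    // Hash three field elements down to a single Field value.
+    let inputs: [Field] = &[1, 2, 3];
+    let h = std::hash::hash_to_field(inputs);
+    println(h);
+}
+```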
+ diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/index.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/index.md new file mode 100644 index 00000000000..650f30165d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/index.md @@ -0,0 +1,14 @@ +--- +title: Cryptographic Primitives +description: + Learn about the cryptographic primitives ready to use for any Noir project +keywords: + [ + cryptographic primitives, + Noir project, + ] +--- + +The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news or if you would be interested in adding more of these calculations in Noir. + +Some methods are available thanks to the Aztec backend, not being performed using Noir. When using other backends, these methods may or may not be supplied. diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/schnorr.mdx b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/schnorr.mdx new file mode 100644 index 00000000000..a32138daaa6 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/cryptographic_primitives/schnorr.mdx @@ -0,0 +1,64 @@ +--- +title: Schnorr Signatures +description: Learn how you can verify Schnorr signatures using Noir +keywords: [cryptographic primitives, Noir project, schnorr, signatures] +sidebar_position: 2 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## schnorr::verify_signature + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). +See schnorr::verify_signature_slice for a version that works directly on slices. + +```rust title="schnorr_verify" showLineNumbers +pub fn verify_signature( + public_key_x: Field, + public_key_y: Field, + signature: [u8; 64], + message: [u8; N] +) -> bool +``` +> Source code: noir_stdlib/src/schnorr.nr#L2-L9 + + +where `_signature` can be generated like so using the npm package +[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) + +```js +const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); +const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); + +... + +const barretenberg = await BarretenbergWasm.new(); +const schnorr = new Schnorr(barretenberg); +const pubKey = schnorr.computePublicKey(privateKey); +const message = ... +const signature = Array.from( + schnorr.constructSignature(hash, privateKey).toBuffer() +); + +... +``` + + + +## schnorr::verify_signature_slice + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin) +where the message is a slice. + +```rust title="schnorr_verify_slice" showLineNumbers +pub fn verify_signature_slice( + public_key_x: Field, + public_key_y: Field, + signature: [u8; 64], + message: [u8] +) -> bool +``` +> Source code: noir_stdlib/src/schnorr.nr#L13-L20 + + + diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/is_unconstrained.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/is_unconstrained.md new file mode 100644 index 00000000000..51bb1bda8f1 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/is_unconstrained.md @@ -0,0 +1,69 @@ +--- +title: Is Unconstrained Function +description: + The is_unconstrained function returns wether the context at that point of the program is unconstrained or not. 
+keywords: + [ + unconstrained + ] +--- + +It's very common for functions in circuits to take unconstrained hints of an expensive computation and then verify it. This is done by running the hint in an unconstrained context and then verifying the result in a constrained context. + +When a function is marked as unconstrained, any subsequent functions that it calls will also be run in an unconstrained context. However, if we are implementing a library function, other users might call it within an unconstrained context or a constrained one. Generally, in an unconstrained context we prefer just computing the result instead of taking a hint of it and verifying it, since that'd mean doing the same computation twice: + +```rust + +fn my_expensive_computation(){ + ... +} + +unconstrained fn my_expensive_computation_hint(){ + my_expensive_computation() +} + +pub fn external_interface(){ + my_expensive_computation_hint(); + // verify my_expensive_computation: If external_interface is called from unconstrained, this is redundant + ... +} + +``` + +In order to improve the performance in an unconstrained context you can use the function at `std::runtime::is_unconstrained() -> bool`: + + +```rust +use dep::std::runtime::is_unconstrained; + +fn my_expensive_computation(){ + ... +} + +unconstrained fn my_expensive_computation_hint(){ + my_expensive_computation() +} + +pub fn external_interface(){ + if is_unconstrained() { + my_expensive_computation(); + } else { + my_expensive_computation_hint(); + // verify my_expensive_computation + ... + } +} + +``` + +The is_unconstrained result is resolved at compile time, so in unconstrained contexts the compiler removes the else branch, and in constrained contexts the compiler removes the if branch, reducing the amount of compute necessary to run external_interface. + +Note that using `is_unconstrained` in a `comptime` context will also return `true`: + +``` +fn main() { + comptime { + assert(is_unconstrained()); + } +} +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/logging.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/logging.md new file mode 100644 index 00000000000..db75ef9f86f --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/logging.md @@ -0,0 +1,78 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + print statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides two familiar statements you can use: `println` and `print`. Despite being a limited implementation of rust's `println!` and `print!` macros, these constructs can be useful for debugging. + +You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. 
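+
+For instance, a minimal sketch of a test whose output is only visible when the `--show-output` flag is passed to `nargo test` (the test body here is purely illustrative):
+
+```rust
+#[test]
+fn test_print_output() {
+    let x = 3;
+    // Printed only when running `nargo test --show-output`
+    println(f"x is {x}");
+    assert(x == 3);
+}
+```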
+ +Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. For example: + +```rust +struct Person { + age: Field, + height: Field, +} + +fn main(age: Field, height: Field) { + let person = Person { + age: age, + height: height, + }; + println(person); + println(age + height); + println("Hello world!"); +} +``` + +You can print different types in the same statement (including strings) with a type called `fmtstr`. It can be specified in the same way as a normal string, just prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + println(fmt_str); + + let s = myStruct { y: x, x: y }; + println(s); + + println(f"i: {i}, s: {s}"); + + println(x); + println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + println(f"s: {s}, foo: {foo}"); + + println(15); // prints 0x0f, implicit Field + println(-1 as u8); // prints 255 + println(-1 as i8); // prints -1 +``` + +Examples shown above are interchangeable between the two `print` statements: + +```rust +let person = Person { age : age, height : height }; + +println(person); +print(person); + +println("Hello world!"); // Prints with a newline at the end of the input +print("Hello world!"); // Prints the input and keeps cursor on the same line +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/merkle_trees.md new file mode 100644 index 00000000000..6a9ebf72ada --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/merkle_trees.md @@ -0,0 +1,58 @@ +--- +title: Merkle Trees +description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. +keywords: + [ + Merkle trees in Noir, + Noir programming language, + check membership, + computing root from leaf, + Noir Merkle tree implementation, + Merkle tree tutorial, + Merkle tree code examples, + Noir libraries, + pedersen hash., + ] +--- + +## compute_merkle_root + +Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](./cryptographic_primitives/hashes.mdx#pedersen_hash). + +```rust +fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field +``` + +example: + +```rust +/** + // these values are for this example only + index = "0" + priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" + secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" + note_hash_path = [ + "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", + "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", + "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" + ] + */ +fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { + + let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key); + let pubkey_x = pubkey[0]; + let pubkey_y = pubkey[1]; + let note_commitment = std::hash::pedersen(&[pubkey_x, pubkey_y, secret]); + + let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path.as_slice()); + println(root); +} +``` + +To check merkle tree membership: + +1. Include a merkle root as a program input. +2. Compute the merkle root of a given leaf, index and hash path. +3. Assert the merkle roots are equal. 
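+
+For example, a minimal sketch of these three steps (the parameter names are illustrative; `compute_merkle_root` is the function documented above):
+
+```rust
+fn main(leaf: Field, index: Field, hash_path: [Field; 3], root: pub Field) {
+    // Step 2: recompute the root from the leaf, its index and its hash path
+    let computed_root = std::merkle::compute_merkle_root(leaf, index, hash_path.as_slice());
+    // Step 3: assert it matches the merkle root given as a program input (step 1)
+    assert(computed_root == root);
+}
+```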
+
+For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree).
diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/options.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/options.md
new file mode 100644
index 00000000000..a1bd4e1de5f
--- /dev/null
+++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/options.md
@@ -0,0 +1,101 @@
+---
+title: Option Type
+---
+
+The `Option<T>` type is a way to express that a value might be present (`Some(T)`) or absent (`None`). It's a safer way to handle the potential absence of a value than the nulls used in many other languages.
+
+```rust
+struct Option<T> {
+    None,
+    Some(T),
+}
+```
+
+The `Option` type, already imported into your Noir program, can be used directly:
+
+```rust
+fn main() {
+    let none = Option::none();
+    let some = Option::some(3);
+}
+```
+
+See [this test](https://github.com/noir-lang/noir/blob/5cbfb9c4a06c8865c98ff2b594464b037d821a5c/crates/nargo_cli/tests/test_data/option/src/main.nr) for a more comprehensive set of examples of each of the methods described below.
+
+## Methods
+
+### none
+
+Constructs a `None` value.
+
+### some
+
+Constructs a `Some` wrapper around a given value.
+
+### is_none
+
+Returns true if the Option is None.
+
+### is_some
+
+Returns true if the Option is Some.
+
+### unwrap
+
+Asserts `self.is_some()` and returns the wrapped value.
+
+### unwrap_unchecked
+
+Returns the inner value without asserting `self.is_some()`. This method can be useful within an if condition when we already know that `option.is_some()`. If the option is None, there is no guarantee what value will be returned, only that it will be of type T for an `Option<T>`.
+
+### unwrap_or
+
+Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value.
+
+### unwrap_or_else
+
+Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value.
+
+### expect
+
+Asserts `self.is_some()` with a provided custom message and returns the contained `Some` value. The custom message is expected to be a format string.
+
+### map
+
+If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`.
+
+### map_or
+
+If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value.
+
+### map_or_else
+
+If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`.
+
+### and
+
+Returns None if self is None. Otherwise, this returns `other`.
+
+### and_then
+
+If self is None, this returns None. Otherwise, this calls the given function with the Some value contained within self, and returns the result of that call. In some languages this function is called `flat_map` or `bind`.
+
+### or
+
+If self is Some, return self. Otherwise, return `other`.
+
+### or_else
+
+If self is Some, return self. Otherwise, return `default()`.
+
+### xor
+
+If only one of the two Options is Some, return that option. Otherwise, if both options are Some or both are None, None is returned.
+
+### filter
+
+Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. Otherwise, this returns `None`.
+
+### flatten
+
+Flattens an `Option<Option<T>>` into an `Option<T>`. This returns `None` if the outer Option is None. Otherwise, this returns the inner Option.
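+
+As a rough sketch of how a few of these methods combine (the values chosen here are arbitrary):
+
+```rust
+fn main() {
+    let some = Option::some(3);
+    let none: Option<Field> = Option::none();
+
+    assert(some.map(|x| x * 2).unwrap() == 6);
+    assert(none.unwrap_or(42) == 42);
+    assert(none.or(some).is_some());
+    assert(some.xor(none).unwrap() == 3);
+}
+```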
diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/recursion.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/recursion.md new file mode 100644 index 00000000000..8cfb37fc52d --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/recursion.md @@ -0,0 +1,85 @@ +--- +title: Recursive Proofs +description: Learn about how to write recursive proofs in Noir. +keywords: [recursion, recursive proofs, verification_key, verify_proof] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. + +Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) + +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. + +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. + +## Verifying Recursive Proofs + +```rust +#[foreign(recursive_aggregation)] +pub fn verify_proof(verification_key: [Field], proof: [Field], public_inputs: [Field], key_hash: Field) {} +``` + + + +## Example usage + +```rust + +fn main( + verification_key : [Field; 114], + proof : [Field; 93], + public_inputs : [Field; 1], + key_hash : Field, + proof_b : [Field; 93], +) { + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash + ); + + std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash + ); +} +``` + +You can see a full example of recursive proofs in [this example recursion demo repo](https://github.com/noir-lang/noir-examples/tree/master/recursion). + +## Parameters + +### `verification_key` + +The verification key for the zk program that is being verified. + +### `proof` + +The proof for the zk program that is being verified. + +### `public_inputs` + +These represent the public inputs of the proof we are verifying. + +### `key_hash` + +A key hash is used to check the validity of the verification key. The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. 
diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/traits.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/traits.md new file mode 100644 index 00000000000..2ce8360bc83 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/traits.md @@ -0,0 +1,501 @@ +--- +title: Traits +description: Noir's stdlib provides a few commonly used traits. +keywords: [traits, trait, interface, protocol, default, add, eq] +--- + +## `std::default` + +### `std::default::Default` + +```rust title="default-trait" showLineNumbers +trait Default { + fn default() -> Self; +} +``` +> Source code: noir_stdlib/src/default.nr#L1-L5 + + +Constructs a default value of a type. + +Implementations: +```rust +impl Default for Field { .. } + +impl Default for i8 { .. } +impl Default for i16 { .. } +impl Default for i32 { .. } +impl Default for i64 { .. } + +impl Default for u8 { .. } +impl Default for u16 { .. } +impl Default for u32 { .. } +impl Default for u64 { .. } + +impl Default for () { .. } +impl Default for bool { .. } + +impl Default for [T; N] + where T: Default { .. } + +impl Default for [T] { .. } + +impl Default for (A, B) + where A: Default, B: Default { .. } + +impl Default for (A, B, C) + where A: Default, B: Default, C: Default { .. } + +impl Default for (A, B, C, D) + where A: Default, B: Default, C: Default, D: Default { .. } + +impl Default for (A, B, C, D, E) + where A: Default, B: Default, C: Default, D: Default, E: Default { .. } +``` + +For primitive integer types, the return value of `default` is `0`. Container +types such as arrays are filled with default values of their element type, +except slices whose length is unknown and thus defaulted to zero. + +--- + +## `std::convert` + +### `std::convert::From` + +```rust title="from-trait" showLineNumbers +trait From { + fn from(input: T) -> Self; +} +``` +> Source code: noir_stdlib/src/convert.nr#L1-L5 + + +The `From` trait defines how to convert from a given type `T` to the type on which the trait is implemented. + +The Noir standard library provides a number of implementations of `From` between primitive types. 
+```rust title="from-impls" showLineNumbers +// Unsigned integers + +impl From for u32 { fn from(value: u8) -> u32 { value as u32 } } + +impl From for u64 { fn from(value: u8) -> u64 { value as u64 } } +impl From for u64 { fn from(value: u32) -> u64 { value as u64 } } + +impl From for Field { fn from(value: u8) -> Field { value as Field } } +impl From for Field { fn from(value: u32) -> Field { value as Field } } +impl From for Field { fn from(value: u64) -> Field { value as Field } } + +// Signed integers + +impl From for i32 { fn from(value: i8) -> i32 { value as i32 } } + +impl From for i64 { fn from(value: i8) -> i64 { value as i64 } } +impl From for i64 { fn from(value: i32) -> i64 { value as i64 } } + +// Booleans +impl From for u8 { fn from(value: bool) -> u8 { value as u8 } } +impl From for u32 { fn from(value: bool) -> u32 { value as u32 } } +impl From for u64 { fn from(value: bool) -> u64 { value as u64 } } +impl From for i8 { fn from(value: bool) -> i8 { value as i8 } } +impl From for i32 { fn from(value: bool) -> i32 { value as i32 } } +impl From for i64 { fn from(value: bool) -> i64 { value as i64 } } +impl From for Field { fn from(value: bool) -> Field { value as Field } } +``` +> Source code: noir_stdlib/src/convert.nr#L25-L52 + + +#### When to implement `From` + +As a general rule of thumb, `From` may be implemented in the [situations where it would be suitable in Rust](https://doc.rust-lang.org/std/convert/trait.From.html#when-to-implement-from): + +- The conversion is *infallible*: Noir does not provide an equivalent to Rust's `TryFrom`, if the conversion can fail then provide a named method instead. +- The conversion is *lossless*: semantically, it should not lose or discard information. For example, `u32: From` can losslessly convert any `u16` into a valid `u32` such that the original `u16` can be recovered. On the other hand, `u16: From` should not be implemented as `2**16` is a `u32` which cannot be losslessly converted into a `u16`. +- The conversion is *value-preserving*: the conceptual kind and meaning of the resulting value is the same, even though the Noir type and technical representation might be different. While it's possible to infallibly and losslessly convert a `u8` into a `str<2>` hex representation, `4u8` and `"04"` are too different for `str<2>: From` to be implemented. +- The conversion is *obvious*: it's the only reasonable conversion between the two types. If there's ambiguity on how to convert between them such that the same input could potentially map to two different values then a named method should be used. For instance rather than implementing `U128: From<[u8; 16]>`, the methods `U128::from_le_bytes` and `U128::from_be_bytes` are used as otherwise the endianness of the array would be ambiguous, resulting in two potential values of `U128` from the same byte array. + +One additional recommendation specific to Noir is: +- The conversion is *efficient*: it's relatively cheap to convert between the two types. Due to being a ZK DSL, it's more important to avoid unnecessary computation compared to Rust. If the implementation of `From` would encourage users to perform unnecessary conversion, resulting in additional proving time, then it may be preferable to expose functionality such that this conversion may be avoided. + +### `std::convert::Into` + +The `Into` trait is defined as the reciprocal of `From`. It should be easy to convince yourself that if we can convert to type `A` from type `B`, then it's possible to convert type `B` into type `A`. 
+ +For this reason, implementing `From` on a type will automatically generate a matching `Into` implementation. One should always prefer implementing `From` over `Into` as implementing `Into` will not generate a matching `From` implementation. + +```rust title="into-trait" showLineNumbers +trait Into { + fn into(self) -> T; +} + +impl Into for U where T: From { + fn into(self) -> T { + T::from(self) + } +} +``` +> Source code: noir_stdlib/src/convert.nr#L13-L23 + + +`Into` is most useful when passing function arguments where the types don't quite match up with what the function expects. In this case, the compiler has enough type information to perform the necessary conversion by just appending `.into()` onto the arguments in question. + +--- + +## `std::cmp` + +### `std::cmp::Eq` + +```rust title="eq-trait" showLineNumbers +trait Eq { + fn eq(self, other: Self) -> bool; +} +``` +> Source code: noir_stdlib/src/cmp.nr#L1-L5 + + +Returns `true` if `self` is equal to `other`. Implementing this trait on a type +allows the type to be used with `==` and `!=`. + +Implementations: +```rust +impl Eq for Field { .. } + +impl Eq for i8 { .. } +impl Eq for i16 { .. } +impl Eq for i32 { .. } +impl Eq for i64 { .. } + +impl Eq for u8 { .. } +impl Eq for u16 { .. } +impl Eq for u32 { .. } +impl Eq for u64 { .. } + +impl Eq for () { .. } +impl Eq for bool { .. } + +impl Eq for [T; N] + where T: Eq { .. } + +impl Eq for [T] + where T: Eq { .. } + +impl Eq for (A, B) + where A: Eq, B: Eq { .. } + +impl Eq for (A, B, C) + where A: Eq, B: Eq, C: Eq { .. } + +impl Eq for (A, B, C, D) + where A: Eq, B: Eq, C: Eq, D: Eq { .. } + +impl Eq for (A, B, C, D, E) + where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. } +``` + +### `std::cmp::Ord` + +```rust title="ord-trait" showLineNumbers +trait Ord { + fn cmp(self, other: Self) -> Ordering; +} +``` +> Source code: noir_stdlib/src/cmp.nr#L102-L106 + + +`a.cmp(b)` compares two values returning `Ordering::less()` if `a < b`, +`Ordering::equal()` if `a == b`, or `Ordering::greater()` if `a > b`. +Implementing this trait on a type allows `<`, `<=`, `>`, and `>=` to be +used on values of the type. + +`std::cmp` also provides `max` and `min` functions for any type which implements the `Ord` trait. + +Implementations: + +```rust +impl Ord for u8 { .. } +impl Ord for u16 { .. } +impl Ord for u32 { .. } +impl Ord for u64 { .. } + +impl Ord for i8 { .. } +impl Ord for i16 { .. } +impl Ord for i32 { .. } + +impl Ord for i64 { .. } + +impl Ord for () { .. } +impl Ord for bool { .. } + +impl Ord for [T; N] + where T: Ord { .. } + +impl Ord for [T] + where T: Ord { .. } + +impl Ord for (A, B) + where A: Ord, B: Ord { .. } + +impl Ord for (A, B, C) + where A: Ord, B: Ord, C: Ord { .. } + +impl Ord for (A, B, C, D) + where A: Ord, B: Ord, C: Ord, D: Ord { .. } + +impl Ord for (A, B, C, D, E) + where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { .. } +``` + +--- + +## `std::ops` + +### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div` + +These traits abstract over addition, subtraction, multiplication, and division respectively. +Implementing these traits for a given type will also allow that type to be used with the corresponding operator +for that trait (`+` for Add, etc) in addition to the normal method names. 
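+
+For instance, here is a minimal sketch of what implementing `Add` for a user-defined type could look like (the `Point` struct is illustrative, not part of the standard library):
+
+```rust
+use dep::std::ops::Add;
+
+struct Point {
+    x: Field,
+    y: Field,
+}
+
+impl Add for Point {
+    fn add(self, other: Self) -> Self {
+        Point { x: self.x + other.x, y: self.y + other.y }
+    }
+}
+
+fn main() {
+    let a = Point { x: 1, y: 2 };
+    let b = Point { x: 3, y: 4 };
+    let c = a + b; // desugars to a.add(b)
+    assert(c.x == 4);
+    assert(c.y == 6);
+}
+```
+
+The trait definitions themselves are shown below.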
+ +```rust title="add-trait" showLineNumbers +trait Add { + fn add(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/arith.nr#L1-L5 + +```rust title="sub-trait" showLineNumbers +trait Sub { + fn sub(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/arith.nr#L19-L23 + +```rust title="mul-trait" showLineNumbers +trait Mul { + fn mul(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/arith.nr#L37-L41 + +```rust title="div-trait" showLineNumbers +trait Div { + fn div(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/arith.nr#L55-L59 + + +The implementations block below is given for the `Add` trait, but the same types that implement +`Add` also implement `Sub`, `Mul`, and `Div`. + +Implementations: +```rust +impl Add for Field { .. } + +impl Add for i8 { .. } +impl Add for i16 { .. } +impl Add for i32 { .. } +impl Add for i64 { .. } + +impl Add for u8 { .. } +impl Add for u16 { .. } +impl Add for u32 { .. } +impl Add for u64 { .. } +``` + +### `std::ops::Rem` + +```rust title="rem-trait" showLineNumbers +trait Rem{ + fn rem(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/arith.nr#L73-L77 + + +`Rem::rem(a, b)` is the remainder function returning the result of what is +left after dividing `a` and `b`. Implementing `Rem` allows the `%` operator +to be used with the implementation type. + +Unlike other numeric traits, `Rem` is not implemented for `Field`. + +Implementations: +```rust +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } +``` + +### `std::ops::Neg` + +```rust title="neg-trait" showLineNumbers +trait Neg { + fn neg(self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/arith.nr#L89-L93 + + +`Neg::neg` is equivalent to the unary negation operator `-`. + +Implementations: +```rust title="neg-trait-impls" showLineNumbers +impl Neg for Field { fn neg(self) -> Field { -self } } + +impl Neg for i8 { fn neg(self) -> i8 { -self } } +impl Neg for i16 { fn neg(self) -> i16 { -self } } +impl Neg for i32 { fn neg(self) -> i32 { -self } } +impl Neg for i64 { fn neg(self) -> i64 { -self } } +``` +> Source code: noir_stdlib/src/ops/arith.nr#L95-L102 + + +### `std::ops::Not` + +```rust title="not-trait" showLineNumbers +trait Not { + fn not(self: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/bit.nr#L1-L5 + + +`Not::not` is equivalent to the unary bitwise NOT operator `!`. 
+ +Implementations: +```rust title="not-trait-impls" showLineNumbers +impl Not for bool { fn not(self) -> bool { !self } } + +impl Not for u64 { fn not(self) -> u64 { !self } } +impl Not for u32 { fn not(self) -> u32 { !self } } +impl Not for u16 { fn not(self) -> u16 { !self } } +impl Not for u8 { fn not(self) -> u8 { !self } } +impl Not for u1 { fn not(self) -> u1 { !self } } + +impl Not for i8 { fn not(self) -> i8 { !self } } +impl Not for i16 { fn not(self) -> i16 { !self } } +impl Not for i32 { fn not(self) -> i32 { !self } } +impl Not for i64 { fn not(self) -> i64 { !self } } +``` +> Source code: noir_stdlib/src/ops/bit.nr#L7-L20 + + +### `std::ops::{ BitOr, BitAnd, BitXor }` + +```rust title="bitor-trait" showLineNumbers +trait BitOr { + fn bitor(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/bit.nr#L22-L26 + +```rust title="bitand-trait" showLineNumbers +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/bit.nr#L40-L44 + +```rust title="bitxor-trait" showLineNumbers +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/bit.nr#L58-L62 + + +Traits for the bitwise operations `|`, `&`, and `^`. + +Implementing `BitOr`, `BitAnd` or `BitXor` for a type allows the `|`, `&`, or `^` operator respectively +to be used with the type. + +The implementations block below is given for the `BitOr` trait, but the same types that implement +`BitOr` also implement `BitAnd` and `BitXor`. + +Implementations: +```rust +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } +``` + +### `std::ops::{ Shl, Shr }` + +```rust title="shl-trait" showLineNumbers +trait Shl { + fn shl(self, other: u8) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/bit.nr#L76-L80 + +```rust title="shr-trait" showLineNumbers +trait Shr { + fn shr(self, other: u8) -> Self; +} +``` +> Source code: noir_stdlib/src/ops/bit.nr#L93-L97 + + +Traits for a bit shift left and bit shift right. + +Implementing `Shl` for a type allows the left shift operator (`<<`) to be used with the implementation type. +Similarly, implementing `Shr` allows the right shift operator (`>>`) to be used with the type. + +Note that bit shifting is not currently implemented for signed types. + +The implementations block below is given for the `Shl` trait, but the same types that implement +`Shl` also implement `Shr`. 
+ +Implementations: +```rust +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } +``` + +--- + +## `std::append` + +### `std::append::Append` + +`Append` can abstract over types that can be appended to - usually container types: + +```rust title="append-trait" showLineNumbers +trait Append { + fn empty() -> Self; + fn append(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/append.nr#L9-L14 + + +`Append` requires two methods: + +- `empty`: Constructs an empty value of `Self`. +- `append`: Append two values together, returning the result. + +Additionally, it is expected that for any implementation: + +- `T::empty().append(x) == x` +- `x.append(T::empty()) == x` + +Implementations: +```rust +impl Append for [T] +impl Append for Quoted +``` diff --git a/docs/versioned_docs/version-v0.32.0/noir/standard_library/zeroed.md b/docs/versioned_docs/version-v0.32.0/noir/standard_library/zeroed.md new file mode 100644 index 00000000000..f450fecdd36 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/noir/standard_library/zeroed.md @@ -0,0 +1,26 @@ +--- +title: Zeroed Function +description: + The zeroed function returns a zeroed value of any type. +keywords: + [ + zeroed + ] +--- + +Implements `fn zeroed() -> T` to return a zeroed value of any type. This function is generally unsafe to use as the zeroed bit pattern is not guaranteed to be valid for all types. It can however, be useful in cases when the value is guaranteed not to be used such as in a BoundedVec library implementing a growable vector, up to a certain length, backed by an array. The array can be initialized with zeroed values which are guaranteed to be inaccessible until the vector is pushed to. Similarly, enumerations in noir can be implemented using this method by providing zeroed values for the unused variants. + +You can access the function at `std::unsafe::zeroed`. + +This function currently supports the following types: + +- Field +- Bool +- Uint +- Array +- Slice +- String +- Tuple +- Function + +Using it on other types could result in unexpected behavior. diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/.nojekyll b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md new file mode 100644 index 00000000000..42f065f4a4e --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md @@ -0,0 +1,141 @@ +# BarretenbergBackend + +## Implements + +- [`Backend`](../index.md#backend) +- [`Backend`](../index.md#backend) + +## Constructors + +### new BarretenbergBackend(acirCircuit, options) + +```ts +new BarretenbergBackend(acirCircuit, options): BarretenbergBackend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `acirCircuit` | `CompiledCircuit` | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergBackend`](BarretenbergBackend.md) + +## Properties + +| Property | Type | Description | +| :------ | :------ | :------ | +| `acirComposer` | `any` | - | +| `acirUncompressedBytecode` | `Uint8Array` | - | +| `api` | `Barretenberg` | - | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | - | + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +*** + +### generateProof() + +```ts +generateProof(compressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `compressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<`ProofData`\> + +#### Description + +Generates a proof + +*** + +### generateRecursiveProofArtifacts() + +```ts +generateRecursiveProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +Generates artifacts that will be passed to a circuit that will verify this proof. + +Instead of passing the proof and verification key as a byte array, we pass them +as fields which makes it cheaper to verify in a circuit. + +The proof that is passed here will have been created using a circuit +that has the #[recursive] attribute on its `main` method. + +The number of public inputs denotes how many public inputs are in the inner proof. 
+ +#### Parameters + +| Parameter | Type | Default value | +| :------ | :------ | :------ | +| `proofData` | `ProofData` | `undefined` | +| `numOfPublicInputs` | `number` | `0` | + +#### Returns + +`Promise`\<`object`\> + +#### Example + +```typescript +const artifacts = await backend.generateRecursiveProofArtifacts(proof, numOfPublicInputs); +``` + +*** + +### getVerificationKey() + +```ts +getVerificationKey(): Promise +``` + +#### Returns + +`Promise`\<`Uint8Array`\> + +*** + +### verifyProof() + +```ts +verifyProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | `ProofData` | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies a proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md new file mode 100644 index 00000000000..500276ea748 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md @@ -0,0 +1,58 @@ +# BarretenbergVerifier + +## Constructors + +### new BarretenbergVerifier(options) + +```ts +new BarretenbergVerifier(options): BarretenbergVerifier +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergVerifier`](BarretenbergVerifier.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +*** + +### verifyProof() + +```ts +verifyProof(proofData, verificationKey): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | `ProofData` | +| `verificationKey` | `Uint8Array` | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies a proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/index.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/index.md new file mode 100644 index 00000000000..14dfac681d4 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/index.md @@ -0,0 +1,40 @@ +# backend_barretenberg + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [BarretenbergBackend](classes/BarretenbergBackend.md) | - | +| [BarretenbergVerifier](classes/BarretenbergVerifier.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [BackendOptions](type-aliases/BackendOptions.md) | - | + +## References + +### CompiledCircuit + +Renames and re-exports [Backend](index.md#backend) + +*** + +### ProofData + +Renames and re-exports [Backend](index.md#backend) + +## Variables + +### Backend + +```ts +Backend: any; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md new file mode 100644 index 
00000000000..b49a479f4f4 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md @@ -0,0 +1,21 @@ +# BackendOptions + +```ts +type BackendOptions: object; +``` + +## Description + +An options object, currently only used to specify the number of threads to use. + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `memory` | `object` | - | +| `memory.maximum` | `number` | - | +| `threads` | `number` | **Description**

Number of threads | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs new file mode 100644 index 00000000000..d7d5128f9e3 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"},{"type":"doc","id":"reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier","label":"BarretenbergVerifier"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/.nojekyll b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/classes/Noir.md new file mode 100644 index 00000000000..ead255bc504 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/classes/Noir.md @@ -0,0 +1,52 @@ +# Noir + +## Constructors + +### new Noir(circuit) + +```ts +new Noir(circuit): Noir +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `circuit` | `CompiledCircuit` | + +#### Returns + +[`Noir`](Noir.md) + +## Methods + +### execute() + +```ts +execute(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | `InputMap` | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Allows to execute a circuit to get its witness and return value. 
+ +#### Example + +```typescript +async execute(inputs) +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/and.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/and.md new file mode 100644 index 00000000000..c783283e396 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/and.md @@ -0,0 +1,22 @@ +# and() + +```ts +and(lhs, rhs): string +``` + +Performs a bitwise AND operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/blake2s256.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/blake2s256.md new file mode 100644 index 00000000000..7882d0da8d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/blake2s256.md @@ -0,0 +1,21 @@ +# blake2s256() + +```ts +blake2s256(inputs): Uint8Array +``` + +Calculates the Blake2s256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md new file mode 100644 index 00000000000..5e3cd53e9d3 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256k1\_verify() + +```ts +ecdsa_secp256k1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256k1 curve. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md new file mode 100644 index 00000000000..0b20ff68957 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256r1\_verify() + +```ts +ecdsa_secp256r1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256r1 curve. 
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/keccak256.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/keccak256.md new file mode 100644 index 00000000000..d10f155ce86 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/keccak256.md @@ -0,0 +1,21 @@ +# keccak256() + +```ts +keccak256(inputs): Uint8Array +``` + +Calculates the Keccak256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/sha256.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/sha256.md new file mode 100644 index 00000000000..6ba4ecac022 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/sha256.md @@ -0,0 +1,21 @@ +# sha256() + +```ts +sha256(inputs): Uint8Array +``` + +Calculates the SHA256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/xor.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/xor.md new file mode 100644 index 00000000000..8d762b895d3 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/functions/xor.md @@ -0,0 +1,22 @@ +# xor() + +```ts +xor(lhs, rhs): string +``` + +Performs a bitwise XOR operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/index.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/index.md new file mode 100644 index 00000000000..166508f7124 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/index.md @@ -0,0 +1,49 @@ +# noir_js + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [Noir](classes/Noir.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [ErrorWithPayload](type-aliases/ErrorWithPayload.md) | - | +| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs an foreign call and returns the response. 
| +| [ForeignCallInput](type-aliases/ForeignCallInput.md) | - | +| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - | +| [WitnessMap](type-aliases/WitnessMap.md) | - | + +### Functions + +| Function | Description | +| :------ | :------ | +| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | +| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Verifies a ECDSA signature over the secp256k1 curve. | +| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies a ECDSA signature over the secp256r1 curve. | +| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | +| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | +| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` | + +## References + +### CompiledCircuit + +Renames and re-exports [InputMap](index.md#inputmap) + +## Variables + +### InputMap + +```ts +InputMap: any; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ErrorWithPayload.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ErrorWithPayload.md new file mode 100644 index 00000000000..e8c2f4aef3d --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ErrorWithPayload.md @@ -0,0 +1,15 @@ +# ErrorWithPayload + +```ts +type ErrorWithPayload: ExecutionError & object; +``` + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `decodedAssertionPayload` | `any` | - | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md new file mode 100644 index 00000000000..812b8b16481 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md @@ -0,0 +1,24 @@ +# ForeignCallHandler + +```ts +type ForeignCallHandler: (name, inputs) => Promise; +``` + +A callback which performs an foreign call and returns the response. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | The identifier for the type of foreign call being performed. | +| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. | + +## Returns + +`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\> + +outputs - An array of hex encoded outputs containing the results of the foreign call. 
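+
+## Example
+
+A minimal sketch of a handler wired up for a single, hypothetical `get_sqrt` oracle (the oracle name and its logic are illustrative; only the `ForeignCallHandler`, `ForeignCallInput` and `ForeignCallOutput` types come from this package):
+
+```typescript
+import { ForeignCallHandler, ForeignCallInput, ForeignCallOutput } from '@noir-lang/noir_js';
+
+const handler: ForeignCallHandler = async (
+  name: string,
+  inputs: ForeignCallInput[],
+): Promise<ForeignCallOutput[]> => {
+  if (name === 'get_sqrt') {
+    // Each input is an array of hex-encoded field elements.
+    const value = BigInt(inputs[0][0]);
+    const sqrt = sqrtBigInt(value);
+    // Outputs use the same hex encoding as the inputs.
+    return [['0x' + sqrt.toString(16)]];
+  }
+  throw new Error(`Unexpected foreign call: ${name}`);
+};
+
+// Integer square root helper (illustrative).
+function sqrtBigInt(n: bigint): bigint {
+  let x = n;
+  let y = (x + 1n) / 2n;
+  while (y < x) {
+    x = y;
+    y = (x + n / x) / 2n;
+  }
+  return x;
+}
+```
+
+A handler like this can then be passed as the optional `foreignCallHandler` argument of `Noir.execute`.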
+ +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 00000000000..dd95809186a --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 00000000000..b71fb78a946 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 00000000000..258c46f9d0c --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs new file mode 100644 index 00000000000..b3156097df6 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type 
Aliases","items":[{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ErrorWithPayload","label":"ErrorWithPayload"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"reference/NoirJS/noir_js/functions/and","label":"and"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/.nojekyll b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/compile.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/compile.md new file mode 100644 index 00000000000..6faf763b37f --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/compile.md @@ -0,0 +1,51 @@ +# compile() + +```ts +compile( + fileManager, + projectPath?, + logFn?, +debugLogFn?): Promise +``` + +Compiles a Noir project + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `fileManager` | `FileManager` | The file manager to use | +| `projectPath`? | `string` | The path to the project inside the file manager. Defaults to the root of the file manager | +| `logFn`? | `LogFn` | A logging function. If not provided, console.log will be used | +| `debugLogFn`? | `LogFn` | A debug logging function. 
If not provided, logFn will be used | + +## Returns + +`Promise`\<[`ProgramCompilationArtifacts`](../index.md#programcompilationartifacts)\> + +## Example + +```typescript +// Node.js + +import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager(myProjectPath); +const myCompiledCode = await compile_program(fm); +``` + +```typescript +// Browser + +import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager('/'); +for (const path of files) { + await fm.writeFile(path, await getFileAsStream(path)); +} +const myCompiledCode = await compile_program(fm); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/compile_contract.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/compile_contract.md new file mode 100644 index 00000000000..7d0b39a43ef --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/compile_contract.md @@ -0,0 +1,51 @@ +# compile\_contract() + +```ts +compile_contract( + fileManager, + projectPath?, + logFn?, +debugLogFn?): Promise +``` + +Compiles a Noir project + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `fileManager` | `FileManager` | The file manager to use | +| `projectPath`? | `string` | The path to the project inside the file manager. Defaults to the root of the file manager | +| `logFn`? | `LogFn` | A logging function. If not provided, console.log will be used | +| `debugLogFn`? | `LogFn` | A debug logging function. If not provided, logFn will be used | + +## Returns + +`Promise`\<[`ContractCompilationArtifacts`](../index.md#contractcompilationartifacts)\> + +## Example + +```typescript +// Node.js + +import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager(myProjectPath); +const myCompiledCode = await compile_contract(fm); +``` + +```typescript +// Browser + +import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager('/'); +for (const path of files) { + await fm.writeFile(path, await getFileAsStream(path)); +} +const myCompiledCode = await compile_contract(fm); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/createFileManager.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/createFileManager.md new file mode 100644 index 00000000000..7e65c1d69c7 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/createFileManager.md @@ -0,0 +1,21 @@ +# createFileManager() + +```ts +createFileManager(dataDir): FileManager +``` + +Creates a new FileManager instance based on fs in node and memfs in the browser (via webpack alias) + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `dataDir` | `string` | root of the file system | + +## Returns + +`FileManager` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md 
b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md new file mode 100644 index 00000000000..fcea9275341 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md @@ -0,0 +1,21 @@ +# inflateDebugSymbols() + +```ts +inflateDebugSymbols(debugSymbols): any +``` + +Decompresses and decodes the debug symbols + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `debugSymbols` | `string` | The base64 encoded debug symbols | + +## Returns + +`any` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/index.md b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/index.md new file mode 100644 index 00000000000..b6e0f9d1bc0 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/index.md @@ -0,0 +1,49 @@ +# noir_wasm + +## Exports + +### Functions + +| Function | Description | +| :------ | :------ | +| [compile](functions/compile.md) | Compiles a Noir project | +| [compile\_contract](functions/compile_contract.md) | Compiles a Noir project | +| [createFileManager](functions/createFileManager.md) | Creates a new FileManager instance based on fs in node and memfs in the browser (via webpack alias) | +| [inflateDebugSymbols](functions/inflateDebugSymbols.md) | Decompresses and decodes the debug symbols | + +## References + +### compile\_program + +Renames and re-exports [compile](functions/compile.md) + +## Interfaces + +### ContractCompilationArtifacts + +The compilation artifacts of a given contract. + +#### Properties + +| Property | Type | Description | +| :------ | :------ | :------ | +| `contract` | `ContractArtifact` | The compiled contract. | +| `warnings` | `unknown`[] | Compilation warnings. | + +*** + +### ProgramCompilationArtifacts + +The compilation artifacts of a given program. + +#### Properties + +| Property | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | not part of the compilation output, injected later | +| `program` | `ProgramArtifact` | The compiled contract. | +| `warnings` | `unknown`[] | Compilation warnings. 
| + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs new file mode 100644 index 00000000000..e0870710349 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"doc","id":"reference/NoirJS/noir_wasm/index","label":"API"},{"type":"category","label":"Functions","items":[{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/compile","label":"compile"},{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/compile_contract","label":"compile_contract"},{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/createFileManager","label":"createFileManager"},{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/inflateDebugSymbols","label":"inflateDebugSymbols"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/reference/_category_.json b/docs/versioned_docs/version-v0.32.0/reference/_category_.json new file mode 100644 index 00000000000..5b6a20a609a --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 4, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.32.0/reference/debugger/_category_.json b/docs/versioned_docs/version-v0.32.0/reference/debugger/_category_.json new file mode 100644 index 00000000000..27869205ad3 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/debugger/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Debugger", + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_known_limitations.md b/docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_known_limitations.md new file mode 100644 index 00000000000..936d416ac4b --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_known_limitations.md @@ -0,0 +1,59 @@ +--- +title: Known limitations +description: + An overview of known limitations of the current version of the Noir debugger +keywords: + [ + Nargo, + Noir Debugger, + VS Code, + ] +sidebar_position: 2 +--- + +# Debugger Known Limitations + +There are currently some limits to what the debugger can observe. + +## Mutable references + +The debugger is currently blind to any state mutated via a mutable reference. For example, in: + +``` +let mut x = 1; +let y = &mut x; +*y = 2; +``` + +The update on `x` will not be observed by the debugger. That means, when running `vars` from the debugger REPL, or inspecting the _local variables_ pane in the VS Code debugger, `x` will appear with value 1 despite having executed `*y = 2;`. + +## Variables of type function or mutable references are opaque + +When inspecting variables, any variable of type `Function` or `MutableReference` will render its value as `<>` or `<>`. + +## Debugger instrumentation affects resulting ACIR + +In order to make the state of local variables observable, the debugger compiles Noir circuits interleaving foreign calls that track any mutations to them. 
While this works (except in the cases described above) and doesn't introduce any behavior changes, it does as a side effect produce bigger bytecode. In particular, when running the command `opcodes` on the REPL debugger, you will notice Unconstrained VM blocks that look like this: + +``` +... +5 BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [], q_c: 2 }), Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(2))], q_c: 0 })] + | outputs=[] + 5.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 5.1 | Mov { destination: RegisterIndex(3), source: RegisterIndex(1) } + 5.2 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 5.3 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 5.4 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 5.5 | Mov { destination: RegisterIndex(3), source: RegisterIndex(3) } + 5.6 | Call { location: 8 } + 5.7 | Stop + 5.8 | ForeignCall { function: "__debug_var_assign", destinations: [], inputs: [RegisterIndex(RegisterIndex(2)), RegisterIndex(RegisterIndex(3))] } +... +``` + +If you are interested in debugging/inspecting compiled ACIR without these synthetic changes, you can invoke the REPL debugger with the `--skip-instrumentation` flag or launch the VS Code debugger with the `skipConfiguration` property set to true in its launch configuration. You can find more details about those in the [Debugger REPL reference](debugger_repl.md) and the [VS Code Debugger reference](debugger_vscode.md). + +:::note +Skipping debugger instrumentation means you won't be able to inspect values of local variables. +::: + diff --git a/docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_repl.md b/docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_repl.md new file mode 100644 index 00000000000..46e2011304e --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_repl.md @@ -0,0 +1,360 @@ +--- +title: REPL Debugger +description: + Noir Debugger REPL options and commands. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + REPL, + ] +sidebar_position: 1 +--- + +## Running the REPL debugger + +`nargo debug [OPTIONS] [WITNESS_NAME]` + +Runs the Noir REPL debugger. If a `WITNESS_NAME` is provided the debugger writes the resulting execution witness to a `WITNESS_NAME` file. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover]| +| `--package ` | The name of the package to debug | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +None of these options are required. + +:::note +Since the debugger starts by compiling the target package, all Noir compiler options are also available. Check out the [compiler reference](../nargo_commands.md#nargo-compile) to learn more about the compiler options. +::: + +## REPL commands + +Once the debugger is running, it accepts the following commands. + +#### `help` (h) + +Displays the menu of available commands. 
+ +``` +> help +Available commands: + + opcodes display ACIR opcodes + into step into to the next opcode + next step until a new source location is reached + out step until a new source location is reached + and the current stack frame is finished + break LOCATION:OpcodeLocation add a breakpoint at an opcode location + over step until a new source location is reached + without diving into function calls + restart restart the debugging session + delete LOCATION:OpcodeLocation delete breakpoint at an opcode location + witness show witness map + witness index:u32 display a single witness from the witness map + witness index:u32 value:String update a witness with the given value + memset index:usize value:String update a memory cell with the given + value + continue continue execution until the end of the + program + vars show variable values available at this point + in execution + stacktrace display the current stack trace + memory show memory (valid when executing unconstrained code) value + step step to the next ACIR opcode + +Other commands: + + help Show this help message + quit Quit repl + +``` + +### Stepping through programs + +#### `next` (n) + +Step until the next Noir source code location. While other commands, such as [`into`](#into-i) and [`step`](#step-s), allow for finer grained control of the program's execution at the opcode level, `next` is source code centric. For example: + +``` +3 ... +4 fn main(x: u32) { +5 assert(entry_point(x) == 2); +6 swap_entry_point(x, x + 1); +7 -> assert(deep_entry_point(x) == 4); +8 multiple_values_entry_point(x); +9 } +``` + + +Using `next` here would cause the debugger to jump to the definition of `deep_entry_point` (if available). + +If you want to step over `deep_entry_point` and go straight to line 8, use [the `over` command](#over) instead. + +#### `over` + +Step until the next source code location, without diving into function calls. For example: + +``` +3 ... +4 fn main(x: u32) { +5 assert(entry_point(x) == 2); +6 swap_entry_point(x, x + 1); +7 -> assert(deep_entry_point(x) == 4); +8 multiple_values_entry_point(x); +9 } +``` + + +Using `over` here would cause the debugger to execute until line 8 (`multiple_values_entry_point(x);`). + +If you want to step into `deep_entry_point` instead, use [the `next` command](#next-n). + +#### `out` + +Step until the end of the current function call. For example: + +``` + 3 ... + 4 fn main(x: u32) { + 5 assert(entry_point(x) == 2); + 6 swap_entry_point(x, x + 1); + 7 -> assert(deep_entry_point(x) == 4); + 8 multiple_values_entry_point(x); + 9 } + 10 + 11 unconstrained fn returns_multiple_values(x: u32) -> (u32, u32, u32, u32) { + 12 ... + ... + 55 + 56 unconstrained fn deep_entry_point(x: u32) -> u32 { + 57 -> level_1(x + 1) + 58 } + +``` + +Running `out` here will resume execution until line 8. + +#### `step` (s) + +Skips to the next ACIR code. A compiled Noir program is a sequence of ACIR opcodes. However, an unconstrained VM opcode denotes the start of an unconstrained code block, to be executed by the unconstrained VM. 
For example (redacted for brevity): + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +The `->` here shows the debugger paused at an ACIR opcode: `BRILLIG`, at index 1, which denotes an unconstrained code block is about to start. + +Using the `step` command at this point would result in the debugger stopping at ACIR opcode 2, `EXPR`, skipping unconstrained computation steps. + +Use [the `into` command](#into-i) instead if you want to follow unconstrained computation step by step. + +#### `into` (i) + +Steps into the next opcode. A compiled Noir program is a sequence of ACIR opcodes. However, a BRILLIG opcode denotes the start of an unconstrained code block, to be executed by the unconstrained VM. For example (redacted for brevity): + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +The `->` here shows the debugger paused at an ACIR opcode: `BRILLIG`, at index 1, which denotes an unconstrained code block is about to start. + +Using the `into` command at this point would result in the debugger stopping at opcode 1.0, `Mov ...`, allowing the debugger user to follow unconstrained computation step by step. + +Use [the `step` command](#step-s) instead if you want to skip to the next ACIR code directly. + +#### `continue` (c) + +Continues execution until the next breakpoint, or the end of the program. + +#### `restart` (res) + +Interrupts execution, and restarts a new debugging session from scratch. + +#### `opcodes` (o) + +Display the program's ACIR opcode sequence. For example: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +### Breakpoints + +#### `break [Opcode]` (or shorthand `b [Opcode]`) + +Sets a breakpoint on the specified opcode index. To get a list of the program opcode numbers, see [the `opcode` command](#opcodes-o). 
For example: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +In this example, issuing a `break 1.2` command adds break on opcode 1.2, as denoted by the `*` character: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | * Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +Running [the `continue` command](#continue-c) at this point would cause the debugger to execute the program until opcode 1.2. + +#### `delete [Opcode]` (or shorthand `d [Opcode]`) + +Deletes a breakpoint at an opcode location. Usage is analogous to [the `break` command](#). + +### Variable inspection + +#### vars + +Show variable values available at this point in execution. + +:::note +The ability to inspect variable values from the debugger depends on compilation to be run in a special debug instrumentation mode. This instrumentation weaves variable tracing code with the original source code. + +So variable value inspection comes at the expense of making the resulting ACIR bytecode bigger and harder to understand and optimize. + +If you find this compromise unacceptable, you can run the debugger with the flag `--skip-debug-instrumentation`. This will compile your circuit without any additional debug information, so the resulting ACIR bytecode will be identical to the one produced by standard Noir compilation. However, if you opt for this, the `vars` command will not be available while debugging. +::: + + +### Stacktrace + +#### `stacktrace` + +Displays the current stack trace. + + +### Witness map + +#### `witness` (w) + +Show witness map. For example: + +``` +_0 = 0 +_1 = 2 +_2 = 1 +``` + +#### `witness [Witness Index]` + +Display a single witness from the witness map. For example: + +``` +> witness 1 +_1 = 2 +``` + +#### `witness [Witness Index] [New value]` + +Overwrite the given index with a new value. For example: + +``` +> witness 1 3 +_1 = 3 +``` + + +### Unconstrained VM memory + +#### `memory` + +Show unconstrained VM memory state. For example: + +``` +> memory +At opcode 1.13: Store { destination_pointer: RegisterIndex(0), source: RegisterIndex(3) } +... +> registers +0 = 0 +1 = 10 +2 = 0 +3 = 1 +4 = 1 +5 = 2³² +6 = 1 +> into +At opcode 1.14: Const { destination: RegisterIndex(5), value: Value { inner: 1 } } +... +> memory +0 = 1 +> +``` + +In the example above: we start with clean memory, then step through a `Store` opcode which stores the value of register 3 (1) into the memory address stored in register 0 (0). Thus now `memory` shows memory address 0 contains value 1. 
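+
+To give a sense of where this memory comes from: any `unconstrained` Noir function that builds or mutates compound values (arrays, vectors, etc.) allocates unconstrained VM (Brillig) memory, which is exactly what the `memory` and `memset` commands operate on. A minimal, hypothetical snippet of that kind of code (not the program from the transcript above) could look like:
+
+```rust
+unconstrained fn bump_first(xs: [Field; 4]) -> Field {
+    // The mutable copy of the array lives in unconstrained VM memory,
+    // so its cells are what the `memory` command displays while stepping here.
+    let mut copy = xs;
+    copy[0] += 1;
+    copy[0]
+}
+```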
+ +:::note +This command is only functional while the debugger is executing unconstrained code. +::: + +#### `memset [Memory address] [New value]` + +Update a memory cell with the given value. For example: + +``` +> memory +0 = 1 +> memset 0 2 +> memory +0 = 2 +> memset 1 4 +> memory +0 = 2 +1 = 4 +> +``` + +:::note +This command is only functional while the debugger is executing unconstrained code. +::: \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_vscode.md b/docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_vscode.md new file mode 100644 index 00000000000..c027332b3b0 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/debugger/debugger_vscode.md @@ -0,0 +1,82 @@ +--- +title: VS Code Debugger +description: + VS Code Debugger configuration and features. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + VS Code, + IDE, + ] +sidebar_position: 0 +--- + +# VS Code Noir Debugger Reference + +The Noir debugger enabled by the vscode-noir extension ships with default settings such that the most common scenario should run without any additional configuration steps. + +These defaults can nevertheless be overridden by defining a launch configuration file. This page provides a reference for the properties you can override via a launch configuration file, as well as documenting the Nargo `dap` command, which is a dependency of the VS Code Noir debugger. + + +## Creating and editing launch configuration files + +To create a launch configuration file from VS Code, open the _debug pane_, and click on _create a launch.json file_. + +![Creating a launch configuration file](@site/static/img/debugger/ref1-create-launch.png) + +A `launch.json` file will be created, populated with basic defaults. + +### Noir Debugger launch.json properties + +#### projectFolder + +_String, optional._ + +Absolute path to the Nargo project to debug. By default, it is dynamically determined by looking for the nearest `Nargo.toml` file to the active file at the moment of launching the debugger. + +#### proverName + +_String, optional._ + +Name of the prover input to use. Defaults to `Prover`, which looks for a file named `Prover.toml` at the `projectFolder`. + +#### generateAcir + +_Boolean, optional._ + +If true, generate ACIR opcodes instead of unconstrained opcodes which will be closer to release binaries but less convenient for debugging. Defaults to `false`. + +#### skipInstrumentation + +_Boolean, optional._ + +Skips variables debugging instrumentation of code, making debugging less convenient but the resulting binary smaller and closer to production. Defaults to `false`. + +:::note +Skipping instrumentation causes the debugger to be unable to inspect local variables. +::: + +## `nargo dap [OPTIONS]` + +When run without any option flags, it starts the Nargo Debug Adapter Protocol server, which acts as the debugging backend for the VS Code Noir Debugger. + +All option flags are related to preflight checks. The Debug Adapter Protocol specifies how errors are to be informed from a running DAP server, but it doesn't specify mechanisms to communicate server initialization errors between the DAP server and its client IDE. + +Thus `nargo dap` ships with a _preflight check_ mode. If flag `--preflight-check` and the rest of the `--preflight-*` flags are provided, Nargo will run the same initialization routine except it will not start the DAP server. 
+ +`vscode-noir` will then run `nargo dap` in preflight check mode first before a debugging session starts. If the preflight check ends in error, vscode-noir will present stderr and stdout output from this process through its own Output pane in VS Code. This makes it possible for users to diagnose what pieces of configuration might be wrong or missing in case of initialization errors. + +If the preflight check succeeds, `vscode-noir` proceeds to start the DAP server normally but running `nargo dap` without any additional flags. + +### Options + +| Option | Description | +| --------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | +| `--preflight-check` | If present, dap runs in preflight check mode. | +| `--preflight-project-folder ` | Absolute path to the project to debug for preflight check. | +| `--preflight-prover-name ` | Name of prover file to use for preflight check | +| `--preflight-generate-acir` | Optional. If present, compile in ACIR mode while running preflight check. | +| `--preflight-skip-instrumentation` | Optional. If present, compile without introducing debug instrumentation while running preflight check. | +| `-h, --help` | Print help. | diff --git a/docs/versioned_docs/version-v0.32.0/reference/nargo_commands.md b/docs/versioned_docs/version-v0.32.0/reference/nargo_commands.md new file mode 100644 index 00000000000..2d6defc5a44 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/reference/nargo_commands.md @@ -0,0 +1,271 @@ +--- +title: Nargo +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +sidebar_position: 0 +--- + +# Command-Line Help for `nargo` + +This document contains the help content for the `nargo` command-line program. 
+ +**Command Overview:** + +* [`nargo`↴](#nargo) +* [`nargo check`↴](#nargo-check) +* [`nargo fmt`↴](#nargo-fmt) +* [`nargo compile`↴](#nargo-compile) +* [`nargo new`↴](#nargo-new) +* [`nargo init`↴](#nargo-init) +* [`nargo execute`↴](#nargo-execute) +* [`nargo debug`↴](#nargo-debug) +* [`nargo test`↴](#nargo-test) +* [`nargo info`↴](#nargo-info) +* [`nargo lsp`↴](#nargo-lsp) + +## `nargo` + +Noir's package manager + +**Usage:** `nargo ` + +###### **Subcommands:** + +* `check` — Checks the constraint system for errors +* `fmt` — Format the Noir files in a workspace +* `compile` — Compile the program and its secret execution trace into ACIR format +* `new` — Create a Noir project in a new directory +* `init` — Create a Noir project in the current directory +* `execute` — Executes a circuit to calculate its return value +* `debug` — Executes a circuit in debug mode +* `test` — Run the tests for this program +* `info` — Provides detailed information on each of a program's function (represented by a single circuit) +* `lsp` — Starts the Noir LSP server + +###### **Options:** + + + + +## `nargo check` + +Checks the constraint system for errors + +**Usage:** `nargo check [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to check +* `--workspace` — Check all packages in the workspace +* `--overwrite` — Force overwrite of existing files +* `--expression-width ` — Specify the backend expression width that should be targeted +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--debug-comptime-in-file ` — Enable printing results of comptime evaluation: provide a path suffix for the module to debug, e.g. "package_name/src/main.nr" + + + +## `nargo fmt` + +Format the Noir files in a workspace + +**Usage:** `nargo fmt [OPTIONS]` + +###### **Options:** + +* `--check` — Run noirfmt in check mode + + + +## `nargo compile` + +Compile the program and its secret execution trace into ACIR format + +**Usage:** `nargo compile [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to compile +* `--workspace` — Compile all packages in the workspace +* `--expression-width ` — Specify the backend expression width that should be targeted +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--debug-comptime-in-file ` — Enable printing results of comptime evaluation: provide a path suffix for the module to debug, e.g. 
"package_name/src/main.nr" + + + +## `nargo new` + +Create a Noir project in a new directory + +**Usage:** `nargo new [OPTIONS] ` + +###### **Arguments:** + +* `` — The path to save the new project + +###### **Options:** + +* `--name ` — Name of the package [default: package directory name] +* `--lib` — Use a library template +* `--bin` — Use a binary template [default] +* `--contract` — Use a contract template + + + +## `nargo init` + +Create a Noir project in the current directory + +**Usage:** `nargo init [OPTIONS]` + +###### **Options:** + +* `--name ` — Name of the package [default: current directory name] +* `--lib` — Use a library template +* `--bin` — Use a binary template [default] +* `--contract` — Use a contract template + + + +## `nargo execute` + +Executes a circuit to calculate its return value + +**Usage:** `nargo execute [OPTIONS] [WITNESS_NAME]` + +###### **Arguments:** + +* `` — Write the execution witness to named file + +###### **Options:** + +* `-p`, `--prover-name ` — The name of the toml file which contains the inputs for the prover + + Default value: `Prover` +* `--package ` — The name of the package to execute +* `--workspace` — Execute all packages in the workspace +* `--expression-width ` — Specify the backend expression width that should be targeted +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--debug-comptime-in-file ` — Enable printing results of comptime evaluation: provide a path suffix for the module to debug, e.g. "package_name/src/main.nr" +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo debug` + +Executes a circuit in debug mode + +**Usage:** `nargo debug [OPTIONS] [WITNESS_NAME]` + +###### **Arguments:** + +* `` — Write the execution witness to named file + +###### **Options:** + +* `-p`, `--prover-name ` — The name of the toml file which contains the inputs for the prover + + Default value: `Prover` +* `--package ` — The name of the package to execute +* `--expression-width ` — Specify the backend expression width that should be targeted +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--debug-comptime-in-file ` — Enable printing results of comptime evaluation: provide a path suffix for the module to debug, e.g. 
"package_name/src/main.nr" +* `--acir-mode` — Force ACIR output (disabling instrumentation) +* `--skip-instrumentation ` — Disable vars debug instrumentation (enabled by default) + + Possible values: `true`, `false` + + + + +## `nargo test` + +Run the tests for this program + +**Usage:** `nargo test [OPTIONS] [TEST_NAME]` + +###### **Arguments:** + +* `` — If given, only tests with names containing this string will be run + +###### **Options:** + +* `--show-output` — Display output of `println` statements +* `--exact` — Only run tests that match exactly +* `--package ` — The name of the package to test +* `--workspace` — Test all packages in the workspace +* `--expression-width ` — Specify the backend expression width that should be targeted +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--debug-comptime-in-file ` — Enable printing results of comptime evaluation: provide a path suffix for the module to debug, e.g. "package_name/src/main.nr" +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo info` + +Provides detailed information on each of a program's function (represented by a single circuit) + +Current information provided per circuit: 1. The number of ACIR opcodes 2. Counts the final number gates in the circuit used by a backend + +**Usage:** `nargo info [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to detail +* `--workspace` — Detail all packages in the workspace +* `--expression-width ` — Specify the backend expression width that should be targeted +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--debug-comptime-in-file ` — Enable printing results of comptime evaluation: provide a path suffix for the module to debug, e.g. "package_name/src/main.nr" + + + +## `nargo lsp` + +Starts the Noir LSP server + +Starts an LSP server which allows IDEs such as VS Code to display diagnostics in Noir source. + +VS Code Noir Language Support: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir + +**Usage:** `nargo lsp` + + + +
+ + + This document was generated automatically by + clap-markdown. + + diff --git a/docs/versioned_docs/version-v0.32.0/tooling/debugger.md b/docs/versioned_docs/version-v0.32.0/tooling/debugger.md new file mode 100644 index 00000000000..9b7565ba9ff --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/tooling/debugger.md @@ -0,0 +1,26 @@ +--- +title: Debugger +description: Learn about the Noir Debugger, in its REPL or VS Code versions. +keywords: [Nargo, VSCode, Visual Studio Code, REPL, Debugger] +sidebar_position: 2 +--- + +# Noir Debugger + +There are currently two ways of debugging Noir programs: + +1. From VS Code, via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). +2. Via the REPL debugger, which ships with Nargo. + +In order to use either version of the debugger, you will need to install recent enough versions of Noir, [Nargo](../getting_started/installation/index.md) and vscode-noir: + +- Noir & Nargo ≥0.28.0 +- Noir's VS Code extension ≥0.0.11 + +:::info +At the moment, the debugger supports debugging binary projects, but not contracts. +::: + +We cover the VS Code Noir debugger more in depth in [its VS Code debugger how-to guide](../how_to/debugger/debugging_with_vs_code.md) and [the reference](../reference/debugger/debugger_vscode.md). + +The REPL debugger is discussed at length in [the REPL debugger how-to guide](../how_to/debugger/debugging_with_the_repl.md) and [the reference](../reference/debugger/debugger_repl.md). diff --git a/docs/versioned_docs/version-v0.32.0/tooling/language_server.md b/docs/versioned_docs/version-v0.32.0/tooling/language_server.md new file mode 100644 index 00000000000..81e0356ef8a --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/tooling/language_server.md @@ -0,0 +1,43 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +sidebar_position: 0 +--- + +This section helps you install and configure the Noir Language Server. + +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. + +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! + +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). 
+ +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. + +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/versioned_docs/version-v0.32.0/tooling/testing.md b/docs/versioned_docs/version-v0.32.0/tooling/testing.md new file mode 100644 index 00000000000..866677da567 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/tooling/testing.md @@ -0,0 +1,79 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +sidebar_position: 1 +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. 
For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = ""`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} +``` + +The string given to `should_fail_with` doesn't need to exactly match the failure reason, it just needs to be a substring of it: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "airspeed velocity")] +fn test_bridgekeeper() { + main(32); +} +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.32.0/tutorials/noirjs_app.md b/docs/versioned_docs/version-v0.32.0/tutorials/noirjs_app.md new file mode 100644 index 00000000000..cbb1938a5c6 --- /dev/null +++ b/docs/versioned_docs/version-v0.32.0/tutorials/noirjs_app.md @@ -0,0 +1,327 @@ +--- +title: Building a web app with NoirJS +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment. +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs, app] +sidebar_position: 0 +pagination_next: noir/concepts/data_types/index +--- + +NoirJS is a set of packages meant to work both in a browser and a server environment. In this tutorial, we will build a simple web app using them. From here, you should get an idea on how to proceed with your own Noir projects! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Setup + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.27.x matches `noir_js@0.27.x`, etc. + +In this guide, we will be pinned to 0.27.0. + +::: + +Before we start, we want to make sure we have Node and Nargo installed. + +We start by opening a terminal and executing `node --version`. If we don't get an output like `v20.10.0`, that means node is not installed. Let's do that by following the handy [nvm guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script). + +As for `Nargo`, we can follow the [Nargo guide](../getting_started/installation/index.md) to install it. If you're lazy, just paste this on a terminal and run `noirup`: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Easy enough. Onwards! + +## Our project + +ZK is a powerful technology. An app that doesn't reveal one of the inputs to _anyone_ is almost unbelievable, yet Noir makes it as easy as a single line of code. + +In fact, it's so simple that it comes nicely packaged in `nargo`. Let's do that! + +### Nargo + +Run: + +`nargo new circuit` + +And... That's about it. Your program is ready to be compiled and run. + +To compile, let's `cd` into the `circuit` folder to enter our project, and call: + +`nargo compile` + +This compiles our circuit into `json` format and add it to a new `target` folder. 
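+
+For reference, the circuit that `nargo new` generated for us lives in `circuit/src/main.nr`. It should look roughly like this (the exact template may vary slightly between Nargo versions, but the signature matches the one we rely on later in this guide):
+
+```rust
+// `x` is a private input, `y` is a public input.
+// The program proves that we know some `x` that differs from `y`.
+fn main(x: Field, y: pub Field) {
+    assert(x != y);
+}
+```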
+ +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit <---- our working directory + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +::: + +### Node and Vite + +If you want to explore Nargo, feel free to go on a side-quest now and follow the steps in the +[getting started](../getting_started/hello_noir/index.md) guide. However, we want our app to run on the browser, so we need Vite. + +Vite is a powerful tool to generate static websites. While it provides all kinds of features, let's just go barebones with some good old vanilla JS. + +To do this this, go back to the previous folder (`cd ..`) and create a new vite project by running `npm create vite` and choosing "Vanilla" and "Javascript". + +A wild `vite-project` directory should now appear in your root folder! Let's not waste any time and dive right in: + +```bash +cd vite-project +``` + +### Setting Up Vite and Configuring the Project + +Before we proceed with any coding, let's get our environment tailored for Noir. We'll start by laying down the foundations with a `vite.config.js` file. This little piece of configuration is our secret sauce for making sure everything meshes well with the NoirJS libraries and other special setups we might need, like handling WebAssembly modules. Here’s how you get that going: + +#### Creating the vite.config.js + +In your freshly minted `vite-project` folder, create a new file named `vite.config.js` and open it in your code editor. Paste the following to set the stage: + +```javascript +import { defineConfig } from "vite"; +import copy from "rollup-plugin-copy"; + +export default defineConfig({ + esbuild: { + target: "esnext", + }, + optimizeDeps: { + esbuildOptions: { + target: "esnext", + }, + }, + plugins: [ + copy({ + targets: [ + { src: "node_modules/**/*.wasm", dest: "node_modules/.vite/dist" }, + ], + copySync: true, + hook: "buildStart", + }), + ], + server: { + port: 3000, + }, +}); +``` + +#### Install Dependencies + +Now that our stage is set, install the necessary NoirJS packages along with our other dependencies: + +```bash +npm install && npm install @noir-lang/backend_barretenberg@0.27.0 @noir-lang/noir_js@0.27.0 +npm install rollup-plugin-copy --save-dev +``` + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit + └── ...etc... +└── vite-project <---- our working directory + └── ...etc... +``` + +::: + +#### Some cleanup + +`npx create vite` is amazing but it creates a bunch of files we don't really need for our simple example. Actually, let's just delete everything except for `vite.config.js`, `index.html`, `main.js` and `package.json`. I feel lighter already. + +![my heart is ready for you, noir.js](@site/static/img/memes/titanic.jpeg) + +## HTML + +Our app won't run like this, of course. We need some working HTML, at least. Let's open our broken-hearted `index.html` and replace everything with this code snippet: + +```html + + + + + + +

+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <title>Noir app</title>
+  </head>
+  <body>
+    <script type="module" src="/main.js"></script>
+    <h1>Noir app</h1>
+    <div class="input-area">
+      <input id="guessInput" type="number" placeholder="Enter your guess" />
+      <button id="submitGuess">Submit Guess</button>
+    </div>
+    <div class="outer">
+      <div id="logs" class="inner"><h2>Logs</h2></div>
+      <div id="results" class="inner"><h2>Proof</h2></div>
+    </div>
+  </body>
+</html>
+ + +``` + +It _could_ be a beautiful UI... Depending on which universe you live in. + +## Some good old vanilla Javascript + +Our love for Noir needs undivided attention, so let's just open `main.js` and delete everything (this is where the romantic scenery becomes a bit creepy). + +Start by pasting in this boilerplate code: + +```js +const setup = async () => { + await Promise.all([ + import('@noir-lang/noirc_abi').then((module) => + module.default(new URL('@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm', import.meta.url).toString()), + ), + import('@noir-lang/acvm_js').then((module) => + module.default(new URL('@noir-lang/acvm_js/web/acvm_js_bg.wasm', import.meta.url).toString()), + ), + ]); +}; + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} + +document.getElementById('submitGuess').addEventListener('click', async () => { + try { + // here's where love happens + } catch (err) { + display('logs', 'Oh 💔 Wrong guess'); + } +}); +``` + +The display function doesn't do much. We're simply manipulating our website to see stuff happening. For example, if the proof fails, it will simply log a broken heart 😢 + +As for the `setup` function, it's just a sad reminder that dealing with `wasm` on the browser is not as easy as it should. Just copy, paste, and forget. + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit + └── ...same as above +└── vite-project + ├── vite.config.js + ├── main.js + ├── package.json + └── index.html +``` + +You'll see other files and folders showing up (like `package-lock.json`, `node_modules`) but you shouldn't have to care about those. + +::: + +## Some NoirJS + +We're starting with the good stuff now. If you've compiled the circuit as described above, you should have a `json` file we want to import at the very top of our `main.js` file: + +```ts +import circuit from '../circuit/target/circuit.json'; +``` + +[Noir is backend-agnostic](../index.mdx#whats-new-about-noir). We write Noir, but we also need a proving backend. That's why we need to import and instantiate the two dependencies we installed above: `BarretenbergBackend` and `Noir`. Let's import them right below: + +```js +import { BarretenbergBackend, BarretenbergVerifier as Verifier } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +And instantiate them inside our try-catch block: + +```ts +// try { +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit); +// } +``` + +:::note + +For the remainder of the tutorial, everything will be happening inside the `try` block + +::: + +## Our app + +Now for the app itself. We're capturing whatever is in the input when people press the submit button. Just add this: + +```js +const x = parseInt(document.getElementById('guessInput').value); +const input = { x, y: 2 }; +``` + +Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: + +```js +await setup(); // let's squeeze our wasm inits here + +display('logs', 'Generating proof... ⌛'); +const { witness } = await noir.execute(input); +const proof = await backend.generateProof(witness); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm run dev`. 
If it doesn't open a browser for you, just visit `localhost:5173`. You should now see the worst UI ever, with an ugly input. + +![Getting Started 0](@site/static/img/noir_getting_started_1.png) + +Now, our circuit says `fn main(x: Field, y: pub Field)`. This means only the `y` value is public, and it's hardcoded above: `input = { x, y: 2 }`. In other words, you won't need to send your secret`x` to the verifier! + +By inputting any number other than 2 in the input box and clicking "submit", you should get a valid proof. Otherwise the proof won't even generate correctly. By the way, if you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human ❤️. + +## Verifying + +Time to celebrate, yes! But we shouldn't trust machines so blindly. Let's add these lines to see our proof being verified: + +```js +display('logs', 'Verifying proof... ⌛'); +const verificationKey = await backend.getVerificationKey(); +const verifier = new Verifier(); +const isValid = await verifier.verifyProof(proof, verificationKey); +if (isValid) display('logs', 'Verifying proof... ✅'); +``` + +You have successfully generated a client-side Noir web app! + +![coded app without math knowledge](@site/static/img/memes/flextape.jpeg) + +## Further Reading + +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. + +You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_sidebars/version-v0.32.0-sidebars.json b/docs/versioned_sidebars/version-v0.32.0-sidebars.json new file mode 100644 index 00000000000..b9ad026f69f --- /dev/null +++ b/docs/versioned_sidebars/version-v0.32.0-sidebars.json @@ -0,0 +1,93 @@ +{ + "sidebar": [ + { + "type": "doc", + "id": "index" + }, + { + "type": "category", + "label": "Getting Started", + "items": [ + { + "type": "autogenerated", + "dirName": "getting_started" + } + ] + }, + { + "type": "category", + "label": "The Noir Language", + "items": [ + { + "type": "autogenerated", + "dirName": "noir" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "category", + "label": "How To Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "how_to" + } + ] + }, + { + "type": "category", + "label": "Explainers", + "items": [ + { + "type": "autogenerated", + "dirName": "explainers" + } + ] + }, + { + "type": "category", + "label": "Tutorials", + "items": [ + { + "type": "autogenerated", + "dirName": "tutorials" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "autogenerated", + "dirName": "reference" + } + ] + }, + { + "type": "category", + "label": "Tooling", + "items": [ + { + "type": "autogenerated", + "dirName": "tooling" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/noir_stdlib/src/collections/mod.nr b/noir_stdlib/src/collections/mod.nr index 2d952f4d6cd..29f3e8cc854 100644 --- a/noir_stdlib/src/collections/mod.nr +++ b/noir_stdlib/src/collections/mod.nr @@ -1,3 +1,4 @@ mod vec; mod bounded_vec; mod map; +mod umap; diff --git a/noir_stdlib/src/collections/umap.nr b/noir_stdlib/src/collections/umap.nr new file mode 100644 index 00000000000..fe16ef6bca2 --- /dev/null +++ b/noir_stdlib/src/collections/umap.nr @@ -0,0 +1,469 @@ +use crate::cmp::Eq; +use crate::collections::vec::Vec; +use crate::option::Option; +use crate::default::Default; +use crate::hash::{Hash, Hasher, BuildHasher, BuildHasherDefault}; +use crate::hash::poseidon2::Poseidon2; +use crate::collections::bounded_vec::BoundedVec; + +// An unconstrained hash table with open addressing and quadratic probing. +// Note that "unconstrained" here means that almost all operations on this +// map are unconstrained and importantly are not constrained afterward either. +// This map is meant to be used in unconstrained or comptime code where this +// is not an issue. +// +// Compared to the constrained HashMap type, UHashMap can grow automatically +// as needed and is more efficient since it can break out of loops early. +struct UHashMap { + _table: [Slot], + + // Amount of valid elements in the map. + _len: u32, + + _build_hasher: B +} + +// Data unit in the UHashMap table. +// In case Noir adds support for enums in the future, this +// should be refactored to have three states: +// 1. (key, value) +// 2. (empty) +// 3. (deleted) +struct Slot { + _key_value: Option<(K, V)>, + _is_deleted: bool, +} + +impl Default for Slot{ + fn default() -> Self{ + Slot{ + _key_value: Option::none(), + _is_deleted: false + } + } +} + +impl Slot { + fn is_valid(self) -> bool { + !self._is_deleted & self._key_value.is_some() + } + + fn is_available(self) -> bool { + self._is_deleted | self._key_value.is_none() + } + + fn key_value(self) -> Option<(K, V)> { + self._key_value + } + + fn key_value_unchecked(self) -> (K, V) { + self._key_value.unwrap_unchecked() + } + + fn set(&mut self, key: K, value: V) { + self._key_value = Option::some((key, value)); + self._is_deleted = false; + } + + // Shall not override `_key_value` with Option::none(), + // because we must be able to differentiate empty + // and deleted slots for lookup. + fn mark_deleted(&mut self) { + self._is_deleted = true; + } +} + +// While conducting lookup, we iterate attempt from 0 to N - 1 due to heuristic, +// that if we have went that far without finding desired, +// it is very unlikely to be after - performance will be heavily degraded. +impl UHashMap { + // Creates a new instance of UHashMap with specified BuildHasher. + // docs:start:with_hasher + pub fn with_hasher(_build_hasher: B) -> Self + where + B: BuildHasher { + // docs:end:with_hasher + let _table = &[Slot::default()]; + let _len = 0; + Self { _table, _len, _build_hasher } + } + + pub fn with_hasher_and_capacity(_build_hasher: B, capacity: u32) -> Self + where + B: BuildHasher { + // docs:end:with_hasher + let mut _table = &[]; + for _ in 0..capacity { + _table = _table.push_back(Slot::default()); + } + let _len = 0; + Self { _table, _len, _build_hasher } + } + + // Clears the map, removing all key-value entries. 
+ // docs:start:clear + pub fn clear(&mut self) { + // docs:end:clear + self._table = &[Slot::default()]; + self._len = 0; + } + + // Returns true if the map contains a value for the specified key. + // docs:start:contains_key + pub fn contains_key( + self, + key: K + ) -> bool + where + K: Hash + Eq, + B: BuildHasher, + H: Hasher { + // docs:end:contains_key + self.get(key).is_some() + } + + // Returns true if the map contains no elements. + // docs:start:is_empty + pub fn is_empty(self) -> bool { + // docs:end:is_empty + self._len == 0 + } + + // Returns a BoundedVec of all valid entries in this UHashMap. + // The length of the returned vector will always match the length of this UHashMap. + // docs:start:entries + pub fn entries(self) -> [(K, V)] { + // docs:end:entries + let mut entries = &[]; + + for slot in self._table { + if slot.is_valid() { + // SAFETY: slot.is_valid() should ensure there is a valid key-value pairing here + let key_value = slot.key_value().unwrap_unchecked(); + entries = entries.push_back(key_value); + } + } + + let msg = f"Amount of valid elements should have been {self._len} times, but got {entries.len()}."; + assert(entries.len() == self._len, msg); + + entries + } + + // Returns a BoundedVec containing all the keys within this UHashMap. + // The length of the returned vector will always match the length of this UHashMap. + // docs:start:keys + pub fn keys(self) -> [K] { + // docs:end:keys + let mut keys = &[]; + + for slot in self._table { + if slot.is_valid() { + let (key, _) = slot.key_value_unchecked(); + keys = keys.push_back(key); + } + } + + let msg = f"Amount of valid elements should have been {self._len} times, but got {keys.len()}."; + assert(keys.len() == self._len, msg); + + keys + } + + // Returns a BoundedVec containing all the values within this UHashMap. + // The length of the returned vector will always match the length of this UHashMap. + // docs:start:values + pub fn values(self) -> [V] { + // docs:end:values + let mut values = &[]; + + for slot in self._table { + if slot.is_valid() { + let (_, value) = slot.key_value_unchecked(); + values = values.push_back(value); + } + } + + let msg = f"Amount of valid elements should have been {self._len} times, but got {values.len()}."; + assert(values.len() == self._len, msg); + + values + } + + // For each key-value entry applies mutator function. + // docs:start:iter_mut + unconstrained pub fn iter_mut( + &mut self, + f: fn(K, V) -> (K, V) + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + // docs:end:iter_mut + let mut entries = self.entries(); + let mut new_map = UHashMap::with_hasher(self._build_hasher); + + for entry in entries { + let (key, value) = f(entry.0, entry.1); + new_map.insert(key, value); + } + + self._table = new_map._table; + } + + // For each key applies mutator function. + // docs:start:iter_keys_mut + unconstrained pub fn iter_keys_mut( + &mut self, + f: fn(K) -> K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + // docs:end:iter_keys_mut + let mut entries = self.entries(); + let mut new_map = UHashMap::with_hasher(self._build_hasher); + + for entry in entries { + let (key, value) = (f(entry.0), entry.1); + new_map.insert(key, value); + } + + self._table = new_map._table; + } + + // For each value applies mutator function. 
+ // docs:start:iter_values_mut + pub fn iter_values_mut(&mut self, f: fn(V) -> V) { + // docs:end:iter_values_mut + for i in 0..self._table.len() { + let mut slot = self._table[i]; + if slot.is_valid() { + let (key, value) = slot.key_value_unchecked(); + slot.set(key, f(value)); + self._table[i] = slot; + } + } + } + + // Retains only the elements specified by the predicate. + // docs:start:retain + pub fn retain(&mut self, f: fn(K, V) -> bool) { + // docs:end:retain + for index in 0..self._table.len() { + let mut slot = self._table[index]; + if slot.is_valid() { + let (key, value) = slot.key_value_unchecked(); + if !f(key, value) { + slot.mark_deleted(); + self._len -= 1; + self._table[index] = slot; + } + } + } + } + + // Amount of active key-value entries. + // docs:start:len + pub fn len(self) -> u32 { + // docs:end:len + self._len + } + + // Get the current capacity of the inner table. + // docs:start:capacity + pub fn capacity(self: Self) -> u32 { + // docs:end:capacity + self._table.len() + } + + // Get the value by key. If it does not exist, returns none(). + // docs:start:get + unconstrained pub fn get( + self, + key: K + ) -> Option + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + // docs:end:get + let mut result = Option::none(); + + let hash = self.hash(key); + + for attempt in 0..self._table.len() { + let index = self.quadratic_probe(hash, attempt as u32); + let slot = self._table[index]; + + // Not marked as deleted and has key-value. + if slot.is_valid() { + let (current_key, value) = slot.key_value_unchecked(); + if current_key == key { + result = Option::some(value); + break; + } + } + } + + result + } + + // Insert key-value entry. In case key was already present, value is overridden. + // docs:start:insert + unconstrained pub fn insert( + &mut self, + key: K, + value: V + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + // docs:end:insert + self.try_resize(); + + let hash = self.hash(key); + + for attempt in 0..self._table.len() { + let index = self.quadratic_probe(hash, attempt as u32); + let mut slot = self._table[index]; + let mut insert = false; + + // Either marked as deleted or has unset key-value. + if slot.is_available() { + insert = true; + self._len += 1; + } else { + let (current_key, _) = slot.key_value_unchecked(); + if current_key == key { + insert = true; + } + } + + if insert { + slot.set(key, value); + self._table[index] = slot; + break; + } + } + } + + unconstrained fn try_resize(&mut self) + where B: BuildHasher, K: Eq + Hash, H: Hasher { + if self.len() + 1 >= self.capacity() / 2 { + let capacity = self.capacity() * 2; + let mut new_map = UHashMap::with_hasher_and_capacity(self._build_hasher, capacity); + + for entry in self.entries() { + new_map.insert(entry.0, entry.1); + } + *self = new_map; + } + } + + // Removes a key-value entry. If key is not present, UHashMap remains unchanged. + // docs:start:remove + unconstrained pub fn remove( + &mut self, + key: K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + // docs:end:remove + let hash = self.hash(key); + + for attempt in 0..self._table.len() { + let index = self.quadratic_probe(hash, attempt as u32); + let mut slot = self._table[index]; + + // Not marked as deleted and has key-value. + if slot.is_valid() { + let (current_key, _) = slot.key_value_unchecked(); + if current_key == key { + slot.mark_deleted(); + self._table[index] = slot; + self._len -= 1; + break; + } + } + } + } + + // Apply UHashMap's hasher onto key to obtain pre-hash for probing. 
+ fn hash( + self, + key: K + ) -> u32 + where + K: Hash, + B: BuildHasher, + H: Hasher { + let mut hasher = self._build_hasher.build_hasher(); + key.hash(&mut hasher); + hasher.finish() as u32 + } + + // Probing scheme: quadratic function. + // We use 0.5 constant near variadic attempt and attempt^2 monomials. + // This ensures good uniformity of distribution for table sizes + // equal to prime numbers or powers of two. + fn quadratic_probe(self: Self, hash: u32, attempt: u32) -> u32 { + (hash + (attempt + attempt * attempt) / 2) % self._table.len() + } +} + +// Equality class on UHashMap has to test that they have +// equal sets of key-value entries, +// thus one is a subset of the other and vice versa. +// docs:start:eq +impl Eq for UHashMap +where + K: Eq + Hash, + V: Eq, + B: BuildHasher, + H: Hasher +{ + fn eq(self, other: UHashMap) -> bool { +// docs:end:eq + let mut equal = false; + + if self.len() == other.len(){ + equal = true; + for slot in self._table{ + // Not marked as deleted and has key-value. + if equal & slot.is_valid(){ + let (key, value) = slot.key_value_unchecked(); + let other_value = other.get(key); + + if other_value.is_none(){ + equal = false; + }else{ + let other_value = other_value.unwrap_unchecked(); + if value != other_value{ + equal = false; + } + } + } + } + } + + equal + } +} + +// docs:start:default +impl Default for UHashMap +where + B: BuildHasher + Default, + H: Hasher + Default +{ + fn default() -> Self { +// docs:end:default + UHashMap::with_hasher(B::default()) + } +} diff --git a/noir_stdlib/src/compat.nr b/noir_stdlib/src/compat.nr index 06da8150767..92e15bae30e 100644 --- a/noir_stdlib/src/compat.nr +++ b/noir_stdlib/src/compat.nr @@ -1,7 +1,21 @@ -global BN254_MODULUS_BE_BYTES: [u8] = &[ +comptime global BN254_MODULUS_BE_BYTES: [u8] = &[ 48, 100, 78, 114, 225, 49, 160, 41, 184, 80, 69, 182, 129, 129, 88, 93, 40, 51, 232, 72, 121, 185, 112, 145, 67, 225, 245, 147, 240, 0, 0, 1 ]; pub fn is_bn254() -> bool { - crate::field::modulus_be_bytes() == BN254_MODULUS_BE_BYTES + comptime + { + // We can't use the `Eq` trait here due to limitations on calling non-comptime functions + // defined within the same crate. + let mut eq = true; + + let modulus_be_bytes = crate::field::modulus_be_bytes(); + // We can't do `BN254_MODULUS_BE_BYTES.len()` due to limitations on calling non-comptime functions. + assert_eq(crate::field::modulus_num_bits(), 254); + for i in 0..32 { + eq &= modulus_be_bytes[i] == BN254_MODULUS_BE_BYTES[i]; + } + + eq + } } diff --git a/noir_stdlib/src/embedded_curve_ops.nr b/noir_stdlib/src/embedded_curve_ops.nr index f54072d8cbd..6b70b6ddef0 100644 --- a/noir_stdlib/src/embedded_curve_ops.nr +++ b/noir_stdlib/src/embedded_curve_ops.nr @@ -88,24 +88,28 @@ impl Eq for EmbeddedCurveScalar { // // The embedded curve being used is decided by the // underlying proof system. 
-#[foreign(multi_scalar_mul)] // docs:start:multi_scalar_mul pub fn multi_scalar_mul( points: [EmbeddedCurvePoint; N], scalars: [EmbeddedCurveScalar; N] -) -> [Field; 3] +) -> EmbeddedCurvePoint // docs:end:multi_scalar_mul -{} +{ + let point_array = multi_scalar_mul_array_return(points, scalars); + EmbeddedCurvePoint { x: point_array[0], y: point_array[1], is_infinite: point_array[2] as bool } +} + +#[foreign(multi_scalar_mul)] +fn multi_scalar_mul_array_return(points: [EmbeddedCurvePoint; N], scalars: [EmbeddedCurveScalar; N]) -> [Field; 3] {} + +#[foreign(multi_scalar_mul)] +pub(crate) fn multi_scalar_mul_slice(points: [EmbeddedCurvePoint], scalars: [EmbeddedCurveScalar]) -> [Field; 3] {} // docs:start:fixed_base_scalar_mul -pub fn fixed_base_scalar_mul( - scalar_low: Field, - scalar_high: Field -) -> [Field; 3] +pub fn fixed_base_scalar_mul(scalar: EmbeddedCurveScalar) -> EmbeddedCurvePoint // docs:end:fixed_base_scalar_mul { let g1 = EmbeddedCurvePoint { x: 1, y: 17631683881184975370165255887551781615748388533673675138860, is_infinite: false }; - let scalar = EmbeddedCurveScalar { lo: scalar_low, hi: scalar_high }; multi_scalar_mul([g1], [scalar]) } diff --git a/noir_stdlib/src/field/bn254.nr b/noir_stdlib/src/field/bn254.nr index bcdc23f80dc..e8db0a30c38 100644 --- a/noir_stdlib/src/field/bn254.nr +++ b/noir_stdlib/src/field/bn254.nr @@ -23,7 +23,7 @@ fn compute_decomposition(x: Field) -> (Field, Field) { (low, high) } -unconstrained fn decompose_hint(x: Field) -> (Field, Field) { +unconstrained pub(crate) fn decompose_hint(x: Field) -> (Field, Field) { compute_decomposition(x) } diff --git a/noir_stdlib/src/field/mod.nr b/noir_stdlib/src/field/mod.nr index b876bcc967b..4b6deaa1106 100644 --- a/noir_stdlib/src/field/mod.nr +++ b/noir_stdlib/src/field/mod.nr @@ -84,19 +84,20 @@ impl Field { } #[builtin(modulus_num_bits)] -pub fn modulus_num_bits() -> u64 {} +pub comptime fn modulus_num_bits() -> u64 {} #[builtin(modulus_be_bits)] -pub fn modulus_be_bits() -> [u1] {} +pub comptime fn modulus_be_bits() -> [u1] {} #[builtin(modulus_le_bits)] -pub fn modulus_le_bits() -> [u1] {} +pub comptime fn modulus_le_bits() -> [u1] {} #[builtin(modulus_be_bytes)] -pub fn modulus_be_bytes() -> [u8] {} +pub comptime fn modulus_be_bytes() -> [u8] {} #[builtin(modulus_le_bytes)] -pub fn modulus_le_bytes() -> [u8] {} +pub comptime fn modulus_le_bytes() -> [u8] {} + // Convert a 32 byte array to a field element by modding pub fn bytes32_to_field(bytes32: [u8; 32]) -> Field { // Convert it to a field element diff --git a/noir_stdlib/src/hash/keccak.nr b/noir_stdlib/src/hash/keccak.nr new file mode 100644 index 00000000000..bb8a9cc2ce2 --- /dev/null +++ b/noir_stdlib/src/hash/keccak.nr @@ -0,0 +1,142 @@ +global LIMBS_PER_BLOCK = 17; //BLOCK_SIZE / 8; +global NUM_KECCAK_LANES = 25; +global BLOCK_SIZE = 136; //(1600 - BITS * 2) / WORD_SIZE; +global WORD_SIZE = 8; + +use crate::collections::vec::Vec; + +#[foreign(keccakf1600)] +fn keccakf1600(input: [u64; 25]) -> [u64; 25] {} + +#[no_predicates] +pub(crate) fn keccak256(mut input: [u8; N], message_size: u32) -> [u8; 32] { + assert(N >= message_size); + for i in 0..N { + if i >= message_size { + input[i] = 0; + } + } + + //1. 
format_input_lanes + let max_blocks = (N + BLOCK_SIZE) / BLOCK_SIZE; + //maximum number of bytes to hash + let max_blocks_length = (BLOCK_SIZE * (max_blocks)); + let real_max_blocks = (message_size + BLOCK_SIZE) / BLOCK_SIZE; + let real_blocks_bytes = real_max_blocks * BLOCK_SIZE; + + let mut block_bytes = [0; BLOCK_SIZE]; + for i in 0..N { + block_bytes[i] = input[i]; + } + + block_bytes[message_size] = 1; + block_bytes[real_blocks_bytes - 1] = 0x80; + + // keccak lanes interpret memory as little-endian integers, + // means we need to swap our byte ordering + let num_limbs = max_blocks * LIMBS_PER_BLOCK; //max_blocks_length / WORD_SIZE; + for i in 0..num_limbs { + let mut temp = [0; 8]; + for j in 0..8 { + temp[j] = block_bytes[8*i+j]; + } + for j in 0..8 { + block_bytes[8 * i + j] = temp[7 - j]; + } + } + let byte_size = max_blocks_length; + let mut sliced_buffer = Vec::new(); + for _i in 0..num_limbs { + sliced_buffer.push(0); + } + // populate a vector of 64-bit limbs from our byte array + for i in 0..num_limbs { + let mut sliced = 0; + if (i * WORD_SIZE + WORD_SIZE > byte_size) { + let slice_size = byte_size - (i * WORD_SIZE); + let byte_shift = (WORD_SIZE - slice_size) * 8; + let mut v = 1; + for k in 0..slice_size { + sliced += v * (block_bytes[i * WORD_SIZE+7-k] as Field); + v *= 256; + } + let w = 1 << (byte_shift as u8); + sliced *= w as Field; + } else { + let mut v = 1; + for k in 0..WORD_SIZE { + sliced += v * (block_bytes[i * WORD_SIZE+7-k] as Field); + v *= 256; + } + } + sliced_buffer.set(i, sliced as u64); + } + + //2. sponge_absorb + let num_blocks = max_blocks; + let mut state : [u64;NUM_KECCAK_LANES]= [0; NUM_KECCAK_LANES]; + let mut under_block = true; + for i in 0..num_blocks { + if i == real_max_blocks { + under_block = false; + } + if under_block { + if (i == 0) { + for j in 0..LIMBS_PER_BLOCK { + state[j] = sliced_buffer.get(j); + } + } else { + for j in 0..LIMBS_PER_BLOCK { + state[j] = state[j] ^ sliced_buffer.get(i * LIMBS_PER_BLOCK + j); + } + } + state = keccakf1600(state); + } + } + + //3. 
sponge_squeeze + let mut result = [0; 32]; + for i in 0..4 { + let lane = state[i] as Field; + let lane_le = lane.to_le_bytes(8); + for j in 0..8 { + result[8*i+j] = lane_le[j]; + } + } + result +} + +mod tests { + use crate::hash::keccak::keccak256; + + #[test] + fn smoke_test() { + let input = [0xbd]; + let result = [ + 0x5a, 0x50, 0x2f, 0x9f, 0xca, 0x46, 0x7b, 0x26, 0x6d, 0x5b, 0x78, 0x33, 0x65, 0x19, 0x37, 0xe8, 0x05, 0x27, 0x0c, 0xa3, 0xf3, 0xaf, 0x1c, 0x0d, 0xd2, 0x46, 0x2d, 0xca, 0x4b, 0x3b, 0x1a, 0xbf + ]; + assert_eq(keccak256(input, input.len()), result); + } + + #[test] + fn hash_hello_world() { + // "hello world" + let input = [72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 33]; + let result = [ + 0xec, 0xd0, 0xe1, 0x8, 0xa9, 0x8e, 0x19, 0x2a, 0xf1, 0xd2, 0xc2, 0x50, 0x55, 0xf4, 0xe3, 0xbe, 0xd7, 0x84, 0xb5, 0xc8, 0x77, 0x20, 0x4e, 0x73, 0x21, 0x9a, 0x52, 0x3, 0x25, 0x1f, 0xea, 0xab + ]; + assert_eq(keccak256(input, input.len()), result); + } + + #[test] + fn var_size_hash() { + let input = [ + 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223 + ]; + let result = [ + 226, 37, 115, 94, 94, 196, 72, 116, 194, 105, 79, 233, 65, 12, 30, 94, 181, 131, 170, 219, 171, 166, 236, 88, 143, 67, 255, 160, 248, 214, 39, 129 + ]; + assert_eq(keccak256(input, 13), result); + } +} + diff --git a/noir_stdlib/src/hash/mod.nr b/noir_stdlib/src/hash/mod.nr index 65f3b9419ff..320b89353d9 100644 --- a/noir_stdlib/src/hash/mod.nr +++ b/noir_stdlib/src/hash/mod.nr @@ -1,11 +1,13 @@ mod poseidon; mod mimc; mod poseidon2; +mod keccak; use crate::default::Default; use crate::uint128::U128; use crate::sha256::{digest, sha256_var}; -use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul}; +use crate::collections::vec::Vec; +use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul, multi_scalar_mul_slice}; #[foreign(sha256)] // docs:start:sha256 @@ -25,46 +27,64 @@ pub fn blake3(input: [u8; N]) -> [u8; 32] // docs:end:blake3 {} -#[no_predicates] // docs:start:pedersen_commitment pub fn pedersen_commitment(input: [Field; N]) -> EmbeddedCurvePoint { // docs:end:pedersen_commitment - let value = pedersen_commitment_with_separator(input, 0); - if (value.x == 0) & (value.y == 0) { + pedersen_commitment_with_separator(input, 0) +} + +pub fn pedersen_hash_with_separator(input: [Field; N], separator: u32) -> Field { + __pedersen_hash_with_separator(input, separator) +} + +fn pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { + let value = __pedersen_commitment_with_separator(input, separator); + if (value[0] == 0) & (value[1] == 0) { EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true } } else { - EmbeddedCurvePoint { x: value.x, y: value.y, is_infinite: false } + EmbeddedCurvePoint { x: value[0], y: value[1], is_infinite: false } } } fn pedersen_commitment_with_separator_noir(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { let mut points = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N]; for i in 0..N { - points[i] = EmbeddedCurveScalar::from_field(input[i]); + // we use the unsafe version because the multi_scalar_mul will constraint the scalars. 
+ points[i] = from_field_unsafe(input[i]); } let generators = derive_generators("DEFAULT_DOMAIN_SEPARATOR".as_bytes(), separator); - let values = multi_scalar_mul(generators, points); - EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: values[2] as bool } + multi_scalar_mul(generators, points) } -#[no_predicates] -pub fn pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { - let values = __pedersen_commitment_with_separator(input, separator); - EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: false } +fn pedersen_hash_with_separator_noir(input: [Field; N], separator: u32) -> Field { + let mut scalars: Vec = Vec::from_slice([EmbeddedCurveScalar { lo: 0, hi: 0 }; N].as_slice()); //Vec::new(); + + for i in 0..N { + scalars.set(i, from_field_unsafe(input[i])); + } + scalars.push(EmbeddedCurveScalar { lo: N as Field, hi: 0 }); + let domain_generators :[EmbeddedCurvePoint; N]= derive_generators("DEFAULT_DOMAIN_SEPARATOR".as_bytes(), separator); + let mut vec_generators = Vec::from_slice(domain_generators.as_slice()); + let length_generator : [EmbeddedCurvePoint; 1] = derive_generators("pedersen_hash_length".as_bytes(), 0); + vec_generators.push(length_generator[0]); + multi_scalar_mul_slice(vec_generators.slice, scalars.slice)[0] } // docs:start:pedersen_hash pub fn pedersen_hash(input: [Field; N]) -> Field // docs:end:pedersen_hash { - pedersen_hash_with_separator(input, 0) + __pedersen_hash_with_separator(input, 0) } +#[foreign(pedersen_hash)] +fn __pedersen_hash_with_separator(input: [Field; N], separator: u32) -> Field {} + +#[foreign(pedersen_commitment)] +fn __pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> [Field; 2] {} + #[field(bn254)] -fn derive_generators( - domain_separator_bytes: [u8; M], - starting_index: u32 -) -> [EmbeddedCurvePoint; N] { +fn derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) -> [EmbeddedCurvePoint; N] { crate::assert_constant(domain_separator_bytes); crate::assert_constant(starting_index); __derive_generators(domain_separator_bytes, starting_index) @@ -72,23 +92,22 @@ fn derive_generators( #[builtin(derive_pedersen_generators)] #[field(bn254)] -fn __derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) -> [EmbeddedCurvePoint; N] {} +fn __derive_generators( + domain_separator_bytes: [u8; M], + starting_index: u32 +) -> [EmbeddedCurvePoint; N] {} -fn pedersen_hash_with_separator_noir(input: [Field; N], separator: u32) -> Field { - let v1 = pedersen_commitment_with_separator(input, separator); - let length_generator : [EmbeddedCurvePoint; 1] = derive_generators("pedersen_hash_length".as_bytes(), 0); - multi_scalar_mul( - [length_generator[0], v1], - [EmbeddedCurveScalar { lo: N as Field, hi: 0 }, EmbeddedCurveScalar { lo: 1, hi: 0 }] - )[0] +#[field(bn254)] + // Same as from_field but: + // does not assert the limbs are 128 bits + // does not assert the decomposition does not overflow the EmbeddedCurveScalar + fn from_field_unsafe(scalar: Field) -> EmbeddedCurveScalar { + let (xlo, xhi) = crate::field::bn254::decompose_hint(scalar); + // Check that the decomposition is correct + assert_eq(scalar, xlo + crate::field::bn254::TWO_POW_128 * xhi); + EmbeddedCurveScalar { lo: xlo, hi: xhi } } -#[foreign(pedersen_hash)] -pub fn pedersen_hash_with_separator(input: [Field; N], separator: u32) -> Field {} - -#[foreign(pedersen_commitment)] -fn __pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> [Field; 2] {} - pub fn 
hash_to_field(inputs: [Field]) -> Field { let mut sum = 0; @@ -100,11 +119,12 @@ pub fn hash_to_field(inputs: [Field]) -> Field { sum } -#[foreign(keccak256)] // docs:start:keccak256 pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] // docs:end:keccak256 -{} +{ + crate::hash::keccak::keccak256(input, message_size) +} #[foreign(poseidon2_permutation)] pub fn poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] {} @@ -263,10 +283,111 @@ impl Hash for (A, B, C, D, E) where A: Hash, B: Hash, C: Hash, D: } } +// Some test vectors for Pedersen hash and Pedersen Commitment. +// They have been generated using the same functions so the tests are for now useless +// but they will be useful when we switch to Noir implementation. #[test] -fn assert_pedersen_noir() { - // TODO: make this a fuzzer test once fuzzer supports curve-specific blackbox functions. - let input = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; - assert_eq(pedersen_hash_with_separator(input, 4), pedersen_hash_with_separator_noir(input, 4)); - assert_eq(pedersen_commitment_with_separator(input, 4), pedersen_commitment_with_separator_noir(input, 4)); +fn assert_pedersen() { + assert_eq( + pedersen_hash_with_separator([1], 1), 0x1b3f4b1a83092a13d8d1a59f7acb62aba15e7002f4440f2275edb99ebbc2305f + ); + assert_eq( + pedersen_commitment_with_separator([1], 1), EmbeddedCurvePoint { + x: 0x054aa86a73cb8a34525e5bbed6e43ba1198e860f5f3950268f71df4591bde402, + y: 0x209dcfbf2cfb57f9f6046f44d71ac6faf87254afc7407c04eb621a6287cac126, + is_infinite: false + } + ); + + assert_eq( + pedersen_hash_with_separator([1, 2], 2), 0x26691c129448e9ace0c66d11f0a16d9014a9e8498ee78f4d69f0083168188255 + ); + assert_eq( + pedersen_commitment_with_separator([1, 2], 2), EmbeddedCurvePoint { + x: 0x2e2b3b191e49541fe468ec6877721d445dcaffe41728df0a0eafeb15e87b0753, + y: 0x2ff4482400ad3a6228be17a2af33e2bcdf41be04795f9782bd96efe7e24f8778, + is_infinite: false + } + ); + assert_eq( + pedersen_hash_with_separator([1, 2, 3], 3), 0x0bc694b7a1f8d10d2d8987d07433f26bd616a2d351bc79a3c540d85b6206dbe4 + ); + assert_eq( + pedersen_commitment_with_separator([1, 2, 3], 3), EmbeddedCurvePoint { + x: 0x1fee4e8cf8d2f527caa2684236b07c4b1bad7342c01b0f75e9a877a71827dc85, + y: 0x2f9fedb9a090697ab69bf04c8bc15f7385b3e4b68c849c1536e5ae15ff138fd1, + is_infinite: false + } + ); + assert_eq( + pedersen_hash_with_separator([1, 2, 3, 4], 4), 0xdae10fb32a8408521803905981a2b300d6a35e40e798743e9322b223a5eddc + ); + assert_eq( + pedersen_commitment_with_separator([1, 2, 3, 4], 4), EmbeddedCurvePoint { + x: 0x07ae3e202811e1fca39c2d81eabe6f79183978e6f12be0d3b8eda095b79bdbc9, + y: 0x0afc6f892593db6fbba60f2da558517e279e0ae04f95758587760ba193145014, + is_infinite: false + } + ); + assert_eq( + pedersen_hash_with_separator([1, 2, 3, 4, 5], 5), 0xfc375b062c4f4f0150f7100dfb8d9b72a6d28582dd9512390b0497cdad9c22 + ); + assert_eq( + pedersen_commitment_with_separator([1, 2, 3, 4, 5], 5), EmbeddedCurvePoint { + x: 0x1754b12bd475a6984a1094b5109eeca9838f4f81ac89c5f0a41dbce53189bb29, + y: 0x2da030e3cfcdc7ddad80eaf2599df6692cae0717d4e9f7bfbee8d073d5d278f7, + is_infinite: false + } + ); + assert_eq( + pedersen_hash_with_separator([1, 2, 3, 4, 5, 6], 6), 0x1696ed13dc2730062a98ac9d8f9de0661bb98829c7582f699d0273b18c86a572 + ); + assert_eq( + pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6], 6), EmbeddedCurvePoint { + x: 0x190f6c0e97ad83e1e28da22a98aae156da083c5a4100e929b77e750d3106a697, + y: 0x1f4b60f34ef91221a0b49756fa0705da93311a61af73d37a0c458877706616fb, + is_infinite: false + } + ); + 
assert_eq( + pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7], 7), 0x128c0ff144fc66b6cb60eeac8a38e23da52992fc427b92397a7dffd71c45ede3 + ); + assert_eq( + pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7], 7), EmbeddedCurvePoint { + x: 0x015441e9d29491b06563fac16fc76abf7a9534c715421d0de85d20dbe2965939, + y: 0x1d2575b0276f4e9087e6e07c2cb75aa1baafad127af4be5918ef8a2ef2fea8fc, + is_infinite: false + } + ); + assert_eq( + pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8), 0x2f960e117482044dfc99d12fece2ef6862fba9242be4846c7c9a3e854325a55c + ); + assert_eq( + pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8), EmbeddedCurvePoint { + x: 0x1657737676968887fceb6dd516382ea13b3a2c557f509811cd86d5d1199bc443, + y: 0x1f39f0cb569040105fa1e2f156521e8b8e08261e635a2b210bdc94e8d6d65f77, + is_infinite: false + } + ); + assert_eq( + pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9), 0x0c96db0790602dcb166cc4699e2d306c479a76926b81c2cb2aaa92d249ec7be7 + ); + assert_eq( + pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9), EmbeddedCurvePoint { + x: 0x0a3ceae42d14914a432aa60ec7fded4af7dad7dd4acdbf2908452675ec67e06d, + y: 0xfc19761eaaf621ad4aec9a8b2e84a4eceffdba78f60f8b9391b0bd9345a2f2, + is_infinite: false + } + ); + assert_eq( + pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10), 0x2cd37505871bc460a62ea1e63c7fe51149df5d0801302cf1cbc48beb8dff7e94 + ); + assert_eq( + pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10), EmbeddedCurvePoint { + x: 0x2fb3f8b3d41ddde007c8c3c62550f9a9380ee546fcc639ffbb3fd30c8d8de30c, + y: 0x300783be23c446b11a4c0fabf6c91af148937cea15fcf5fb054abf7f752ee245, + is_infinite: false + } + ); } + diff --git a/noir_stdlib/src/meta/mod.nr b/noir_stdlib/src/meta/mod.nr index 1825888130b..395f09a453e 100644 --- a/noir_stdlib/src/meta/mod.nr +++ b/noir_stdlib/src/meta/mod.nr @@ -1 +1,11 @@ +mod trait_constraint; +mod trait_def; mod type_def; +mod quoted; + +/// Calling unquote as a macro (via `unquote!(arg)`) will unquote +/// its argument. Since this is the effect `!` already does, `unquote` +/// itself does not need to do anything besides return its argument. 
+pub comptime fn unquote(code: Quoted) -> Quoted { + code +} diff --git a/noir_stdlib/src/meta/quoted.nr b/noir_stdlib/src/meta/quoted.nr new file mode 100644 index 00000000000..6273d64b10c --- /dev/null +++ b/noir_stdlib/src/meta/quoted.nr @@ -0,0 +1,4 @@ +impl Quoted { + #[builtin(quoted_as_trait_constraint)] + fn as_trait_constraint(self) -> TraitConstraint {} +} diff --git a/noir_stdlib/src/meta/trait_constraint.nr b/noir_stdlib/src/meta/trait_constraint.nr new file mode 100644 index 00000000000..f0276608974 --- /dev/null +++ b/noir_stdlib/src/meta/trait_constraint.nr @@ -0,0 +1,20 @@ +use crate::hash::{Hash, Hasher}; +use crate::cmp::Eq; + +impl Eq for TraitConstraint { + fn eq(self, other: Self) -> bool { + constraint_eq(self, other) + } +} + +impl Hash for TraitConstraint { + fn hash(self, state: &mut H) where H: Hasher { + state.write(constraint_hash(self)); + } +} + +#[builtin(trait_constraint_eq)] +fn constraint_eq(_first: TraitConstraint, _second: TraitConstraint) -> bool {} + +#[builtin(trait_constraint_hash)] +fn constraint_hash(_constraint: TraitConstraint) -> Field {} diff --git a/noir_stdlib/src/meta/trait_def.nr b/noir_stdlib/src/meta/trait_def.nr new file mode 100644 index 00000000000..5de7631e34d --- /dev/null +++ b/noir_stdlib/src/meta/trait_def.nr @@ -0,0 +1,4 @@ +impl TraitDefinition { + #[builtin(trait_def_as_trait_constraint)] + fn as_trait_constraint(_self: Self) -> TraitConstraint {} +} diff --git a/scripts/redo-typo-pr.sh b/scripts/redo-typo-pr.sh index 416be65a449..4b3b93b48ed 100755 --- a/scripts/redo-typo-pr.sh +++ b/scripts/redo-typo-pr.sh @@ -16,16 +16,21 @@ gh pr checkout $ORIGINAL_PR_NUMBER echo "Creating new local branch $NEW_BRANCH" git checkout -b $NEW_BRANCH -# Step 3: Push the new branch to GitHub +# Step 3: Squash commits +echo "Squashing new local branch $NEW_BRANCH" +git reset --soft master +git add . +git commit -m "chore: typo fixes" + +# Step 4: Push the new branch to GitHub echo "Pushing new branch $NEW_BRANCH to GitHub" -git commit --amend --no-edit git push origin $NEW_BRANCH -# Step 4: create a new pull request +# Step 5: create a new pull request echo "Creating a new pull request for $NEW_BRANCH" gh pr create --base master --head $NEW_BRANCH --title "chore: redo typo PR by $AUTHOR" --body "Thanks $AUTHOR for https://github.com/$REPO/pull/$ORIGINAL_PR_NUMBER. Our policy is to redo typo changes to dissuade metric farming. This is an automated script." -# Step 5: Close the original PR +# Step 6: Close the original PR echo "Closing original PR #$ORIGINAL_PR_NUMBER" gh pr close $ORIGINAL_PR_NUMBER diff --git a/test_programs/compile_success_empty/attribute_args/Nargo.toml b/test_programs/compile_success_empty/attribute_args/Nargo.toml new file mode 100644 index 00000000000..8efe5d203d1 --- /dev/null +++ b/test_programs/compile_success_empty/attribute_args/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "attribute_args" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/attribute_args/src/main.nr b/test_programs/compile_success_empty/attribute_args/src/main.nr new file mode 100644 index 00000000000..44b9c20460f --- /dev/null +++ b/test_programs/compile_success_empty/attribute_args/src/main.nr @@ -0,0 +1,20 @@ +#[attr_with_args(a b, c d)] +#[varargs(one, two)] +#[varargs(one, two, three, four)] +struct Foo {} + +comptime fn attr_with_args(s: StructDefinition, a: Quoted, b: Quoted) { + // Ensure all variables are in scope. 
+ // We can't print them since that breaks the test runner. + let _ = s; + let _ = a; + let _ = b; +} + +comptime fn varargs(s: StructDefinition, t: [Quoted]) { + let _ = s; + for _ in t {} + assert(t.len() < 5); +} + +fn main() {} diff --git a/test_programs/compile_success_empty/comptime_trait_constraint/Nargo.toml b/test_programs/compile_success_empty/comptime_trait_constraint/Nargo.toml new file mode 100644 index 00000000000..c7e28c053a7 --- /dev/null +++ b/test_programs/compile_success_empty/comptime_trait_constraint/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "comptime_trait_constraint" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr b/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr new file mode 100644 index 00000000000..5c99f8c587e --- /dev/null +++ b/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr @@ -0,0 +1,39 @@ +use std::hash::{Hash, Hasher}; + +trait TraitWithGenerics { + fn foo(self) -> (A, B); +} + +fn main() { + comptime + { + let constraint1 = quote { Default }.as_trait_constraint(); + let constraint2 = quote { TraitWithGenerics }.as_trait_constraint(); + + assert(constraint1 != constraint2); + + let mut hasher = TestHasher { result: 0 }; + constraint1.hash(&mut hasher); + let hash1 = hasher.finish(); + + let mut hasher = TestHasher { result: 0 }; + constraint2.hash(&mut hasher); + let hash2 = hasher.finish(); + + assert(hash1 != hash2); + } +} + +comptime struct TestHasher { + result: Field, +} + +comptime impl Hasher for TestHasher { + comptime fn finish(self) -> Field { + self.result + } + + comptime fn write(&mut self, input: Field) { + self.result += input; + } +} diff --git a/test_programs/compile_success_empty/comptime_traits/src/main.nr b/test_programs/compile_success_empty/comptime_traits/src/main.nr index 143c9cda274..8b1f81e6594 100644 --- a/test_programs/compile_success_empty/comptime_traits/src/main.nr +++ b/test_programs/compile_success_empty/comptime_traits/src/main.nr @@ -1,3 +1,5 @@ +use std::ops::Neg; + fn main() { comptime { @@ -13,3 +15,22 @@ fn main() { assert([1, 2] != array); } } + +struct MyType { + value: i32, +} + +comptime impl Neg for MyType { + comptime fn neg(self) -> Self { + self + } +} + +fn neg_at_comptime() { + comptime + { + let value = MyType { value: 1 }; + let _result = -value; + } +} + diff --git a/test_programs/compile_success_empty/function_attribute/Nargo.toml b/test_programs/compile_success_empty/function_attribute/Nargo.toml new file mode 100644 index 00000000000..94b5c5da6a8 --- /dev/null +++ b/test_programs/compile_success_empty/function_attribute/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "function_attribute" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_empty/function_attribute/src/main.nr b/test_programs/compile_success_empty/function_attribute/src/main.nr new file mode 100644 index 00000000000..ec22b730d3f --- /dev/null +++ b/test_programs/compile_success_empty/function_attribute/src/main.nr @@ -0,0 +1,18 @@ +#[function_attr] +fn foo() {} + +struct Foo {} + +comptime fn function_attr(_f: FunctionDefinition) -> Quoted { + quote { + impl Default for Foo { + fn default() -> Foo { + Foo {} + } + } + } +} + +fn main() { + let _ = Foo::default(); +} diff --git a/test_programs/compile_success_empty/macros_in_comptime/Nargo.toml 
b/test_programs/compile_success_empty/macros_in_comptime/Nargo.toml new file mode 100644 index 00000000000..831fa270863 --- /dev/null +++ b/test_programs/compile_success_empty/macros_in_comptime/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "macros_in_comptime" +type = "bin" +authors = [""] +compiler_version = ">=0.32.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/macros_in_comptime/src/main.nr b/test_programs/compile_success_empty/macros_in_comptime/src/main.nr new file mode 100644 index 00000000000..52567025e23 --- /dev/null +++ b/test_programs/compile_success_empty/macros_in_comptime/src/main.nr @@ -0,0 +1,49 @@ +use std::field::modulus_num_bits; +use std::meta::unquote; + +fn main() { + comptime + { + foo::<3>(5); + submodule::bar(); + } +} + +// Call a different function from the interpreter, then have the +// elaborator switch to the middle of foo from its previous scope in main +unconstrained comptime fn foo(x: Field) { + assert(modulus_num_bits() != 0); + + let cond = quote { modulus_num_bits() != 0 }; + assert(unquote!(cond)); + + // Use a comptime parameter in scope + assert_eq(5, x); + assert_eq(5, unquote!(quote { x })); + + // Use a generic in scope + assert_eq(3, N); + assert_eq(3, unquote!(quote { N })); + + // Use `break` which only unconstrained functions can do. + // This ensures the elaborator knows we're switching from `main` to `foo` + for _ in 0..0 { + break; + } + + let loop = quote { for _ in 0..0 { break; } }; + unquote!(loop); +} + +mod submodule { + use std::field::modulus_be_bytes; + use std::meta::unquote; + + pub comptime fn bar() { + // Use a function only in scope in this module + assert(modulus_be_bytes().len() != 0); + + let cond = quote { modulus_be_bytes().len() != 0 }; + assert(unquote!(cond)); + } +} diff --git a/test_programs/compile_success_empty/regression_4635/src/main.nr b/test_programs/compile_success_empty/regression_4635/src/main.nr index 350b60ba3f7..75188f797dd 100644 --- a/test_programs/compile_success_empty/regression_4635/src/main.nr +++ b/test_programs/compile_success_empty/regression_4635/src/main.nr @@ -42,8 +42,8 @@ struct MyStruct { a: T } -impl Deserialize<1> for MyStruct { - fn deserialize(fields: [Field; 1]) -> Self where T: FromField { +impl Deserialize<1> for MyStruct where T: FromField { + fn deserialize(fields: [Field; 1]) -> Self { Self{ a: FromField::from_field(fields[0]) } } } diff --git a/test_programs/compile_success_empty/regression_5428/Nargo.toml b/test_programs/compile_success_empty/regression_5428/Nargo.toml new file mode 100644 index 00000000000..7507b934d66 --- /dev/null +++ b/test_programs/compile_success_empty/regression_5428/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_5428" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_empty/regression_5428/src/main.nr b/test_programs/compile_success_empty/regression_5428/src/main.nr new file mode 100644 index 00000000000..f01b89cbea4 --- /dev/null +++ b/test_programs/compile_success_empty/regression_5428/src/main.nr @@ -0,0 +1,9 @@ +fn main() { + assert_true!(); +} + +comptime fn assert_true() -> Quoted { + let first = quote { assert( }; + let second = quote { true); }; + first.append(second) +} diff --git a/test_programs/compile_success_empty/trait_as_constraint/Nargo.toml b/test_programs/compile_success_empty/trait_as_constraint/Nargo.toml new file mode 100644 index 00000000000..907b5ce09ed --- /dev/null +++ b/test_programs/compile_success_empty/trait_as_constraint/Nargo.toml @@ -0,0 +1,7 @@ 
+[package] +name = "trait_as_constraint" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_empty/trait_as_constraint/src/main.nr b/test_programs/compile_success_empty/trait_as_constraint/src/main.nr new file mode 100644 index 00000000000..1911f045c27 --- /dev/null +++ b/test_programs/compile_success_empty/trait_as_constraint/src/main.nr @@ -0,0 +1,9 @@ +#[test_as_constraint] +trait Foo {} + +comptime fn test_as_constraint(t: TraitDefinition) { + let constraint = t.as_trait_constraint(); + assert(constraint == constraint); +} + +fn main() {} diff --git a/test_programs/compile_success_empty/trait_attribute/Nargo.toml b/test_programs/compile_success_empty/trait_attribute/Nargo.toml new file mode 100644 index 00000000000..c72fe5e3e89 --- /dev/null +++ b/test_programs/compile_success_empty/trait_attribute/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "trait_attribute" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/trait_attribute/src/main.nr b/test_programs/compile_success_empty/trait_attribute/src/main.nr new file mode 100644 index 00000000000..87f4893e3e5 --- /dev/null +++ b/test_programs/compile_success_empty/trait_attribute/src/main.nr @@ -0,0 +1,19 @@ +#[trait_attr] +trait Foo { + fn foo(self) -> Self; +} + +comptime fn trait_attr(_t: TraitDefinition) -> Quoted { + quote { + impl Foo for Field { + fn foo(self) -> Self { + self + 1 + } + } + } +} + +fn main() { + assert_eq(1.foo(), 2); + assert_eq(10.foo(), 11); +} diff --git a/test_programs/compile_success_empty/trait_call_full_path/Nargo.toml b/test_programs/compile_success_empty/trait_call_full_path/Nargo.toml new file mode 100644 index 00000000000..f6d20fc3e32 --- /dev/null +++ b/test_programs/compile_success_empty/trait_call_full_path/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "trait_call_full_path" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_empty/trait_call_full_path/src/main.nr b/test_programs/compile_success_empty/trait_call_full_path/src/main.nr new file mode 100644 index 00000000000..2d4b003f2ad --- /dev/null +++ b/test_programs/compile_success_empty/trait_call_full_path/src/main.nr @@ -0,0 +1,20 @@ +mod foo { + trait Trait { + fn me(self) -> Self; + } + + impl Trait for Field { + fn me(self) -> Self { + self + } + } +} + +use foo::Trait; +use foo::Trait::me; + +fn main(x: Field) { + let _ = foo::Trait::me(x); + let _ = Trait::me(x); + let _ = me(x); +} diff --git a/test_programs/execution_success/trait_method_mut_self/Nargo.toml b/test_programs/compile_success_empty/trait_method_mut_self/Nargo.toml similarity index 100% rename from test_programs/execution_success/trait_method_mut_self/Nargo.toml rename to test_programs/compile_success_empty/trait_method_mut_self/Nargo.toml diff --git a/test_programs/execution_success/trait_method_mut_self/Prover.toml b/test_programs/compile_success_empty/trait_method_mut_self/Prover.toml similarity index 100% rename from test_programs/execution_success/trait_method_mut_self/Prover.toml rename to test_programs/compile_success_empty/trait_method_mut_self/Prover.toml diff --git a/test_programs/execution_success/trait_method_mut_self/src/main.nr b/test_programs/compile_success_empty/trait_method_mut_self/src/main.nr similarity index 100% rename from test_programs/execution_success/trait_method_mut_self/src/main.nr rename to 
test_programs/compile_success_empty/trait_method_mut_self/src/main.nr diff --git a/test_programs/execution_success/turbofish_call_func_diff_types/Nargo.toml b/test_programs/compile_success_empty/turbofish_call_func_diff_types/Nargo.toml similarity index 100% rename from test_programs/execution_success/turbofish_call_func_diff_types/Nargo.toml rename to test_programs/compile_success_empty/turbofish_call_func_diff_types/Nargo.toml diff --git a/test_programs/execution_success/turbofish_call_func_diff_types/Prover.toml b/test_programs/compile_success_empty/turbofish_call_func_diff_types/Prover.toml similarity index 100% rename from test_programs/execution_success/turbofish_call_func_diff_types/Prover.toml rename to test_programs/compile_success_empty/turbofish_call_func_diff_types/Prover.toml diff --git a/test_programs/execution_success/turbofish_call_func_diff_types/src/main.nr b/test_programs/compile_success_empty/turbofish_call_func_diff_types/src/main.nr similarity index 100% rename from test_programs/execution_success/turbofish_call_func_diff_types/src/main.nr rename to test_programs/compile_success_empty/turbofish_call_func_diff_types/src/main.nr diff --git a/test_programs/compile_success_empty/unquote/Nargo.toml b/test_programs/compile_success_empty/unquote/Nargo.toml new file mode 100644 index 00000000000..68b2890e37a --- /dev/null +++ b/test_programs/compile_success_empty/unquote/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "unquote" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_empty/unquote/src/main.nr b/test_programs/compile_success_empty/unquote/src/main.nr new file mode 100644 index 00000000000..2717286b810 --- /dev/null +++ b/test_programs/compile_success_empty/unquote/src/main.nr @@ -0,0 +1,4 @@ +fn main() { + std::meta::unquote!(quote { assert(true); }); + assert(std::meta::unquote!(quote { true })); +} diff --git a/test_programs/compile_success_no_bug/check_uncostrained_regression/Nargo.toml b/test_programs/compile_success_no_bug/check_uncostrained_regression/Nargo.toml new file mode 100644 index 00000000000..3c6b5d9688c --- /dev/null +++ b/test_programs/compile_success_no_bug/check_uncostrained_regression/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "check_unconstrained_regression" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_no_bug/check_uncostrained_regression/src/main.nr b/test_programs/compile_success_no_bug/check_uncostrained_regression/src/main.nr new file mode 100644 index 00000000000..e93e068f432 --- /dev/null +++ b/test_programs/compile_success_no_bug/check_uncostrained_regression/src/main.nr @@ -0,0 +1,27 @@ +struct Trigger{ + x: u32, + y: Field, + z: [Field;3], +} +struct ResultType{ + a: u32, + b: Field, + c: [Field;3], +} + +unconstrained fn convert(trigger: Trigger) -> ResultType { + let result= ResultType { a: trigger.x + 1, b: trigger.y - 1 + trigger.z[2], c: [trigger.z[0], 0, trigger.z[1]] }; + result +} +impl Trigger { + fn execute(self) -> ResultType { + let result = convert(self); + assert(result.a == self.x + 1); + assert(result.b == self.y - 1 + self.z[2]); + assert(result.c[1] == 0); + result + } +} +fn main(x: Trigger) -> pub ResultType { + x.execute() +} diff --git a/test_programs/execution_success/bench_ecdsa_secp256k1/Nargo.toml b/test_programs/execution_success/bench_ecdsa_secp256k1/Nargo.toml new file mode 100644 index 
00000000000..7e83251cc5a --- /dev/null +++ b/test_programs/execution_success/bench_ecdsa_secp256k1/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "bench_ecdsa_secp256k1" +description = "ECDSA secp256k1 verification" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/bench_ecdsa_secp256k1/Prover.toml b/test_programs/execution_success/bench_ecdsa_secp256k1/Prover.toml new file mode 100644 index 00000000000..e78fc19cb71 --- /dev/null +++ b/test_programs/execution_success/bench_ecdsa_secp256k1/Prover.toml @@ -0,0 +1,169 @@ + +hashed_message = [ + 0x3a, + 0x73, + 0xf4, + 0x12, + 0x3a, + 0x5c, + 0xd2, + 0x12, + 0x1f, + 0x21, + 0xcd, + 0x7e, + 0x8d, + 0x35, + 0x88, + 0x35, + 0x47, + 0x69, + 0x49, + 0xd0, + 0x35, + 0xd9, + 0xc2, + 0xda, + 0x68, + 0x06, + 0xb4, + 0x63, + 0x3a, + 0xc8, + 0xc1, + 0xe2, +] +pub_key_x = [ + 0xa0, + 0x43, + 0x4d, + 0x9e, + 0x47, + 0xf3, + 0xc8, + 0x62, + 0x35, + 0x47, + 0x7c, + 0x7b, + 0x1a, + 0xe6, + 0xae, + 0x5d, + 0x34, + 0x42, + 0xd4, + 0x9b, + 0x19, + 0x43, + 0xc2, + 0xb7, + 0x52, + 0xa6, + 0x8e, + 0x2a, + 0x47, + 0xe2, + 0x47, + 0xc7, +] +pub_key_y = [ + 0x89, + 0x3a, + 0xba, + 0x42, + 0x54, + 0x19, + 0xbc, + 0x27, + 0xa3, + 0xb6, + 0xc7, + 0xe6, + 0x93, + 0xa2, + 0x4c, + 0x69, + 0x6f, + 0x79, + 0x4c, + 0x2e, + 0xd8, + 0x77, + 0xa1, + 0x59, + 0x3c, + 0xbe, + 0xe5, + 0x3b, + 0x03, + 0x73, + 0x68, + 0xd7, +] +signature = [ + 0xe5, + 0x08, + 0x1c, + 0x80, + 0xab, + 0x42, + 0x7d, + 0xc3, + 0x70, + 0x34, + 0x6f, + 0x4a, + 0x0e, + 0x31, + 0xaa, + 0x2b, + 0xad, + 0x8d, + 0x97, + 0x98, + 0xc3, + 0x80, + 0x61, + 0xdb, + 0x9a, + 0xe5, + 0x5a, + 0x4e, + 0x8d, + 0xf4, + 0x54, + 0xfd, + 0x28, + 0x11, + 0x98, + 0x94, + 0x34, + 0x4e, + 0x71, + 0xb7, + 0x87, + 0x70, + 0xcc, + 0x93, + 0x1d, + 0x61, + 0xf4, + 0x80, + 0xec, + 0xbb, + 0x0b, + 0x89, + 0xd6, + 0xeb, + 0x69, + 0x69, + 0x01, + 0x61, + 0xe4, + 0x9a, + 0x71, + 0x5f, + 0xcd, + 0x55, +] diff --git a/test_programs/execution_success/bench_ecdsa_secp256k1/src/main.nr b/test_programs/execution_success/bench_ecdsa_secp256k1/src/main.nr new file mode 100644 index 00000000000..60f182c7836 --- /dev/null +++ b/test_programs/execution_success/bench_ecdsa_secp256k1/src/main.nr @@ -0,0 +1,6 @@ +use dep::std; + +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} diff --git a/test_programs/execution_success/comptime_slice_equality/Nargo.toml b/test_programs/execution_success/comptime_slice_equality/Nargo.toml new file mode 100644 index 00000000000..72700d87d8b --- /dev/null +++ b/test_programs/execution_success/comptime_slice_equality/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "comptime_slice_equality" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/comptime_slice_equality/src/main.nr b/test_programs/execution_success/comptime_slice_equality/src/main.nr new file mode 100644 index 00000000000..83f82fca06f --- /dev/null +++ b/test_programs/execution_success/comptime_slice_equality/src/main.nr @@ -0,0 +1,6 @@ +fn main() { + comptime + { + assert_eq(&[1], &[1]); + } +} diff --git a/test_programs/execution_success/embedded_curve_ops/src/main.nr b/test_programs/execution_success/embedded_curve_ops/src/main.nr index 4eeda39c6aa..5372f73df23 100644 --- 
a/test_programs/execution_success/embedded_curve_ops/src/main.nr +++ b/test_programs/execution_success/embedded_curve_ops/src/main.nr @@ -4,8 +4,8 @@ fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { let scalar = std::embedded_curve_ops::EmbeddedCurveScalar { lo: priv_key, hi: 0 }; // Test that multi_scalar_mul correctly derives the public key let res = std::embedded_curve_ops::multi_scalar_mul([g1], [scalar]); - assert(res[0] == pub_x); - assert(res[1] == pub_y); + assert(res.x == pub_x); + assert(res.y == pub_y); // Test that double function calling embedded_curve_add works as expected let pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y, is_infinite: false }; @@ -18,5 +18,5 @@ fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { let res = std::embedded_curve_ops::multi_scalar_mul([g1, g1], [scalar, scalar]); // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice - assert(double.x == res[0]); + assert(double.x == res.x); } diff --git a/test_programs/execution_success/poseidon_bn254_hash_width_3/Nargo.toml b/test_programs/execution_success/poseidon_bn254_hash_width_3/Nargo.toml new file mode 100644 index 00000000000..7047f0aeef2 --- /dev/null +++ b/test_programs/execution_success/poseidon_bn254_hash_width_3/Nargo.toml @@ -0,0 +1,9 @@ +[package] +name = "poseidon_bn254_hash_width_3" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" +# Test usage of `expression_width` field +expression_width = "3" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/poseidon_bn254_hash_width_3/Prover.toml b/test_programs/execution_success/poseidon_bn254_hash_width_3/Prover.toml new file mode 100644 index 00000000000..8eecf9a3db2 --- /dev/null +++ b/test_programs/execution_success/poseidon_bn254_hash_width_3/Prover.toml @@ -0,0 +1,4 @@ +x1 = [1,2] +y1 = "0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a" +x2 = [1,2,3,4] +y2 = "0x299c867db6c1fdd79dcefa40e4510b9837e60ebb1ce0663dbaa525df65250465" diff --git a/test_programs/execution_success/poseidon_bn254_hash_width_3/src/main.nr b/test_programs/execution_success/poseidon_bn254_hash_width_3/src/main.nr new file mode 100644 index 00000000000..bb441a1ace3 --- /dev/null +++ b/test_programs/execution_success/poseidon_bn254_hash_width_3/src/main.nr @@ -0,0 +1,9 @@ +use std::hash::poseidon; + +fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { + let hash1 = poseidon::bn254::hash_2(x1); + assert(hash1 == y1); + + let hash2 = poseidon::bn254::hash_4(x2); + assert(hash2 == y2); +} diff --git a/test_programs/execution_success/regression_5045/src/main.nr b/test_programs/execution_success/regression_5045/src/main.nr index cf39b2f97e4..d1bc4f663fd 100644 --- a/test_programs/execution_success/regression_5045/src/main.nr +++ b/test_programs/execution_success/regression_5045/src/main.nr @@ -15,6 +15,6 @@ fn main(is_active: bool) { [a, bad], [EmbeddedCurveScalar { lo: 1, hi: 0 }, EmbeddedCurveScalar { lo: 1, hi: 0 }] ); - assert(e[0] != d.x); + assert(e.x != d.x); } } diff --git a/test_programs/execution_success/schnorr/src/main.nr b/test_programs/execution_success/schnorr/src/main.nr index 5bc0ca9fefb..cf22fd371d1 100644 --- a/test_programs/execution_success/schnorr/src/main.nr +++ b/test_programs/execution_success/schnorr/src/main.nr @@ -50,7 +50,7 @@ pub fn verify_signature_noir(public_key: embedded_curve_ops::EmbeddedCurvePoi let g1 = embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: 
17631683881184975370165255887551781615748388533673675138860, is_infinite: false }; let r = embedded_curve_ops::multi_scalar_mul([g1, public_key], [sig_s, sig_e]); // compare the _hashes_ rather than field elements modulo r - let pedersen_hash = std::hash::pedersen_hash([r[0], public_key.x, public_key.y]); + let pedersen_hash = std::hash::pedersen_hash([r.x, public_key.x, public_key.y]); let mut hash_input = [0; M]; let pde = pedersen_hash.to_be_bytes(32); @@ -62,7 +62,7 @@ pub fn verify_signature_noir(public_key: embedded_curve_ops::EmbeddedCurvePoi } let result = std::hash::blake2s(hash_input); - is_ok = (r[2] == 0); + is_ok = !r.is_infinite; for i in 0..32 { if result[i] != signature[32 + i] { is_ok = false; @@ -101,7 +101,7 @@ pub fn assert_valid_signature(public_key: embedded_curve_ops::EmbeddedCurvePo let g1 = embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: 17631683881184975370165255887551781615748388533673675138860, is_infinite: false }; let r = embedded_curve_ops::multi_scalar_mul([g1, public_key], [sig_s, sig_e]); // compare the _hashes_ rather than field elements modulo r - let pedersen_hash = std::hash::pedersen_hash([r[0], public_key.x, public_key.y]); + let pedersen_hash = std::hash::pedersen_hash([r.x, public_key.x, public_key.y]); let mut hash_input = [0; M]; let pde = pedersen_hash.to_be_bytes(32); @@ -113,7 +113,7 @@ pub fn assert_valid_signature(public_key: embedded_curve_ops::EmbeddedCurvePo } let result = std::hash::blake2s(hash_input); - assert(r[2] == 0); + assert(!r.is_infinite); for i in 0..32 { assert(result[i] == signature[32 + i]); } diff --git a/test_programs/execution_success/simple_shield/src/main.nr b/test_programs/execution_success/simple_shield/src/main.nr index d84288b9fd6..fd2fc20d08f 100644 --- a/test_programs/execution_success/simple_shield/src/main.nr +++ b/test_programs/execution_success/simple_shield/src/main.nr @@ -10,12 +10,11 @@ fn main( to_pubkey_x: Field, to_pubkey_y: Field ) -> pub [Field; 2] { + let priv_key_as_scalar = std::embedded_curve_ops::EmbeddedCurveScalar { lo: priv_key, hi: 0 }; // Compute public key from private key to show ownership - let pubkey = std::embedded_curve_ops::fixed_base_scalar_mul(priv_key, 0); - let pubkey_x = pubkey[0]; - let pubkey_y = pubkey[1]; + let pubkey = std::embedded_curve_ops::fixed_base_scalar_mul(priv_key_as_scalar); // Compute input note commitment - let note_commitment = std::hash::pedersen_commitment([pubkey_x, pubkey_y]); + let note_commitment = std::hash::pedersen_commitment([pubkey.x, pubkey.y]); // Compute input note nullifier let nullifier = std::hash::pedersen_commitment([note_commitment.x, index, priv_key]); // Compute output note nullifier diff --git a/test_programs/execution_success/uhashmap/Nargo.toml b/test_programs/execution_success/uhashmap/Nargo.toml new file mode 100644 index 00000000000..0d898e53003 --- /dev/null +++ b/test_programs/execution_success/uhashmap/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "uhashmap" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/uhashmap/Prover.toml b/test_programs/execution_success/uhashmap/Prover.toml new file mode 100644 index 00000000000..84d4c0733e4 --- /dev/null +++ b/test_programs/execution_success/uhashmap/Prover.toml @@ -0,0 +1,26 @@ +# Input: 6 key-value entries for hashmap capacity of 8. +# These must be distinct (both key-to-key, and value-to-value) for correct testing. 
+ +[[input]] +key = 2 +value = 17 + +[[input]] +key = 3 +value = 19 + +[[input]] +key = 5 +value = 23 + +[[input]] +key = 7 +value = 29 + +[[input]] +key = 11 +value = 31 + +[[input]] +key = 41 +value = 43 \ No newline at end of file diff --git a/test_programs/execution_success/uhashmap/src/main.nr b/test_programs/execution_success/uhashmap/src/main.nr new file mode 100644 index 00000000000..395ed21b6b0 --- /dev/null +++ b/test_programs/execution_success/uhashmap/src/main.nr @@ -0,0 +1,352 @@ +use std::collections::umap::UHashMap; +use std::hash::BuildHasherDefault; +use std::hash::poseidon2::Poseidon2Hasher; + +type K = Field; +type V = Field; + +// It is more convenient and readable to use structs as input. +struct Entry{ + key: Field, + value: Field +} + +global HASHMAP_LEN = 6; + +global FIELD_CMP = |a: Field, b: Field| a.lt(b); + +global K_CMP = FIELD_CMP; +global V_CMP = FIELD_CMP; +global KV_CMP = |a: (K, V), b: (K, V)| a.0.lt(b.0); + +global ALLOCATE_HASHMAP = || -> UHashMap> + UHashMap::default(); + +unconstrained fn main(input: [Entry; HASHMAP_LEN]) { + test_sequential(input[0].key, input[0].value); + test_multiple_equal_insert(input[1].key, input[1].value); + test_value_override(input[2].key, input[2].value, input[3].value); + test_insert_and_methods(input); + test_hashmaps_equality(input); + test_retain(); + test_iterators(); + test_mut_iterators(); + + doc_tests(); +} + +// Insert, get, remove. +unconstrained fn test_sequential(key: K, value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New UHashMap should be empty."); + + hashmap.insert(key, value); + assert(hashmap.len() == 1, "UHashMap after one insert should have a length of 1 element."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(value == got, f"Inserted {value} but got {got} for the same key."); + + hashmap.remove(key); + assert(hashmap.is_empty(), "UHashMap after one insert and corresponding removal should be empty."); + let got = hashmap.get(key); + assert(got.is_none(), "Value has been removed, but is still available (not none)."); +} + +// Insert same pair several times. +unconstrained fn test_multiple_equal_insert(key: K, value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New UHashMap should be empty."); + + for _ in 0..HASHMAP_LEN { + hashmap.insert(key, value); + } + + let len = hashmap.len(); + assert(len == 1, f"UHashMap length must be 1, got {len}."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(value == got, f"Inserted {value} but got {got} for the same key."); +} + +// Override value for existing pair. +unconstrained fn test_value_override(key: K, value: V, new_value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New hashmap should be empty."); + + hashmap.insert(key, value); + hashmap.insert(key, new_value); + assert(hashmap.len() == 1, "UHashMap length is invalid."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(got == new_value, f"Expected {new_value}, but got {got}."); +} + +// Insert several distinct pairs and test auxiliary methods. 
+unconstrained fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New UHashMap should be empty."); + + for entry in input { + println(f"Inserting {entry}"); + hashmap.insert(entry.key, entry.value); + } + + println(hashmap.len()); + assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input length."); + + for entry in input { + assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry.key}."); + } + + hashmap.clear(); + assert(hashmap.is_empty(), "UHashMap after clear() should be empty."); +} + +// Insert several pairs and test retaining. +unconstrained fn test_retain() { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New UHashMap should be empty."); + + let (key, value) = (5, 11); + hashmap.insert(key, value); + let (key, value) = (2, 13); + hashmap.insert(key, value); + let (key, value) = (11, 5); + hashmap.insert(key, value); + + let predicate = |key: K, value: V| -> bool {key * value == 55}; + hashmap.retain(predicate); + + assert(hashmap.len() == 2, "UHashMap should have retained 2 elements."); + assert(hashmap.get(2).is_none(), "Pair should have been removed, since it does not match predicate."); +} + +// Equality trait check. +unconstrained fn test_hashmaps_equality(input: [Entry; HASHMAP_LEN]) { + let mut hashmap_1 = ALLOCATE_HASHMAP(); + let mut hashmap_2 = ALLOCATE_HASHMAP(); + + for entry in input { + hashmap_1.insert(entry.key, entry.value); + hashmap_2.insert(entry.key, entry.value); + } + + assert(hashmap_1 == hashmap_2, "CtHashMaps should be equal."); + + hashmap_2.remove(input[0].key); + + assert(hashmap_1 != hashmap_2, "CtHashMaps should not be equal."); +} + +// Test entries, keys, values. +unconstrained fn test_iterators() { + let mut hashmap = ALLOCATE_HASHMAP(); + + hashmap.insert(2, 3); + hashmap.insert(5, 7); + hashmap.insert(11, 13); + + let keys: [K; 3] = hashmap.keys().as_array().sort_via(K_CMP); + let values: [V; 3] = hashmap.values().as_array().sort_via(V_CMP); + let entries: [(K, V); 3] = hashmap.entries().as_array().sort_via(KV_CMP); + + assert(keys == [2, 5, 11], "Got incorrect iteration of keys."); + assert(values == [3, 7, 13], "Got incorrect iteration of values."); + assert(entries == [(2, 3), (5, 7), (11, 13)], "Got incorrect iteration of entries."); +} + +// Test mutable iteration over keys, values and entries. 
+unconstrained fn test_mut_iterators() { + let mut hashmap = ALLOCATE_HASHMAP(); + + hashmap.insert(2, 3); + hashmap.insert(5, 7); + hashmap.insert(11, 13); + + let f = |k: K| -> K{ k * 3}; + hashmap.iter_keys_mut(f); + + let f = |v: V| -> V{ v * 5}; + hashmap.iter_values_mut(f); + + let keys: [K; 3] = hashmap.keys().as_array().sort_via(K_CMP); + let values: [V; 3] = hashmap.values().as_array().sort_via(V_CMP); + + assert(keys == [6, 15, 33], f"Got incorrect iteration of keys: {keys}"); + assert(values == [15, 35, 65], "Got incorrect iteration of values."); + + let f = |k: K, v: V| -> (K, V){(k * 2, v * 2)}; + hashmap.iter_mut(f); + + let entries: [(K, V); 3] = hashmap.entries().as_array().sort_via(KV_CMP); + + assert(entries == [(12, 30), (30, 70), (66, 130)], "Got incorrect iteration of entries."); +} + +// docs:start:type_alias +type MyMap = UHashMap>; +// docs:end:type_alias + +/// Tests examples from the stdlib cthashmap documentation +unconstrained fn doc_tests() { + // docs:start:default_example + let hashmap: UHashMap> = UHashMap::default(); + assert(hashmap.is_empty()); + // docs:end:default_example + + // docs:start:with_hasher_example + let my_hasher: BuildHasherDefault = Default::default(); + let hashmap: UHashMap> = UHashMap::with_hasher(my_hasher); + assert(hashmap.is_empty()); + // docs:end:with_hasher_example + + // docs:start:insert_example + let mut map: UHashMap> = UHashMap::default(); + map.insert(12, 42); + assert(map.len() == 1); + // docs:end:insert_example + + get_example(map); + + // docs:start:remove_example + map.remove(12); + assert(map.is_empty()); + + // If a key was not present in the map, remove does nothing + map.remove(12); + assert(map.is_empty()); + // docs:end:remove_example + + // docs:start:is_empty_example + assert(map.is_empty()); + + map.insert(1, 2); + assert(!map.is_empty()); + + map.remove(1); + assert(map.is_empty()); + // docs:end:is_empty_example + + // docs:start:len_example + // This is equivalent to checking map.is_empty() + assert(map.len() == 0); + + map.insert(1, 2); + map.insert(3, 4); + map.insert(5, 6); + assert(map.len() == 3); + + // 3 was already present as a key in the hash map, so the length is unchanged + map.insert(3, 7); + assert(map.len() == 3); + + map.remove(1); + assert(map.len() == 2); + // docs:end:len_example + + // docs:start:capacity_example + let empty_map: UHashMap> = UHashMap::default(); + assert(empty_map.len() == 0); + println(empty_map.capacity()); + // docs:end:capacity_example + + // docs:start:clear_example + assert(!map.is_empty()); + map.clear(); + assert(map.is_empty()); + // docs:end:clear_example + + // docs:start:contains_key_example + if map.contains_key(7) { + let value = map.get(7); + assert(value.is_some()); + } else { + println("No value for key 7!"); + } + // docs:end:contains_key_example + + entries_examples(map); + iter_examples(map); + + // docs:start:retain_example + map.retain(|k, v| (k != 0) & (v != 0)); + // docs:end:retain_example + + // docs:start:eq_example + let mut map1: UHashMap> = UHashMap::default(); + let mut map2: UHashMap> = UHashMap::default(); + + map1.insert(1, 2); + map1.insert(3, 4); + + map2.insert(3, 4); + map2.insert(1, 2); + + assert(map1 == map2); + // docs:end:eq_example +} + +// docs:start:get_example +fn get_example(map: UHashMap>) { + let x = map.get(12); + + if x.is_some() { + assert(x.unwrap() == 42); + } +} +// docs:end:get_example + +fn entries_examples(map: UHashMap>) { + // docs:start:entries_example + let entries = map.entries(); + + // The length of a 
hashmap may not be compile-time known, so we + // need to loop over its capacity instead + for i in 0..map.capacity() { + if i < entries.len() { + let (key, value) = entries[i]; + println(f"{key} -> {value}"); + } + } + // docs:end:entries_example + + // docs:start:keys_example + let keys = map.keys(); + + for key in keys { + let value = map.get(key).unwrap_unchecked(); + println(f"{key} -> {value}"); + } + // docs:end:keys_example + + // docs:start:values_example + let values = map.values(); + + for value in values { + println(f"Found value {value}"); + } + // docs:end:values_example +} + +unconstrained fn iter_examples(mut map: UHashMap>) { + // docs:start:iter_mut_example + // Add 1 to each key in the map, and double the value associated with that key. + map.iter_mut(|k, v| (k + 1, v * 2)); + // docs:end:iter_mut_example + + // docs:start:iter_keys_mut_example + // Double each key, leaving the value associated with that key untouched + map.iter_keys_mut(|k| k * 2); + // docs:end:iter_keys_mut_example + + // docs:start:iter_values_mut_example + // Halve each value + map.iter_values_mut(|v| v / 2); + // docs:end:iter_values_mut_example +} diff --git a/test_programs/noir_test_success/embedded_curve_ops/src/main.nr b/test_programs/noir_test_success/embedded_curve_ops/src/main.nr index 225e86397fd..0c2c333fa62 100644 --- a/test_programs/noir_test_success/embedded_curve_ops/src/main.nr +++ b/test_programs/noir_test_success/embedded_curve_ops/src/main.nr @@ -10,28 +10,28 @@ use std::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_sca let s1 = EmbeddedCurveScalar { lo: 1, hi: 0 }; let a = multi_scalar_mul([g1], [s1]); - assert(a[2] == 0); + assert(!a.is_infinite); assert(g1 + zero == g1); assert(g1 - g1 == zero); assert(g1 - zero == g1); assert(zero + zero == zero); assert( multi_scalar_mul([g1], [s1]) - == [1, 17631683881184975370165255887551781615748388533673675138860, 0] + == EmbeddedCurvePoint { x: 1, y: 17631683881184975370165255887551781615748388533673675138860, is_infinite: false } ); - assert(multi_scalar_mul([g1, g1], [s1, s1]) == [g2.x, g2.y, 0]); + assert(multi_scalar_mul([g1, g1], [s1, s1]) == g2); assert( multi_scalar_mul( [g1, zero], [EmbeddedCurveScalar { lo: 2, hi: 0 }, EmbeddedCurveScalar { lo: 42, hi: 25 }] ) - == [g2.x, g2.y, 0] + == g2 ); assert( multi_scalar_mul( [g1, g1, zero], [s1, s1, EmbeddedCurveScalar { lo: 42, hi: 25 }] ) - == [g2.x, g2.y, 0] + == g2 ); } diff --git a/test_programs/rebuild.sh b/test_programs/rebuild.sh index 13479f58b4b..a70f69d531d 100755 --- a/test_programs/rebuild.sh +++ b/test_programs/rebuild.sh @@ -45,7 +45,7 @@ rm -rf $current_dir/acir_artifacts mkdir -p $current_dir/acir_artifacts # Gather directories to process. -dirs_to_process=() +# dirs_to_process=() for dir in $base_path/*; do if [[ ! 
-d $dir ]] || [[ " ${excluded_dirs[@]} " =~ " $(basename "$dir") " ]]; then continue diff --git a/tooling/debugger/Cargo.toml b/tooling/debugger/Cargo.toml index 05b28f9d95a..540d6d11bc0 100644 --- a/tooling/debugger/Cargo.toml +++ b/tooling/debugger/Cargo.toml @@ -14,7 +14,7 @@ build-data.workspace = true acvm.workspace = true fm.workspace = true nargo.workspace = true -noirc_frontend.workspace = true +noirc_frontend = { workspace = true, features = ["bn254"] } noirc_printable_type.workspace = true noirc_errors.workspace = true noirc_driver.workspace = true diff --git a/tooling/debugger/ignored-tests.txt b/tooling/debugger/ignored-tests.txt index a3971d437fb..745971d9b28 100644 --- a/tooling/debugger/ignored-tests.txt +++ b/tooling/debugger/ignored-tests.txt @@ -1,18 +1,6 @@ -bigint brillig_references -brillig_to_bytes_integration debug_logs -fold_after_inlined_calls -fold_basic -fold_basic_nested_call -fold_call_witness_condition -fold_complex_outputs -fold_distinct_return -fold_fibonacci -fold_numeric_generic_poseidon is_unconstrained macros -modulus references -regression_4709 -to_bytes_integration +regression_4709 \ No newline at end of file diff --git a/tooling/debugger/src/context.rs b/tooling/debugger/src/context.rs index cb36988bf0b..7cdbe515649 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -1,10 +1,12 @@ use crate::foreign_calls::DebugForeignCallExecutor; +use acvm::acir::brillig::BitSize; use acvm::acir::circuit::brillig::BrilligBytecode; use acvm::acir::circuit::{Circuit, Opcode, OpcodeLocation}; -use acvm::acir::native_types::{Witness, WitnessMap}; +use acvm::acir::native_types::{Witness, WitnessMap, WitnessStack}; use acvm::brillig_vm::MemoryValue; use acvm::pwg::{ - ACVMStatus, BrilligSolver, BrilligSolverStatus, ForeignCallWaitInfo, StepResult, ACVM, + ACVMStatus, AcirCallWaitInfo, BrilligSolver, BrilligSolverStatus, ForeignCallWaitInfo, + OpcodeNotSolvable, StepResult, ACVM, }; use acvm::{BlackBoxFunctionSolver, FieldElement}; @@ -15,56 +17,235 @@ use nargo::NargoError; use noirc_artifacts::debug::{DebugArtifact, StackFrame}; use noirc_driver::DebugFile; +use thiserror::Error; + use std::collections::BTreeMap; use std::collections::{hash_set::Iter, HashSet}; +/// A Noir program is composed of +/// `n` ACIR circuits +/// |_ `m` ACIR opcodes +/// |_ Acir call +/// |_ Acir Brillig function invocation +/// |_ `p` Brillig opcodes +/// +/// The purpose of this structure is to map the opcode locations in ACIR circuits into +/// a flat contiguous address space to be able to expose them to the DAP interface. +/// In this address space, the ACIR circuits are laid out one after the other, and +/// Brillig functions called from such circuits are expanded inline, replacing +/// the `BrilligCall` ACIR opcode. +/// +/// `addresses: Vec>` +/// * The outer vec is `n` sized - one element per ACIR circuit +/// * Each nested vec is `m` sized - one element per ACIR opcode in the circuit +/// * Each element is the "virtual address" of that opcode +/// +/// For flattening, we map each ACIR circuit and ACIR opcode to a sequential address number. +/// We start by assigning 0 to the very first ACIR opcode and then keep accumulating while +/// traversing depth-first. +/// +/// Even though the address space is contiguous, the `addresses` tree only +/// keeps track of the ACIR opcodes, since the Brillig opcode addresses can be +/// calculated from the initial opcode address. +/// As a result the flattened indexed addresses list may have "holes". 
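+/// +/// For example (an illustrative program shape, not a circuit taken from this patch): a single circuit whose +/// opcodes are [MemoryInit, BrilligCall (4 Brillig opcodes), AssertZero] yields `addresses == [[0, 1, 5]]` and +/// `last_valid_address == 5`; addresses 2..=4 fall into the "hole" after address 1 and resolve to locations +/// inside that Brillig call, e.g. address 3 maps to `OpcodeLocation::Brillig { acir_index: 1, brillig_index: 2 }`. 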
+/// +/// If between two consecutive `addresses` nodes there is a "hole" (an address jump), +/// this means that the first one is actually an ACIR Brillig call +/// which has as many Brillig opcodes as `second_address - first_address` +/// +#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] +pub struct AddressMap { + addresses: Vec>, + + // virtual address of the last opcode of the program + last_valid_address: usize, +} + +impl AddressMap { + pub(super) fn new( + circuits: &[Circuit], + unconstrained_functions: &[BrilligBytecode], + ) -> Self { + let opcode_address_size = |opcode: &Opcode| { + if let Opcode::BrilligCall { id, .. } = opcode { + unconstrained_functions[*id as usize].bytecode.len() + } else { + 1 + } + }; + + let mut addresses = Vec::with_capacity(circuits.len()); + let mut next_address = 0usize; + + for circuit in circuits { + let mut circuit_addresses = Vec::with_capacity(circuit.opcodes.len()); + for opcode in &circuit.opcodes { + circuit_addresses.push(next_address); + next_address += opcode_address_size(opcode); + } + addresses.push(circuit_addresses); + } + + Self { addresses, last_valid_address: next_address - 1 } + } + + /// Returns the absolute address of the opcode at the given location. + /// Absolute here means accounting for nested Brillig opcodes in BrilligCall + /// opcodes. + pub fn debug_location_to_address(&self, location: &DebugLocation) -> usize { + let circuit_addresses = &self.addresses[location.circuit_id as usize]; + match &location.opcode_location { + OpcodeLocation::Acir(acir_index) => circuit_addresses[*acir_index], + OpcodeLocation::Brillig { acir_index, brillig_index } => { + circuit_addresses[*acir_index] + *brillig_index + } + } + } + + pub fn address_to_debug_location(&self, address: usize) -> Option { + if address > self.last_valid_address { + return None; + } + // We binary search the given address against the first opcode address of each circuit. + // If it is not found, the address itself is "contained" in the previous + // circuit, indicated by `Err(insert_index)` + let circuit_id = + match self.addresses.binary_search_by(|addresses| addresses[0].cmp(&address)) { + Ok(found_index) => found_index, + // Being an `Err` means that the address is not the start of the `insert_index` circuit, + // so it must be included in the previous circuit's vec of opcodes + Err(insert_index) => insert_index - 1, + }; + + // We binary search among the selected `circuit_id` list of opcodes. + // `Err(insert_index)` means that the given address + // is a Brillig address contained in the previous ACIR opcode + let opcode_location = match self.addresses[circuit_id].binary_search(&address) { + Ok(found_index) => OpcodeLocation::Acir(found_index), + Err(insert_index) => { + let acir_index = insert_index - 1; + let base_offset = self.addresses[circuit_id][acir_index]; + let brillig_index = address - base_offset; + OpcodeLocation::Brillig { acir_index, brillig_index } + } + }; + Some(DebugLocation { circuit_id: circuit_id as u32, opcode_location }) + } +} + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] +pub struct DebugLocation { + pub circuit_id: u32, + pub opcode_location: OpcodeLocation, +} + +impl std::fmt::Display for DebugLocation { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let circuit_id = self.circuit_id; + match self.opcode_location { + OpcodeLocation::Acir(index) => write!(f, "{circuit_id}:{index}"), + OpcodeLocation::Brillig { acir_index, brillig_index } => { + write!(f, 
"{circuit_id}:{acir_index}.{brillig_index}") + } + } + } +} + +#[derive(Error, Debug)] +pub enum DebugLocationFromStrError { + #[error("Invalid debug location string: {0}")] + InvalidDebugLocationString(String), +} + +impl std::str::FromStr for DebugLocation { + type Err = DebugLocationFromStrError; + fn from_str(s: &str) -> Result { + let parts: Vec<_> = s.split(':').collect(); + let error = Err(DebugLocationFromStrError::InvalidDebugLocationString(s.to_string())); + + match parts.len() { + 1 => OpcodeLocation::from_str(parts[0]).map_or(error, |opcode_location| { + Ok(DebugLocation { circuit_id: 0, opcode_location }) + }), + 2 => { + let first_part = parts[0].parse().ok(); + let second_part = OpcodeLocation::from_str(parts[1]).ok(); + if let (Some(circuit_id), Some(opcode_location)) = (first_part, second_part) { + Ok(DebugLocation { circuit_id, opcode_location }) + } else { + error + } + } + _ => error, + } + } +} + #[derive(Debug)] pub(super) enum DebugCommandResult { Done, Ok, - BreakpointReached(OpcodeLocation), + BreakpointReached(DebugLocation), Error(NargoError), } +pub struct ExecutionFrame<'a, B: BlackBoxFunctionSolver> { + circuit_id: u32, + acvm: ACVM<'a, FieldElement, B>, +} + pub(super) struct DebugContext<'a, B: BlackBoxFunctionSolver> { acvm: ACVM<'a, FieldElement, B>, + current_circuit_id: u32, brillig_solver: Option>, + + witness_stack: WitnessStack, + acvm_stack: Vec>, + + backend: &'a B, foreign_call_executor: Box, + debug_artifact: &'a DebugArtifact, - breakpoints: HashSet, - source_to_opcodes: BTreeMap>, + breakpoints: HashSet, + source_to_locations: BTreeMap>, + + circuits: &'a [Circuit], unconstrained_functions: &'a [BrilligBytecode], - // Absolute (in terms of all the opcodes ACIR+Brillig) addresses of the ACIR - // opcodes with one additional entry for to indicate the last valid address. 
- acir_opcode_addresses: Vec, + acir_opcode_addresses: AddressMap, } impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { pub(super) fn new( blackbox_solver: &'a B, - circuit: &'a Circuit, + circuits: &'a [Circuit], debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, foreign_call_executor: Box, unconstrained_functions: &'a [BrilligBytecode], ) -> Self { let source_to_opcodes = build_source_to_opcode_debug_mappings(debug_artifact); - let acir_opcode_addresses = build_acir_opcode_offsets(circuit, unconstrained_functions); + let current_circuit_id: u32 = 0; + let initial_circuit = &circuits[current_circuit_id as usize]; + let acir_opcode_addresses = AddressMap::new(circuits, unconstrained_functions); Self { - // TODO: need to handle brillig pointer in the debugger acvm: ACVM::new( blackbox_solver, - &circuit.opcodes, + &initial_circuit.opcodes, initial_witness, unconstrained_functions, - &circuit.assert_messages, + &initial_circuit.assert_messages, ), + current_circuit_id, brillig_solver: None, + witness_stack: WitnessStack::default(), + acvm_stack: vec![], + backend: blackbox_solver, foreign_call_executor, debug_artifact, breakpoints: HashSet::new(), - source_to_opcodes, + source_to_locations: source_to_opcodes, + circuits, unconstrained_functions, acir_opcode_addresses, } @@ -74,6 +255,10 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.acvm.opcodes() } + pub(super) fn get_opcodes_of_circuit(&self, circuit_id: u32) -> &[Opcode] { + &self.circuits[circuit_id as usize].opcodes + } + pub(super) fn get_witness_map(&self) -> &WitnessMap { self.acvm.witness_map() } @@ -86,36 +271,49 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.acvm.overwrite_witness(witness, value) } - pub(super) fn get_current_opcode_location(&self) -> Option { + pub(super) fn get_current_debug_location(&self) -> Option { let ip = self.acvm.instruction_pointer(); if ip >= self.get_opcodes().len() { None - } else if let Some(ref solver) = self.brillig_solver { - Some(OpcodeLocation::Brillig { - acir_index: ip, - brillig_index: solver.program_counter(), - }) } else { - Some(OpcodeLocation::Acir(ip)) + let opcode_location = if let Some(ref solver) = self.brillig_solver { + OpcodeLocation::Brillig { acir_index: ip, brillig_index: solver.program_counter() } + } else { + OpcodeLocation::Acir(ip) + }; + Some(DebugLocation { circuit_id: self.current_circuit_id, opcode_location }) } } - pub(super) fn get_call_stack(&self) -> Vec { + pub(super) fn get_call_stack(&self) -> Vec { + // Build the frames from parent ACIR calls + let mut frames: Vec<_> = self + .acvm_stack + .iter() + .map(|ExecutionFrame { circuit_id, acvm }| DebugLocation { + circuit_id: *circuit_id, + opcode_location: OpcodeLocation::Acir(acvm.instruction_pointer()), + }) + .collect(); + + // Now add the frame(s) for the currently executing ACVM let instruction_pointer = self.acvm.instruction_pointer(); - if instruction_pointer >= self.get_opcodes().len() { - vec![] - } else if let Some(ref solver) = self.brillig_solver { - solver - .get_call_stack() - .iter() - .map(|program_counter| OpcodeLocation::Brillig { + let circuit_id = self.current_circuit_id; + if let Some(ref solver) = self.brillig_solver { + frames.extend(solver.get_call_stack().iter().map(|program_counter| DebugLocation { + circuit_id, + opcode_location: OpcodeLocation::Brillig { acir_index: instruction_pointer, brillig_index: *program_counter, - }) - .collect() - } else { - vec![OpcodeLocation::Acir(instruction_pointer)] + }, + })); + } else if 
instruction_pointer < self.get_opcodes().len() { + frames.push(DebugLocation { + circuit_id, + opcode_location: OpcodeLocation::Acir(instruction_pointer), + }); } + frames } pub(super) fn is_source_location_in_debug_module(&self, location: &Location) -> bool { @@ -142,10 +340,10 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { &self, file_id: &FileId, line: i64, - ) -> Option { + ) -> Option { let line = line as usize; - let line_to_opcodes = self.source_to_opcodes.get(file_id)?; - let found_index = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { + let line_to_opcodes = self.source_to_locations.get(file_id)?; + let found_location = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { Ok(index) => { // move backwards to find the first opcode which matches the line let mut index = index; @@ -161,7 +359,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { line_to_opcodes[index].1 } }; - Some(found_index) + Some(found_location) } /// Returns the callstack in source code locations for the currently @@ -172,9 +370,9 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { /// This function also filters source locations that are determined to be in /// the internal debug module. pub(super) fn get_current_source_location(&self) -> Option> { - self.get_current_opcode_location() + self.get_current_debug_location() .as_ref() - .map(|opcode_location| self.get_source_location_for_opcode_location(opcode_location)) + .map(|debug_location| self.get_source_location_for_debug_location(debug_location)) .filter(|v: &Vec| !v.is_empty()) } @@ -184,15 +382,12 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { /// the given opcode location cannot be mapped back to a source location /// (eg. it may be pure debug instrumentation code or other synthetically /// produced opcode by the compiler) - pub(super) fn get_source_location_for_opcode_location( + pub(super) fn get_source_location_for_debug_location( &self, - opcode_location: &OpcodeLocation, + debug_location: &DebugLocation, ) -> Vec { - // TODO: this assumes we're debugging a program (ie. the DebugArtifact - // will contain a single DebugInfo), but this assumption doesn't hold - // for contracts - self.debug_artifact.debug_symbols[0] - .opcode_location(opcode_location) + self.debug_artifact.debug_symbols[debug_location.circuit_id as usize] + .opcode_location(&debug_location.opcode_location) .map(|source_locations| { source_locations .into_iter() @@ -208,48 +403,30 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { /// general, the matching between opcode location and source location is 1 /// to 1, but due to the compiler inlining functions a single opcode /// location may expand to multiple source locations. - pub(super) fn get_source_call_stack(&self) -> Vec<(OpcodeLocation, Location)> { + pub(super) fn get_source_call_stack(&self) -> Vec<(DebugLocation, Location)> { self.get_call_stack() .iter() - .flat_map(|opcode_location| { - self.get_source_location_for_opcode_location(opcode_location) + .flat_map(|debug_location| { + self.get_source_location_for_debug_location(debug_location) .into_iter() - .map(|source_location| (*opcode_location, source_location)) + .map(|source_location| (*debug_location, source_location)) }) .collect() } /// Returns the absolute address of the opcode at the given location. - /// Absolute here means accounting for nested Brillig opcodes in BrilligCall - /// opcodes. 
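+    /// For example (illustrative): if circuit 0 occupies addresses 0..=4, then
+    /// `DebugLocation { circuit_id: 1, opcode_location: OpcodeLocation::Acir(0) }` maps to address 5.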
- pub fn opcode_location_to_address(&self, location: &OpcodeLocation) -> usize { - match location { - OpcodeLocation::Acir(acir_index) => self.acir_opcode_addresses[*acir_index], - OpcodeLocation::Brillig { acir_index, brillig_index } => { - self.acir_opcode_addresses[*acir_index] + *brillig_index - } - } + pub fn debug_location_to_address(&self, location: &DebugLocation) -> usize { + self.acir_opcode_addresses.debug_location_to_address(location) } - pub fn address_to_opcode_location(&self, address: usize) -> Option { - if address >= *self.acir_opcode_addresses.last().unwrap_or(&0) { - return None; - } - let location = match self.acir_opcode_addresses.binary_search(&address) { - Ok(found_index) => OpcodeLocation::Acir(found_index), - Err(insert_index) => { - let acir_index = insert_index - 1; - let base_offset = self.acir_opcode_addresses[acir_index]; - let brillig_index = address - base_offset; - OpcodeLocation::Brillig { acir_index, brillig_index } - } - }; - Some(location) + // Returns the DebugLocation associated to the given address + pub fn address_to_debug_location(&self, address: usize) -> Option { + self.acir_opcode_addresses.address_to_debug_location(address) } - pub(super) fn render_opcode_at_location(&self, location: &OpcodeLocation) -> String { - let opcodes = self.get_opcodes(); - match location { + pub(super) fn render_opcode_at_location(&self, location: &DebugLocation) -> String { + let opcodes = self.get_opcodes_of_circuit(location.circuit_id); + match &location.opcode_location { OpcodeLocation::Acir(acir_index) => { let opcode = &opcodes[*acir_index]; match opcode { @@ -280,7 +457,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.brillig_solver = Some(solver); if self.breakpoint_reached() { DebugCommandResult::BreakpointReached( - self.get_current_opcode_location() + self.get_current_debug_location() .expect("Breakpoint reached but we have no location"), ) } else { @@ -296,7 +473,6 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.handle_foreign_call(foreign_call) } Err(err) => DebugCommandResult::Error(NargoError::ExecutionError( - // TODO: debugger does not handle multiple acir calls ExecutionError::SolvingError(err, None), )), } @@ -315,24 +491,82 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } else { self.acvm.resolve_pending_foreign_call(foreign_call_result); } - // TODO: should we retry executing the opcode somehow in this case? + // TODO: should we retry executing the opcode somehow in this + // case? Otherwise, executing a foreign call takes two debugging + // steps. DebugCommandResult::Ok } Err(error) => DebugCommandResult::Error(error.into()), } } - fn handle_acvm_status(&mut self, status: ACVMStatus) -> DebugCommandResult { - if let ACVMStatus::RequiresForeignCall(foreign_call) = status { - return self.handle_foreign_call(foreign_call); + fn handle_acir_call( + &mut self, + call_info: AcirCallWaitInfo, + ) -> DebugCommandResult { + let callee_circuit = &self.circuits[call_info.id as usize]; + let callee_witness_map = call_info.initial_witness; + let callee_acvm = ACVM::new( + self.backend, + &callee_circuit.opcodes, + callee_witness_map, + self.unconstrained_functions, + &callee_circuit.assert_messages, + ); + let caller_acvm = std::mem::replace(&mut self.acvm, callee_acvm); + self.acvm_stack + .push(ExecutionFrame { circuit_id: self.current_circuit_id, acvm: caller_acvm }); + self.current_circuit_id = call_info.id; + + // Explicitly handling the new ACVM status here handles two edge cases: + // 1. 
there is a breakpoint set at the beginning of a circuit + // 2. the called circuit has no opcodes + self.handle_acvm_status(self.acvm.get_status().clone()) + } + + fn handle_acir_call_finished(&mut self) -> DebugCommandResult { + let caller_frame = self.acvm_stack.pop().expect("Execution stack should not be empty"); + let caller_acvm = caller_frame.acvm; + let callee_acvm = std::mem::replace(&mut self.acvm, caller_acvm); + self.current_circuit_id = caller_frame.circuit_id; + let call_solved_witness = callee_acvm.finalize(); + + let ACVMStatus::RequiresAcirCall(call_info) = self.acvm.get_status() else { + unreachable!("Resolving an ACIR call, the caller is in an invalid state"); + }; + let acir_to_call = &self.circuits[call_info.id as usize]; + + let mut call_resolved_outputs = Vec::new(); + for return_witness_index in acir_to_call.return_values.indices() { + if let Some(return_value) = call_solved_witness.get_index(return_witness_index) { + call_resolved_outputs.push(*return_value); + } else { + return DebugCommandResult::Error( + ExecutionError::SolvingError( + OpcodeNotSolvable::MissingAssignment(return_witness_index).into(), + None, // Missing assignment errors do not supply user-facing diagnostics so we do not need to attach a call stack + ) + .into(), + ); + } } + self.acvm.resolve_pending_acir_call(call_resolved_outputs); + + DebugCommandResult::Ok + } + fn handle_acvm_status(&mut self, status: ACVMStatus) -> DebugCommandResult { match status { - ACVMStatus::Solved => DebugCommandResult::Done, + ACVMStatus::Solved => { + if self.acvm_stack.is_empty() { + return DebugCommandResult::Done; + } + self.handle_acir_call_finished() + } ACVMStatus::InProgress => { if self.breakpoint_reached() { DebugCommandResult::BreakpointReached( - self.get_current_opcode_location() + self.get_current_debug_location() .expect("Breakpoint reached but we have no location"), ) } else { @@ -340,15 +574,10 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } ACVMStatus::Failure(error) => DebugCommandResult::Error(NargoError::ExecutionError( - // TODO: debugger does not handle multiple acir calls ExecutionError::SolvingError(error, None), )), - ACVMStatus::RequiresForeignCall(_) => { - unreachable!("Unexpected pending foreign call resolution"); - } - ACVMStatus::RequiresAcirCall(_) => { - todo!("Multiple ACIR calls are not supported"); - } + ACVMStatus::RequiresForeignCall(foreign_call) => self.handle_foreign_call(foreign_call), + ACVMStatus::RequiresAcirCall(call_info) => self.handle_acir_call(call_info), } } @@ -367,9 +596,11 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } fn get_current_acir_index(&self) -> Option { - self.get_current_opcode_location().map(|opcode_location| match opcode_location { - OpcodeLocation::Acir(acir_index) => acir_index, - OpcodeLocation::Brillig { acir_index, .. } => acir_index, + self.get_current_debug_location().map(|debug_location| { + match debug_location.opcode_location { + OpcodeLocation::Acir(acir_index) => acir_index, + OpcodeLocation::Brillig { acir_index, .. } => acir_index, + } }) } @@ -394,10 +625,16 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { return true; } - match self.get_current_opcode_location() { - Some(OpcodeLocation::Brillig { .. }) => true, - Some(OpcodeLocation::Acir(acir_index)) => { - matches!(self.get_opcodes()[acir_index], Opcode::BrilligCall { .. }) + match self.get_current_debug_location() { + Some(DebugLocation { opcode_location: OpcodeLocation::Brillig { .. }, .. 
}) => true, + Some(DebugLocation { + circuit_id, + opcode_location: OpcodeLocation::Acir(acir_index), + }) => { + matches!( + self.get_opcodes_of_circuit(circuit_id)[acir_index], + Opcode::BrilligCall { .. } + ) } _ => false, } @@ -472,7 +709,12 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.brillig_solver.as_ref().map(|solver| solver.get_memory()) } - pub(super) fn write_brillig_memory(&mut self, ptr: usize, value: FieldElement, bit_size: u32) { + pub(super) fn write_brillig_memory( + &mut self, + ptr: usize, + value: FieldElement, + bit_size: BitSize, + ) { if let Some(solver) = self.brillig_solver.as_mut() { solver.write_memory_at( ptr, @@ -491,16 +733,19 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } fn breakpoint_reached(&self) -> bool { - if let Some(location) = self.get_current_opcode_location() { + if let Some(location) = self.get_current_debug_location() { self.breakpoints.contains(&location) } else { false } } - pub(super) fn is_valid_opcode_location(&self, location: &OpcodeLocation) -> bool { - let opcodes = self.get_opcodes(); - match *location { + pub(super) fn is_valid_debug_location(&self, location: &DebugLocation) -> bool { + if location.circuit_id as usize >= self.circuits.len() { + return false; + } + let opcodes = self.get_opcodes_of_circuit(location.circuit_id); + match location.opcode_location { OpcodeLocation::Acir(acir_index) => acir_index < opcodes.len(), OpcodeLocation::Brillig { acir_index, brillig_index } => { if acir_index < opcodes.len() { @@ -518,19 +763,19 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } - pub(super) fn is_breakpoint_set(&self, location: &OpcodeLocation) -> bool { + pub(super) fn is_breakpoint_set(&self, location: &DebugLocation) -> bool { self.breakpoints.contains(location) } - pub(super) fn add_breakpoint(&mut self, location: OpcodeLocation) -> bool { + pub(super) fn add_breakpoint(&mut self, location: DebugLocation) -> bool { self.breakpoints.insert(location) } - pub(super) fn delete_breakpoint(&mut self, location: &OpcodeLocation) -> bool { + pub(super) fn delete_breakpoint(&mut self, location: &DebugLocation) -> bool { self.breakpoints.remove(location) } - pub(super) fn iterate_breakpoints(&self) -> Iter<'_, OpcodeLocation> { + pub(super) fn iterate_breakpoints(&self) -> Iter<'_, DebugLocation> { self.breakpoints.iter() } @@ -542,8 +787,10 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { matches!(self.acvm.get_status(), ACVMStatus::Solved) } - pub fn finalize(self) -> WitnessMap { - self.acvm.finalize() + pub fn finalize(mut self) -> WitnessStack { + let last_witness_map = self.acvm.finalize(); + self.witness_stack.push(0, last_witness_map); + self.witness_stack } } @@ -555,11 +802,10 @@ fn is_debug_file_in_debug_crate(debug_file: &DebugFile) -> bool { /// numbers and opcode locations corresponding to those line numbers fn build_source_to_opcode_debug_mappings( debug_artifact: &DebugArtifact, -) -> BTreeMap> { +) -> BTreeMap> { if debug_artifact.debug_symbols.is_empty() { return BTreeMap::new(); } - let locations = &debug_artifact.debug_symbols[0].locations; let simple_files: BTreeMap<_, _> = debug_artifact .file_map .iter() @@ -572,50 +818,34 @@ fn build_source_to_opcode_debug_mappings( }) .collect(); - let mut result: BTreeMap> = BTreeMap::new(); - locations.iter().for_each(|(opcode_location, source_locations)| { - source_locations.iter().for_each(|source_location| { - let span = source_location.span; - let file_id = source_location.file; - let Some(file) = 
simple_files.get(&file_id) else { - return; - }; - let Ok(line_index) = file.line_index((), span.start() as usize) else { - return; - }; - let line_number = line_index + 1; - - result.entry(file_id).or_default().push((line_number, *opcode_location)); - }); - }); + let mut result: BTreeMap> = BTreeMap::new(); + + for (circuit_id, debug_symbols) in debug_artifact.debug_symbols.iter().enumerate() { + for (opcode_location, source_locations) in &debug_symbols.locations { + source_locations.iter().for_each(|source_location| { + let span = source_location.span; + let file_id = source_location.file; + let Some(file) = simple_files.get(&file_id) else { + return; + }; + let Ok(line_index) = file.line_index((), span.start() as usize) else { + return; + }; + let line_number = line_index + 1; + + let debug_location = DebugLocation { + circuit_id: circuit_id as u32, + opcode_location: *opcode_location, + }; + result.entry(file_id).or_default().push((line_number, debug_location)); + }); + } + } result.iter_mut().for_each(|(_, file_locations)| file_locations.sort_by_key(|x| (x.0, x.1))); result } -fn build_acir_opcode_offsets( - circuit: &Circuit, - unconstrained_functions: &[BrilligBytecode], -) -> Vec { - let mut result = Vec::with_capacity(circuit.opcodes.len() + 1); - // address of the first opcode is always 0 - result.push(0); - circuit.opcodes.iter().fold(0, |acc, opcode| { - let acc = acc - + match opcode { - Opcode::BrilligCall { id, .. } => { - unconstrained_functions[*id as usize].bytecode.len() - } - _ => 1, - }; - // push the starting address of the next opcode - result.push(acc); - acc - }); - result -} - -// TODO: update all debugger tests to use unconstrained brillig pointers #[cfg(test)] mod tests { use super::*; @@ -623,9 +853,10 @@ mod tests { use crate::foreign_calls::DefaultDebugForeignCallExecutor; use acvm::{ acir::{ + brillig::IntegerBitSize, circuit::{ brillig::{BrilligInputs, BrilligOutputs}, - opcodes::BlockId, + opcodes::{BlockId, BlockType}, }, native_types::Expression, AcirField, @@ -652,7 +883,7 @@ mod tests { BrilligOpcode::Const { destination: MemoryAddress::from(1), value: fe_0, - bit_size: 32, + bit_size: BitSize::Integer(IntegerBitSize::U32), }, BrilligOpcode::ForeignCall { function: "clear_mock".into(), @@ -675,7 +906,8 @@ mod tests { }]; let brillig_funcs = &vec![brillig_bytecode]; let current_witness_index = 2; - let circuit = &Circuit { current_witness_index, opcodes, ..Circuit::default() }; + let circuit = Circuit { current_witness_index, opcodes, ..Circuit::default() }; + let circuits = &vec![circuit]; let debug_symbols = vec![]; let file_map = BTreeMap::new(); @@ -687,51 +919,66 @@ mod tests { Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); let mut context = DebugContext::new( &StubbedBlackBoxSolver, - circuit, + circuits, debug_artifact, initial_witness, foreign_call_executor, brillig_funcs, ); - assert_eq!(context.get_current_opcode_location(), Some(OpcodeLocation::Acir(0))); + assert_eq!( + context.get_current_debug_location(), + Some(DebugLocation { circuit_id: 0, opcode_location: OpcodeLocation::Acir(0) }) + ); // Execute the first Brillig opcode (calldata copy) let result = context.step_into_opcode(); assert!(matches!(result, DebugCommandResult::Ok)); assert_eq!( - context.get_current_opcode_location(), - Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }) + context.get_current_debug_location(), + Some(DebugLocation { + circuit_id: 0, + opcode_location: OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 } 
+ }) ); // execute the second Brillig opcode (const) let result = context.step_into_opcode(); assert!(matches!(result, DebugCommandResult::Ok)); assert_eq!( - context.get_current_opcode_location(), - Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }) + context.get_current_debug_location(), + Some(DebugLocation { + circuit_id: 0, + opcode_location: OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 } + }) ); // try to execute the third Brillig opcode (and resolve the foreign call) let result = context.step_into_opcode(); assert!(matches!(result, DebugCommandResult::Ok)); assert_eq!( - context.get_current_opcode_location(), - Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }) + context.get_current_debug_location(), + Some(DebugLocation { + circuit_id: 0, + opcode_location: OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 } + }) ); // retry the third Brillig opcode (foreign call should be finished) let result = context.step_into_opcode(); assert!(matches!(result, DebugCommandResult::Ok)); assert_eq!( - context.get_current_opcode_location(), - Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 3 }) + context.get_current_debug_location(), + Some(DebugLocation { + circuit_id: 0, + opcode_location: OpcodeLocation::Brillig { acir_index: 0, brillig_index: 3 } + }) ); // last Brillig opcode let result = context.step_into_opcode(); assert!(matches!(result, DebugCommandResult::Done)); - assert_eq!(context.get_current_opcode_location(), None); + assert_eq!(context.get_current_debug_location(), None); } #[test] @@ -784,7 +1031,8 @@ mod tests { }), ]; let current_witness_index = 3; - let circuit = &Circuit { current_witness_index, opcodes, ..Circuit::default() }; + let circuit = Circuit { current_witness_index, opcodes, ..Circuit::default() }; + let circuits = &vec![circuit]; let debug_symbols = vec![]; let file_map = BTreeMap::new(); @@ -797,7 +1045,7 @@ mod tests { let brillig_funcs = &vec![brillig_bytecode]; let mut context = DebugContext::new( &StubbedBlackBoxSolver, - circuit, + circuits, debug_artifact, initial_witness, foreign_call_executor, @@ -805,28 +1053,40 @@ mod tests { ); // set breakpoint - let breakpoint_location = OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }; + let breakpoint_location = DebugLocation { + circuit_id: 0, + opcode_location: OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }, + }; assert!(context.add_breakpoint(breakpoint_location)); // execute the first ACIR opcode (Brillig block) -> should reach the breakpoint instead let result = context.step_acir_opcode(); assert!(matches!(result, DebugCommandResult::BreakpointReached(_))); - assert_eq!(context.get_current_opcode_location(), Some(breakpoint_location)); + assert_eq!(context.get_current_debug_location(), Some(breakpoint_location)); // continue execution to the next ACIR opcode let result = context.step_acir_opcode(); assert!(matches!(result, DebugCommandResult::Ok)); - assert_eq!(context.get_current_opcode_location(), Some(OpcodeLocation::Acir(1))); + assert_eq!( + context.get_current_debug_location(), + Some(DebugLocation { circuit_id: 0, opcode_location: OpcodeLocation::Acir(1) }) + ); // last ACIR opcode let result = context.step_acir_opcode(); assert!(matches!(result, DebugCommandResult::Done)); - assert_eq!(context.get_current_opcode_location(), None); + assert_eq!(context.get_current_debug_location(), None); } #[test] - fn test_address_opcode_location_mapping() { - let brillig_bytecode = BrilligBytecode { + fn 
test_address_debug_location_mapping() { + let brillig_one = BrilligBytecode { + bytecode: vec![ + BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, + BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, + ], + }; + let brillig_two = BrilligBytecode { bytecode: vec![ BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, @@ -834,22 +1094,33 @@ mod tests { ], }; - let opcodes = vec![ - Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, - Opcode::MemoryInit { - block_id: BlockId(0), - init: vec![], - block_type: acvm::acir::circuit::opcodes::BlockType::Memory, - }, - Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, - Opcode::AssertZero(Expression::default()), - ]; - let circuit = Circuit { opcodes, ..Circuit::default() }; + let circuit_one = Circuit { + opcodes: vec![ + Opcode::MemoryInit { + block_id: BlockId(0), + init: vec![], + block_type: BlockType::Memory, + }, + Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, + Opcode::Call { id: 1, inputs: vec![], outputs: vec![], predicate: None }, + Opcode::AssertZero(Expression::default()), + ], + ..Circuit::default() + }; + let circuit_two = Circuit { + opcodes: vec![ + Opcode::BrilligCall { id: 1, inputs: vec![], outputs: vec![], predicate: None }, + Opcode::AssertZero(Expression::default()), + ], + ..Circuit::default() + }; + let circuits = vec![circuit_one, circuit_two]; let debug_artifact = DebugArtifact { debug_symbols: vec![], file_map: BTreeMap::new() }; - let brillig_funcs = &vec![brillig_bytecode]; + let brillig_funcs = &vec![brillig_one, brillig_two]; + let context = DebugContext::new( &StubbedBlackBoxSolver, - &circuit, + &circuits, &debug_artifact, WitnessMap::new(), Box::new(DefaultDebugForeignCallExecutor::new(true)), @@ -857,46 +1128,56 @@ mod tests { ); let locations = - (0..=7).map(|address| context.address_to_opcode_location(address)).collect::>(); + (0..=8).map(|address| context.address_to_debug_location(address)).collect::>(); // mapping from addresses to opcode locations assert_eq!( locations, vec![ - Some(OpcodeLocation::Acir(0)), - Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }), - Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }), - Some(OpcodeLocation::Acir(1)), - Some(OpcodeLocation::Acir(2)), - Some(OpcodeLocation::Brillig { acir_index: 2, brillig_index: 1 }), - Some(OpcodeLocation::Brillig { acir_index: 2, brillig_index: 2 }), - Some(OpcodeLocation::Acir(3)), + Some(DebugLocation { circuit_id: 0, opcode_location: OpcodeLocation::Acir(0) }), + Some(DebugLocation { circuit_id: 0, opcode_location: OpcodeLocation::Acir(1) }), + Some(DebugLocation { + circuit_id: 0, + opcode_location: OpcodeLocation::Brillig { acir_index: 1, brillig_index: 1 } + }), + Some(DebugLocation { circuit_id: 0, opcode_location: OpcodeLocation::Acir(2) }), + Some(DebugLocation { circuit_id: 0, opcode_location: OpcodeLocation::Acir(3) }), + Some(DebugLocation { circuit_id: 1, opcode_location: OpcodeLocation::Acir(0) }), + Some(DebugLocation { + circuit_id: 1, + opcode_location: OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 } + }), + Some(DebugLocation { + circuit_id: 1, + opcode_location: OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 } + }), + Some(DebugLocation { circuit_id: 1, opcode_location: OpcodeLocation::Acir(1) }), ] ); let addresses = locations .iter() .flatten() - .map(|location| 
context.opcode_location_to_address(location)) + .map(|location| context.debug_location_to_address(location)) .collect::>(); // and vice-versa - assert_eq!(addresses, (0..=7).collect::>()); + assert_eq!(addresses, (0..=8).collect::>()); // check edge cases - assert_eq!(None, context.address_to_opcode_location(8)); + assert_eq!(None, context.address_to_debug_location(9)); assert_eq!( - 0, - context.opcode_location_to_address(&OpcodeLocation::Brillig { - acir_index: 0, - brillig_index: 0 + 1, + context.debug_location_to_address(&DebugLocation { + circuit_id: 0, + opcode_location: OpcodeLocation::Brillig { acir_index: 1, brillig_index: 0 } }) ); assert_eq!( - 4, - context.opcode_location_to_address(&OpcodeLocation::Brillig { - acir_index: 2, - brillig_index: 0 + 5, + context.debug_location_to_address(&DebugLocation { + circuit_id: 1, + opcode_location: OpcodeLocation::Brillig { acir_index: 0, brillig_index: 0 } }) ); } diff --git a/tooling/debugger/src/dap.rs b/tooling/debugger/src/dap.rs index 77abf3093cd..cfe33a61cb5 100644 --- a/tooling/debugger/src/dap.rs +++ b/tooling/debugger/src/dap.rs @@ -2,12 +2,12 @@ use std::collections::BTreeMap; use std::io::{Read, Write}; use acvm::acir::circuit::brillig::BrilligBytecode; -use acvm::acir::circuit::{Circuit, OpcodeLocation}; +use acvm::acir::circuit::Circuit; use acvm::acir::native_types::WitnessMap; use acvm::{BlackBoxFunctionSolver, FieldElement}; -use crate::context::DebugCommandResult; use crate::context::DebugContext; +use crate::context::{DebugCommandResult, DebugLocation}; use crate::foreign_calls::DefaultDebugForeignCallExecutor; use dap::errors::ServerError; @@ -37,8 +37,8 @@ pub struct DapSession<'a, R: Read, W: Write, B: BlackBoxFunctionSolver, - source_breakpoints: BTreeMap>, + instruction_breakpoints: Vec<(DebugLocation, BreakpointId)>, + source_breakpoints: BTreeMap>, } enum ScopeReferences { @@ -61,14 +61,14 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< pub fn new( server: Server, solver: &'a B, - circuit: &'a Circuit, + circuits: &'a [Circuit], debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, unconstrained_functions: &'a [BrilligBytecode], ) -> Self { let context = DebugContext::new( solver, - circuit, + circuits, debug_artifact, initial_witness, Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)), @@ -100,7 +100,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< } pub fn run_loop(&mut self) -> Result<(), ServerError> { - self.running = self.context.get_current_opcode_location().is_some(); + self.running = self.context.get_current_debug_location().is_some(); if self.running && self.context.get_current_source_location().is_none() { // TODO: remove this? 
This is to ensure that the tool has a proper @@ -194,7 +194,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< .get_source_call_stack() .iter() .enumerate() - .map(|(index, (opcode_location, source_location))| { + .map(|(index, (debug_location, source_location))| { let line_number = self.debug_artifact.location_line_number(*source_location).unwrap(); let column_number = @@ -204,7 +204,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< Some(frame) => format!("{} {}", frame.function_name, index), None => format!("frame #{index}"), }; - let address = self.context.opcode_location_to_address(opcode_location); + let address = self.context.debug_location_to_address(debug_location); StackFrame { id: index as i64, @@ -251,18 +251,18 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< let mut instructions: Vec = vec![]; while count > 0 { - let opcode_location = if address >= 0 { - self.context.address_to_opcode_location(address as usize) + let debug_location = if address >= 0 { + self.context.address_to_debug_location(address as usize) } else { None }; - if let Some(opcode_location) = opcode_location { + if let Some(debug_location) = debug_location { instructions.push(DisassembledInstruction { address: address.to_string(), // we'll use the instruction_bytes field to render the OpcodeLocation - instruction_bytes: Some(opcode_location.to_string()), - instruction: self.context.render_opcode_at_location(&opcode_location), + instruction_bytes: Some(debug_location.to_string()), + instruction: self.context.render_opcode_at_location(&debug_location), ..DisassembledInstruction::default() }); } else { @@ -320,16 +320,16 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< self.handle_execution_result(result) } - fn find_breakpoints_at_location(&self, opcode_location: &OpcodeLocation) -> Vec { + fn find_breakpoints_at_location(&self, debug_location: &DebugLocation) -> Vec { let mut result = vec![]; for (location, id) in &self.instruction_breakpoints { - if opcode_location == location { + if debug_location == location { result.push(*id); } } for breakpoints in self.source_breakpoints.values() { for (location, id) in breakpoints { - if opcode_location == location { + if debug_location == location { result.push(*id); } } @@ -404,7 +404,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< }; // compute breakpoints to set and return - let mut breakpoints_to_set: Vec<(OpcodeLocation, i64)> = vec![]; + let mut breakpoints_to_set: Vec<(DebugLocation, i64)> = vec![]; let breakpoints: Vec = args .breakpoints .iter() @@ -420,8 +420,8 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< }; let Some(location) = self .context - .address_to_opcode_location(address) - .filter(|location| self.context.is_valid_opcode_location(location)) + .address_to_debug_location(address) + .filter(|location| self.context.is_valid_debug_location(location)) else { return Breakpoint { verified: false, @@ -472,7 +472,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< let Some(ref breakpoints) = &args.breakpoints else { return vec![]; }; - let mut breakpoints_to_set: Vec<(OpcodeLocation, i64)> = vec![]; + let mut breakpoints_to_set: Vec<(DebugLocation, i64)> = vec![]; let breakpoints = breakpoints .iter() .map(|breakpoint| { @@ -490,14 +490,14 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< // TODO: line will not necessarily be the one requested; we // should do the reverse mapping and 
retrieve the actual source // code line number - if !self.context.is_valid_opcode_location(&location) { + if !self.context.is_valid_debug_location(&location) { return Breakpoint { verified: false, message: Some(String::from("Invalid opcode location")), ..Breakpoint::default() }; } - let breakpoint_address = self.context.opcode_location_to_address(&location); + let breakpoint_address = self.context.debug_location_to_address(&location); let instruction_reference = format!("{}", breakpoint_address); let breakpoint_id = self.get_next_breakpoint_id(); breakpoints_to_set.push((location, breakpoint_id)); @@ -612,7 +612,7 @@ pub fn run_session>( let mut session = DapSession::new( server, solver, - &program.program.functions[0], + &program.program.functions, &debug_artifact, initial_witness, &program.program.unconstrained_functions, diff --git a/tooling/debugger/src/lib.rs b/tooling/debugger/src/lib.rs index 9d0059ee495..37ac088ca35 100644 --- a/tooling/debugger/src/lib.rs +++ b/tooling/debugger/src/lib.rs @@ -9,23 +9,18 @@ use std::io::{Read, Write}; use ::dap::errors::ServerError; use ::dap::server::Server; -use acvm::acir::circuit::brillig::BrilligBytecode; -use acvm::{acir::circuit::Circuit, acir::native_types::WitnessMap}; +use acvm::acir::native_types::{WitnessMap, WitnessStack}; use acvm::{BlackBoxFunctionSolver, FieldElement}; -use noirc_artifacts::debug::DebugArtifact; - use nargo::NargoError; use noirc_driver::CompiledProgram; -pub fn debug_circuit>( - blackbox_solver: &B, - circuit: &Circuit, - debug_artifact: DebugArtifact, +pub fn run_repl_session>( + solver: &B, + program: CompiledProgram, initial_witness: WitnessMap, - unconstrained_functions: &[BrilligBytecode], -) -> Result>, NargoError> { - repl::run(blackbox_solver, circuit, &debug_artifact, initial_witness, unconstrained_functions) +) -> Result>, NargoError> { + repl::run(solver, program, initial_witness) } pub fn run_dap_loop>( diff --git a/tooling/debugger/src/repl.rs b/tooling/debugger/src/repl.rs index 7d8c6e0947d..bd9b316331d 100644 --- a/tooling/debugger/src/repl.rs +++ b/tooling/debugger/src/repl.rs @@ -1,11 +1,14 @@ -use crate::context::{DebugCommandResult, DebugContext}; +use crate::context::{DebugCommandResult, DebugContext, DebugLocation}; +use acvm::acir::brillig::{BitSize, IntegerBitSize}; use acvm::acir::circuit::brillig::BrilligBytecode; use acvm::acir::circuit::{Circuit, Opcode, OpcodeLocation}; -use acvm::acir::native_types::{Witness, WitnessMap}; +use acvm::acir::native_types::{Witness, WitnessMap, WitnessStack}; use acvm::brillig_vm::brillig::Opcode as BrilligOpcode; +use acvm::brillig_vm::MemoryValue; use acvm::{BlackBoxFunctionSolver, FieldElement}; use nargo::NargoError; +use noirc_driver::CompiledProgram; use crate::foreign_calls::DefaultDebugForeignCallExecutor; use noirc_artifacts::debug::DebugArtifact; @@ -19,17 +22,21 @@ use crate::source_code_printer::print_source_code_location; pub struct ReplDebugger<'a, B: BlackBoxFunctionSolver> { context: DebugContext<'a, B>, blackbox_solver: &'a B, - circuit: &'a Circuit, debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, last_result: DebugCommandResult, + + // ACIR functions to debug + circuits: &'a [Circuit], + + // Brillig functions referenced from the ACIR circuits above unconstrained_functions: &'a [BrilligBytecode], } impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { pub fn new( blackbox_solver: &'a B, - circuit: &'a Circuit, + circuits: &'a [Circuit], debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, 
unconstrained_functions: &'a [BrilligBytecode], @@ -38,13 +45,13 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); let context = DebugContext::new( blackbox_solver, - circuit, + circuits, debug_artifact, initial_witness.clone(), foreign_call_executor, unconstrained_functions, ); - let last_result = if context.get_current_opcode_location().is_none() { + let last_result = if context.get_current_debug_location().is_none() { // handle circuit with no opcodes DebugCommandResult::Done } else { @@ -53,7 +60,7 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { Self { context, blackbox_solver, - circuit, + circuits, debug_artifact, initial_witness, last_result, @@ -62,42 +69,43 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } pub fn show_current_vm_status(&self) { - let location = self.context.get_current_opcode_location(); - let opcodes = self.context.get_opcodes(); + let location = self.context.get_current_debug_location(); match location { None => println!("Finished execution"), Some(location) => { - match location { + let circuit_id = location.circuit_id; + let opcodes = self.context.get_opcodes_of_circuit(circuit_id); + match &location.opcode_location { OpcodeLocation::Acir(ip) => { - println!("At opcode {}: {}", ip, opcodes[ip]); + println!("At opcode {} :: {}", location, opcodes[*ip]); } OpcodeLocation::Brillig { acir_index, brillig_index } => { let brillig_bytecode = - if let Opcode::BrilligCall { id, .. } = opcodes[acir_index] { + if let Opcode::BrilligCall { id, .. } = opcodes[*acir_index] { &self.unconstrained_functions[id as usize].bytecode } else { unreachable!("Brillig location does not contain Brillig opcodes"); }; println!( - "At opcode {}.{}: {:?}", - acir_index, brillig_index, brillig_bytecode[brillig_index] + "At opcode {} :: {:?}", + location, brillig_bytecode[*brillig_index] ); } } - let locations = self.context.get_source_location_for_opcode_location(&location); + let locations = self.context.get_source_location_for_debug_location(&location); print_source_code_location(self.debug_artifact, &locations); } } } - fn show_stack_frame(&self, index: usize, location: &OpcodeLocation) { + fn show_stack_frame(&self, index: usize, debug_location: &DebugLocation) { let opcodes = self.context.get_opcodes(); - match location { + match &debug_location.opcode_location { OpcodeLocation::Acir(instruction_pointer) => { println!( - "Frame #{index}, opcode {}: {}", - instruction_pointer, opcodes[*instruction_pointer] + "Frame #{index}, opcode {} :: {}", + debug_location, opcodes[*instruction_pointer] ) } OpcodeLocation::Brillig { acir_index, brillig_index } => { @@ -108,12 +116,12 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { unreachable!("Brillig location does not contain Brillig opcodes"); }; println!( - "Frame #{index}, opcode {}.{}: {:?}", - acir_index, brillig_index, brillig_bytecode[*brillig_index] + "Frame #{index}, opcode {} :: {:?}", + debug_location, brillig_bytecode[*brillig_index] ); } } - let locations = self.context.get_source_location_for_opcode_location(location); + let locations = self.context.get_source_location_for_debug_location(debug_location); print_source_code_location(self.debug_artifact, &locations); } @@ -130,8 +138,21 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } fn display_opcodes(&self) { - let opcodes = self.context.get_opcodes(); - let current_opcode_location = self.context.get_current_opcode_location(); + for i in 
0..self.circuits.len() { + self.display_opcodes_of_circuit(i as u32); + } + } + + fn display_opcodes_of_circuit(&self, circuit_id: u32) { + let current_opcode_location = + self.context.get_current_debug_location().and_then(|debug_location| { + if debug_location.circuit_id == circuit_id { + Some(debug_location.opcode_location) + } else { + None + } + }); + let opcodes = self.context.get_opcodes_of_circuit(circuit_id); let current_acir_index = match current_opcode_location { Some(OpcodeLocation::Acir(ip)) => Some(ip), Some(OpcodeLocation::Brillig { acir_index, .. }) => Some(acir_index), @@ -144,7 +165,10 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { let outer_marker = |acir_index| { if current_acir_index == Some(acir_index) { "->" - } else if self.context.is_breakpoint_set(&OpcodeLocation::Acir(acir_index)) { + } else if self.context.is_breakpoint_set(&DebugLocation { + circuit_id, + opcode_location: OpcodeLocation::Acir(acir_index), + }) { " *" } else { "" @@ -153,10 +177,10 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { let brillig_marker = |acir_index, brillig_index| { if current_acir_index == Some(acir_index) && brillig_index == current_brillig_index { "->" - } else if self - .context - .is_breakpoint_set(&OpcodeLocation::Brillig { acir_index, brillig_index }) - { + } else if self.context.is_breakpoint_set(&DebugLocation { + circuit_id, + opcode_location: OpcodeLocation::Brillig { acir_index, brillig_index }, + }) { " *" } else { "" @@ -165,7 +189,8 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { let print_brillig_bytecode = |acir_index, bytecode: &[BrilligOpcode]| { for (brillig_index, brillig_opcode) in bytecode.iter().enumerate() { println!( - "{:>3}.{:<2} |{:2} {:?}", + "{:>2}:{:>3}.{:<2} |{:2} {:?}", + circuit_id, acir_index, brillig_index, brillig_marker(acir_index, brillig_index), @@ -178,33 +203,33 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { match &opcode { Opcode::BrilligCall { id, inputs, outputs, .. 
} => { println!( - "{:>3} {:2} BRILLIG CALL id={} inputs={:?}", - acir_index, marker, id, inputs + "{:>2}:{:>3} {:2} BRILLIG CALL id={} inputs={:?}", + circuit_id, acir_index, marker, id, inputs ); - println!(" | outputs={:?}", outputs); + println!(" | outputs={:?}", outputs); let bytecode = &self.unconstrained_functions[*id as usize].bytecode; print_brillig_bytecode(acir_index, bytecode); } - _ => println!("{:>3} {:2} {:?}", acir_index, marker, opcode), + _ => println!("{:>2}:{:>3} {:2} {:?}", circuit_id, acir_index, marker, opcode), } } } - fn add_breakpoint_at(&mut self, location: OpcodeLocation) { - if !self.context.is_valid_opcode_location(&location) { - println!("Invalid opcode location {location}"); + fn add_breakpoint_at(&mut self, location: DebugLocation) { + if !self.context.is_valid_debug_location(&location) { + println!("Invalid location {location}"); } else if self.context.add_breakpoint(location) { - println!("Added breakpoint at opcode {location}"); + println!("Added breakpoint at {location}"); } else { - println!("Breakpoint at opcode {location} already set"); + println!("Breakpoint at {location} already set"); } } - fn delete_breakpoint_at(&mut self, location: OpcodeLocation) { + fn delete_breakpoint_at(&mut self, location: DebugLocation) { if self.context.delete_breakpoint(&location) { - println!("Breakpoint at opcode {location} deleted"); + println!("Breakpoint at {location} deleted"); } else { - println!("Breakpoint at opcode {location} not set"); + println!("Breakpoint at {location} not set"); } } @@ -281,20 +306,19 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } fn restart_session(&mut self) { - let breakpoints: Vec = - self.context.iterate_breakpoints().copied().collect(); + let breakpoints: Vec = self.context.iterate_breakpoints().copied().collect(); let foreign_call_executor = Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, self.debug_artifact)); self.context = DebugContext::new( self.blackbox_solver, - self.circuit, + self.circuits, self.debug_artifact, self.initial_witness.clone(), foreign_call_executor, self.unconstrained_functions, ); - for opcode_location in breakpoints { - self.context.add_breakpoint(opcode_location); + for debug_location in breakpoints { + self.context.add_breakpoint(debug_location); } self.last_result = DebugCommandResult::Ok; println!("Restarted debugging session."); @@ -340,7 +364,11 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { return; }; - for (index, value) in memory.iter().enumerate().filter(|(_, value)| value.bit_size() > 0) { + for (index, value) in memory + .iter() + .enumerate() + .filter(|(_, value)| !matches!(value, MemoryValue::Integer(_, IntegerBitSize::U0))) + { println!("{index} = {}", value); } } @@ -350,6 +378,12 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { println!("Invalid value: {value}"); return; }; + + let Ok(bit_size) = BitSize::try_from_u32::(bit_size) else { + println!("Invalid bit size: {bit_size}"); + return; + }; + if !self.context.is_executing_brillig() { println!("Not executing a Brillig block"); return; @@ -372,21 +406,23 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { self.context.is_solved() } - fn finalize(self) -> WitnessMap { + fn finalize(self) -> WitnessStack { self.context.finalize() } } pub fn run>( blackbox_solver: &B, - circuit: &Circuit, - debug_artifact: &DebugArtifact, + program: CompiledProgram, initial_witness: WitnessMap, - unconstrained_functions: &[BrilligBytecode], -) -> Result>, NargoError> { +) -> Result>, 
NargoError> { + let circuits = &program.program.functions; + let debug_artifact = + &DebugArtifact { debug_symbols: program.debug, file_map: program.file_map }; + let unconstrained_functions = &program.program.unconstrained_functions; let context = RefCell::new(ReplDebugger::new( blackbox_solver, - circuit, + circuits, debug_artifact, initial_witness, unconstrained_functions, @@ -480,7 +516,7 @@ pub fn run>( "break", command! { "add a breakpoint at an opcode location", - (LOCATION:OpcodeLocation) => |location| { + (LOCATION:DebugLocation) => |location| { ref_context.borrow_mut().add_breakpoint_at(location); Ok(CommandStatus::Done) } @@ -490,7 +526,7 @@ pub fn run>( "delete", command! { "delete breakpoint at an opcode location", - (LOCATION:OpcodeLocation) => |location| { + (LOCATION:DebugLocation) => |location| { ref_context.borrow_mut().delete_breakpoint_at(location); Ok(CommandStatus::Done) } @@ -576,8 +612,8 @@ pub fn run>( drop(repl); if context.borrow().is_solved() { - let solved_witness = context.into_inner().finalize(); - Ok(Some(solved_witness)) + let solved_witness_stack = context.into_inner().finalize(); + Ok(Some(solved_witness_stack)) } else { Ok(None) } diff --git a/tooling/debugger/tests/debug.rs b/tooling/debugger/tests/debug.rs index 313b6b30591..2dca6b95f0e 100644 --- a/tooling/debugger/tests/debug.rs +++ b/tooling/debugger/tests/debug.rs @@ -12,7 +12,7 @@ mod tests { let nargo_bin = cargo_bin("nargo").into_os_string().into_string().expect("Cannot parse nargo path"); - let timeout_seconds = 20; + let timeout_seconds = 25; let mut dbg_session = spawn_bash(Some(timeout_seconds * 1000)).expect("Could not start bash session"); diff --git a/tooling/fuzzer/src/dictionary/mod.rs b/tooling/fuzzer/src/dictionary/mod.rs index a45b9c3abb2..942462c4f37 100644 --- a/tooling/fuzzer/src/dictionary/mod.rs +++ b/tooling/fuzzer/src/dictionary/mod.rs @@ -113,14 +113,18 @@ fn build_dictionary_from_unconstrained_function( for opcode in &function.bytecode { match opcode { BrilligOpcode::Cast { bit_size, .. } => { - let field = 1u128.wrapping_shl(*bit_size); + let bit_size = bit_size.to_u32::(); + + let field = 1u128.wrapping_shl(bit_size); constants.insert(F::from(field)); constants.insert(F::from(field - 1)); } BrilligOpcode::Const { bit_size, value, .. 
} => { + let bit_size = bit_size.to_u32::(); + constants.insert(*value); - let field = 1u128.wrapping_shl(*bit_size); + let field = 1u128.wrapping_shl(bit_size); constants.insert(F::from(field)); constants.insert(F::from(field - 1)); } diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index b62f97a4918..c7b70339e1d 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -21,13 +21,16 @@ use async_lsp::{ use fm::{codespan_files as files, FileManager}; use fxhash::FxHashSet; use lsp_types::{ - request::{PrepareRenameRequest, References, Rename}, + request::{ + DocumentSymbolRequest, HoverRequest, InlayHintRequest, PrepareRenameRequest, References, + Rename, + }, CodeLens, }; use nargo::{ package::{Package, PackageType}, parse_all, - workspace::Workspace, + workspace::{self, Workspace}, }; use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING}; @@ -45,10 +48,11 @@ use notifications::{ on_did_open_text_document, on_did_save_text_document, on_exit, on_initialized, }; use requests::{ - on_code_lens_request, on_formatting, on_goto_declaration_request, on_goto_definition_request, - on_goto_type_definition_request, on_initialize, on_prepare_rename_request, - on_profile_run_request, on_references_request, on_rename_request, on_shutdown, - on_test_run_request, on_tests_request, + on_code_lens_request, on_document_symbol_request, on_formatting, on_goto_declaration_request, + on_goto_definition_request, on_goto_type_definition_request, on_hover_request, on_initialize, + on_inlay_hint_request, on_prepare_rename_request, on_profile_run_request, + on_references_request, on_rename_request, on_shutdown, on_test_run_request, on_tests_request, + LspInitializationOptions, }; use serde_json::Value as JsonValue; use thiserror::Error; @@ -82,7 +86,7 @@ pub struct LspState { cached_lenses: HashMap>, cached_definitions: HashMap, cached_parsed_files: HashMap))>, - parsing_cache_enabled: bool, + options: LspInitializationOptions, } impl LspState { @@ -99,7 +103,7 @@ impl LspState { cached_definitions: HashMap::new(), open_documents_count: 0, cached_parsed_files: HashMap::new(), - parsing_cache_enabled: true, + options: Default::default(), } } } @@ -126,9 +130,12 @@ impl NargoLspService { .request::(on_goto_definition_request) .request::(on_goto_declaration_request) .request::(on_goto_type_definition_request) + .request::(on_document_symbol_request) .request::(on_references_request) .request::(on_prepare_rename_request) .request::(on_rename_request) + .request::(on_hover_request) + .request::(on_inlay_hint_request) .notification::(on_initialized) .notification::(on_did_change_configuration) .notification::(on_did_open_text_document) @@ -229,43 +236,75 @@ fn byte_span_to_range<'a, F: files::Files<'a> + ?Sized>( } } -pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result { - if let Some(toml_path) = find_file_manifest(file_path) { - resolve_workspace_from_toml( - &toml_path, - PackageSelection::All, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) - .map_err(|err| LspError::WorkspaceResolutionError(err.to_string())) - } else { - let Some(parent_folder) = file_path - .parent() - .and_then(|f| f.file_name()) - .and_then(|file_name_os_str| file_name_os_str.to_str()) - else { - return Err(LspError::WorkspaceResolutionError(format!( - "Could not resolve parent folder for file: {:?}", - file_path - ))); - }; - let assumed_package = Package { - version: None, - 
compiler_required_version: Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - root_dir: PathBuf::from(parent_folder), - package_type: PackageType::Binary, - entry_path: PathBuf::from(file_path), - name: CrateName::from_str(parent_folder) - .map_err(|err| LspError::WorkspaceResolutionError(err.to_string()))?, - dependencies: BTreeMap::new(), - }; - let workspace = Workspace { - root_dir: PathBuf::from(parent_folder), - members: vec![assumed_package], - selected_package_index: Some(0), - is_assumed: true, - }; - Ok(workspace) +pub(crate) fn resolve_workspace_for_source_path( + file_path: &Path, + root_path: &Option, +) -> Result { + // If there's a LSP root path, starting from file_path go up the directory tree + // searching for Nargo.toml files. The last one we find is the one we'll use + // (we'll assume Noir workspaces aren't nested) + if let Some(root_path) = root_path { + let mut current_path = file_path; + let mut current_toml_path = None; + while current_path.starts_with(root_path) { + if let Some(toml_path) = find_file_manifest(current_path) { + current_toml_path = Some(toml_path); + + if let Some(next_path) = current_path.parent() { + current_path = next_path; + } else { + break; + } + } else { + break; + } + } + + if let Some(toml_path) = current_toml_path { + return resolve_workspace_from_toml( + &toml_path, + PackageSelection::All, + Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), + ) + .map_err(|err| LspError::WorkspaceResolutionError(err.to_string())); + } } + + let Some(parent_folder) = file_path + .parent() + .and_then(|f| f.file_name()) + .and_then(|file_name_os_str| file_name_os_str.to_str()) + else { + return Err(LspError::WorkspaceResolutionError(format!( + "Could not resolve parent folder for file: {:?}", + file_path + ))); + }; + let assumed_package = Package { + version: None, + compiler_required_version: Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), + root_dir: PathBuf::from(parent_folder), + package_type: PackageType::Binary, + entry_path: PathBuf::from(file_path), + name: CrateName::from_str(parent_folder) + .map_err(|err| LspError::WorkspaceResolutionError(err.to_string()))?, + dependencies: BTreeMap::new(), + expression_width: None, + }; + let workspace = Workspace { + root_dir: PathBuf::from(parent_folder), + members: vec![assumed_package], + selected_package_index: Some(0), + is_assumed: true, + }; + Ok(workspace) +} + +pub(crate) fn workspace_package_for_file<'a>( + workspace: &'a Workspace, + file_path: &Path, +) -> Option<&'a Package> { + workspace.members.iter().find(|package| file_path.starts_with(&package.root_dir)) } pub(crate) fn prepare_package<'file_manager, 'parsed_files>( @@ -303,7 +342,7 @@ fn prepare_source(source: String, state: &mut LspState) -> (Context<'static, 'st } fn parse_diff(file_manager: &FileManager, state: &mut LspState) -> ParsedFiles { - if state.parsing_cache_enabled { + if state.options.enable_parsing_cache { let noir_file_hashes: Vec<_> = file_manager .as_file_map() .all_file_ids() @@ -359,6 +398,22 @@ fn parse_diff(file_manager: &FileManager, state: &mut LspState) -> ParsedFiles { } } +pub fn insert_all_files_for_workspace_into_file_manager( + state: &LspState, + workspace: &workspace::Workspace, + file_manager: &mut FileManager, +) { + // First add files we cached: these have the source code of files that are modified + // but not saved to disk yet, and we want to make sure all LSP features work well + // according to these unsaved buffers, not what's saved on disk. 
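// A minimal, standalone sketch (std-only, not code from this diff) of the manifest
// search performed by the resolver earlier in this hunk: starting at the opened file,
// walk up toward the LSP root and keep the *last* Nargo.toml seen, so the outermost
// manifest under the root wins (workspaces are assumed not to be nested). The helper
// name `find_manifest_upwards` is an illustrative assumption only.
use std::path::{Path, PathBuf};

fn find_manifest_upwards(file_path: &Path, root_path: &Path) -> Option<PathBuf> {
    let mut current = file_path.parent()?;
    let mut found = None;
    while current.starts_with(root_path) {
        // Remember a manifest at this level, then keep climbing toward the root.
        let candidate = current.join("Nargo.toml");
        if candidate.is_file() {
            found = Some(candidate);
        }
        match current.parent() {
            Some(parent) => current = parent,
            None => break,
        }
    }
    found
}
// E.g. for /root/pkg/src/main.nr with root /root, a manifest at /root/Nargo.toml
// would shadow one found at /root/pkg/Nargo.toml.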
+ for (path, source) in &state.input_files { + let path = path.strip_prefix("file://").unwrap(); + file_manager.add_file_with_source_canonical_path(Path::new(path), source.clone()); + } + + nargo::insert_all_files_for_workspace_into_file_manager(workspace, file_manager); +} + #[test] fn prepare_package_from_source_string() { let source = r#" @@ -373,8 +428,7 @@ fn prepare_package_from_source_string() { let mut state = LspState::new(&client, acvm::blackbox_solver::StubbedBlackBoxSolver); let (mut context, crate_id) = crate::prepare_source(source.to_string(), &mut state); - let _check_result = - noirc_driver::check_crate(&mut context, crate_id, false, false, false, None); + let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false, None); let main_func_id = context.get_main_function(&crate_id); assert!(main_func_id.is_some()); } diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs index 46a7b1cf866..24409e85db8 100644 --- a/tooling/lsp/src/notifications/mod.rs +++ b/tooling/lsp/src/notifications/mod.rs @@ -1,11 +1,10 @@ use std::ops::ControlFlow; +use crate::insert_all_files_for_workspace_into_file_manager; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; -use nargo::insert_all_files_for_workspace_into_file_manager; use noirc_driver::{check_crate, file_manager_with_stdlib}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; -use crate::requests::collect_lenses_for_package; use crate::types::{ notification, Diagnostic, DiagnosticSeverity, DidChangeConfigurationParams, DidChangeTextDocumentParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, @@ -13,8 +12,8 @@ use crate::types::{ }; use crate::{ - byte_span_to_range, get_package_tests_in_crate, parse_diff, prepare_source, - resolve_workspace_for_source_path, LspState, + byte_span_to_range, get_package_tests_in_crate, parse_diff, resolve_workspace_for_source_path, + LspState, }; pub(super) fn on_initialized( @@ -38,8 +37,15 @@ pub(super) fn on_did_open_text_document( state.input_files.insert(params.text_document.uri.to_string(), params.text_document.text); let document_uri = params.text_document.uri; - - match process_noir_document(document_uri, state) { + let only_process_document_uri_package = false; + let output_diagnostics = true; + + match process_workspace_for_noir_document( + state, + document_uri, + only_process_document_uri_package, + output_diagnostics, + ) { Ok(_) => { state.open_documents_count += 1; ControlFlow::Continue(()) @@ -55,37 +61,19 @@ pub(super) fn on_did_change_text_document( let text = params.content_changes.into_iter().next().unwrap().text; state.input_files.insert(params.text_document.uri.to_string(), text.clone()); - let (mut context, crate_id) = prepare_source(text, state); - let _ = check_crate(&mut context, crate_id, false, false, false, None); - - let workspace = match resolve_workspace_for_source_path( - params.text_document.uri.to_file_path().unwrap().as_path(), + let document_uri = params.text_document.uri; + let only_process_document_uri_package = true; + let output_diagnotics = false; + + match process_workspace_for_noir_document( + state, + document_uri, + only_process_document_uri_package, + output_diagnotics, ) { - Ok(workspace) => workspace, - Err(lsp_error) => { - return ControlFlow::Break(Err(ResponseError::new( - ErrorCode::REQUEST_FAILED, - lsp_error.to_string(), - ) - .into())) - } - }; - let package = match workspace.members.first() { - Some(package) => package, - None => { - return 
ControlFlow::Break(Err(ResponseError::new( - ErrorCode::REQUEST_FAILED, - "Selected workspace has no members", - ) - .into())) - } - }; - - let lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None); - - state.cached_lenses.insert(params.text_document.uri.to_string(), lenses); - - ControlFlow::Continue(()) + Ok(_) => ControlFlow::Continue(()), + Err(err) => ControlFlow::Break(Err(err)), + } } pub(super) fn on_did_close_text_document( @@ -101,7 +89,19 @@ pub(super) fn on_did_close_text_document( state.cached_definitions.clear(); } - ControlFlow::Continue(()) + let document_uri = params.text_document.uri; + let only_process_document_uri_package = true; + let output_diagnotics = false; + + match process_workspace_for_noir_document( + state, + document_uri, + only_process_document_uri_package, + output_diagnotics, + ) { + Ok(_) => ControlFlow::Continue(()), + Err(err) => ControlFlow::Break(Err(err)), + } } pub(super) fn on_did_save_text_document( @@ -109,43 +109,63 @@ pub(super) fn on_did_save_text_document( params: DidSaveTextDocumentParams, ) -> ControlFlow> { let document_uri = params.text_document.uri; - - match process_noir_document(document_uri, state) { + let only_process_document_uri_package = false; + let output_diagnotics = true; + + match process_workspace_for_noir_document( + state, + document_uri, + only_process_document_uri_package, + output_diagnotics, + ) { Ok(_) => ControlFlow::Continue(()), Err(err) => ControlFlow::Break(Err(err)), } } -fn process_noir_document( - document_uri: lsp_types::Url, +// Given a Noir document, find the workspace it's contained in (an assumed workspace is created if +// it's only contained in a package), then type-checks the workspace's packages, +// caching code lenses and type definitions, and notifying about compilation errors. 
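// A small, self-contained illustration (hypothetical types, not from this diff) of the
// pattern the rewritten notification handlers above now share: delegate to one shared
// processing function and translate its Result into the ControlFlow the LSP event loop
// expects -- continue on success, break with the error otherwise.
use std::ops::ControlFlow;

#[derive(Debug)]
struct HandlerError(String);

fn process_document(_uri: &str) -> Result<(), HandlerError> {
    // Stand-in for the shared "resolve workspace, type-check, publish" step.
    Ok(())
}

fn on_notification(uri: &str) -> ControlFlow<Result<(), HandlerError>> {
    match process_document(uri) {
        Ok(()) => ControlFlow::Continue(()),
        Err(err) => ControlFlow::Break(Err(err)),
    }
}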
+pub(crate) fn process_workspace_for_noir_document( state: &mut LspState, + document_uri: lsp_types::Url, + only_process_document_uri_package: bool, + output_diagnostics: bool, ) -> Result<(), async_lsp::Error> { let file_path = document_uri.to_file_path().map_err(|_| { ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") })?; - let workspace = resolve_workspace_for_source_path(&file_path).map_err(|lsp_error| { - ResponseError::new(ErrorCode::REQUEST_FAILED, lsp_error.to_string()) - })?; + let workspace = + resolve_workspace_for_source_path(&file_path, &state.root_path).map_err(|lsp_error| { + ResponseError::new(ErrorCode::REQUEST_FAILED, lsp_error.to_string()) + })?; let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + insert_all_files_for_workspace_into_file_manager( + state, + &workspace, + &mut workspace_file_manager, + ); let parsed_files = parse_diff(&workspace_file_manager, state); let diagnostics: Vec<_> = workspace .into_iter() .flat_map(|package| -> Vec { + let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into(); + + if only_process_document_uri_package && !file_path.starts_with(&package.root_dir) { + return vec![]; + } + let (mut context, crate_id) = crate::prepare_package(&workspace_file_manager, &parsed_files, package); - let file_diagnostics = - match check_crate(&mut context, crate_id, false, false, false, None) { - Ok(((), warnings)) => warnings, - Err(errors_and_warnings) => errors_and_warnings, - }; - - let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into(); + let file_diagnostics = match check_crate(&mut context, crate_id, false, false, None) { + Ok(((), warnings)) => warnings, + Err(errors_and_warnings) => errors_and_warnings, + }; // We don't add test headings for a package if it contains no `#[test]` functions if let Some(tests) = get_package_tests_in_crate(&context, &crate_id, &package.name) { @@ -169,46 +189,53 @@ fn process_noir_document( let fm = &context.file_manager; let files = fm.as_file_map(); - file_diagnostics - .into_iter() - .filter_map(|FileDiagnostic { file_id, diagnostic, call_stack: _ }| { - // Ignore diagnostics for any file that wasn't the file we saved - // TODO: In the future, we could create "related" diagnostics for these files - if fm.path(file_id).expect("file must exist to have emitted diagnostic") - != file_path - { - return None; - } - - // TODO: Should this be the first item in secondaries? Should we bail when we find a range? 
- let range = diagnostic - .secondaries - .into_iter() - .filter_map(|sec| byte_span_to_range(files, file_id, sec.span.into())) - .last() - .unwrap_or_default(); - - let severity = match diagnostic.kind { - DiagnosticKind::Error => DiagnosticSeverity::ERROR, - DiagnosticKind::Warning => DiagnosticSeverity::WARNING, - DiagnosticKind::Info => DiagnosticSeverity::INFORMATION, - DiagnosticKind::Bug => DiagnosticSeverity::WARNING, - }; - Some(Diagnostic { - range, - severity: Some(severity), - message: diagnostic.message, - ..Default::default() + if output_diagnostics { + file_diagnostics + .into_iter() + .filter_map(|FileDiagnostic { file_id, diagnostic, call_stack: _ }| { + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if fm.path(file_id).expect("file must exist to have emitted diagnostic") + != file_path + { + return None; + } + + // TODO: Should this be the first item in secondaries? Should we bail when we find a range? + let range = diagnostic + .secondaries + .into_iter() + .filter_map(|sec| byte_span_to_range(files, file_id, sec.span.into())) + .last() + .unwrap_or_default(); + + let severity = match diagnostic.kind { + DiagnosticKind::Error => DiagnosticSeverity::ERROR, + DiagnosticKind::Warning => DiagnosticSeverity::WARNING, + DiagnosticKind::Info => DiagnosticSeverity::INFORMATION, + DiagnosticKind::Bug => DiagnosticSeverity::WARNING, + }; + Some(Diagnostic { + range, + severity: Some(severity), + message: diagnostic.message, + ..Default::default() + }) }) - }) - .collect() + .collect() + } else { + vec![] + } }) .collect(); - let _ = state.client.publish_diagnostics(PublishDiagnosticsParams { - uri: document_uri, - version: None, - diagnostics, - }); + + if output_diagnostics { + let _ = state.client.publish_diagnostics(PublishDiagnosticsParams { + uri: document_uri, + version: None, + diagnostics, + }); + } Ok(()) } @@ -219,3 +246,82 @@ pub(super) fn on_exit( ) -> ControlFlow> { ControlFlow::Continue(()) } + +#[cfg(test)] +mod notification_tests { + use crate::test_utils; + + use super::*; + use lsp_types::{ + InlayHintLabel, InlayHintParams, Position, TextDocumentContentChangeEvent, + TextDocumentIdentifier, TextDocumentItem, VersionedTextDocumentIdentifier, + WorkDoneProgressParams, + }; + use tokio::test; + + #[test] + async fn test_caches_open_files() { + let (mut state, noir_text_document) = test_utils::init_lsp_server("inlay_hints").await; + + // Open the document, fake the text to be empty + on_did_open_text_document( + &mut state, + DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: noir_text_document.clone(), + language_id: "noir".to_string(), + version: 0, + text: "".to_string(), + }, + }, + ); + + // Fake the text to change to "global a = 1;" + on_did_change_text_document( + &mut state, + DidChangeTextDocumentParams { + text_document: VersionedTextDocumentIdentifier { + uri: noir_text_document.clone(), + version: 1, + }, + content_changes: vec![TextDocumentContentChangeEvent { + range: None, + range_length: None, + // Should get an inlay hint for ": bool" after "a" + text: "global a = true;".to_string(), + }], + }, + ); + + // Get inlay hints. These should now be relative to the changed text, + // not the saved file's text. 
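// A toy, std-only sketch of what the `test_caches_open_files` test around this point
// relies on: unsaved editor buffers, keyed by their `file://` URIs, are layered into
// the source map so later queries see the in-editor text rather than the file on disk.
// The names `Sources` and `overlay_open_buffers` are illustrative assumptions, not this
// crate's API.
use std::collections::HashMap;
use std::path::{Path, PathBuf};

#[derive(Default)]
struct Sources(HashMap<PathBuf, String>);

impl Sources {
    fn add(&mut self, path: &Path, source: String) {
        // Last insertion for a path wins in this toy model.
        self.0.insert(path.to_path_buf(), source);
    }
}

fn overlay_open_buffers(open_buffers: &HashMap<String, String>, sources: &mut Sources) {
    for (uri, text) in open_buffers {
        if let Some(path) = uri.strip_prefix("file://") {
            sources.add(Path::new(path), text.clone());
        }
    }
}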
+ let inlay_hints = crate::requests::on_inlay_hint_request( + &mut state, + InlayHintParams { + work_done_progress_params: WorkDoneProgressParams { work_done_token: None }, + text_document: TextDocumentIdentifier { uri: noir_text_document }, + range: lsp_types::Range { + start: lsp_types::Position { line: 0, character: 0 }, + end: lsp_types::Position { line: 1, character: 0 }, + }, + }, + ) + .await + .expect("Could not execute on_inlay_hint_request") + .unwrap(); + + assert_eq!(inlay_hints.len(), 1); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, Position { line: 0, character: 8 }); + + if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { + assert_eq!(labels.len(), 2); + assert_eq!(labels[0].value, ": "); + assert_eq!(labels[0].location, None); + assert_eq!(labels[1].value, "bool"); + } else { + panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); + } + } +} diff --git a/tooling/lsp/src/requests/code_lens_request.rs b/tooling/lsp/src/requests/code_lens_request.rs index 0b8803edc6f..51336a324da 100644 --- a/tooling/lsp/src/requests/code_lens_request.rs +++ b/tooling/lsp/src/requests/code_lens_request.rs @@ -21,6 +21,8 @@ const INFO_COMMAND: &str = "nargo.info"; const INFO_CODELENS_TITLE: &str = "Info"; const EXECUTE_COMMAND: &str = "nargo.execute"; const EXECUTE_CODELENS_TITLE: &str = "Execute"; +const DEBUG_COMMAND: &str = "nargo.debug.dap"; +const DEBUG_CODELENS_TITLE: &str = "Debug"; const PROFILE_COMMAND: &str = "nargo.profile"; const PROFILE_CODELENS_TITLE: &str = "Profile"; @@ -61,13 +63,17 @@ fn on_code_lens_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not read file from disk") })?; - let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); - let package = workspace.members.first().unwrap(); + let workspace = + resolve_workspace_for_source_path(file_path.as_path(), &state.root_path).unwrap(); + + let package = crate::workspace_package_for_file(&workspace, &file_path).ok_or_else(|| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file") + })?; let (mut context, crate_id) = prepare_source(source_string, state); // We ignore the warnings and errors produced by compilation for producing code lenses // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false, false, None); + let _ = check_crate(&mut context, crate_id, false, false, None); let collected_lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None); @@ -154,35 +160,22 @@ pub(crate) fn collect_lenses_for_package( lenses.push(compile_lens); - let info_command = Command { - title: INFO_CODELENS_TITLE.to_string(), - command: INFO_COMMAND.into(), - arguments: Some(package_selection_args(workspace, package)), - }; - - let info_lens = CodeLens { range, command: Some(info_command), data: None }; - - lenses.push(info_lens); - - let execute_command = Command { - title: EXECUTE_CODELENS_TITLE.to_string(), - command: EXECUTE_COMMAND.into(), - arguments: Some(package_selection_args(workspace, package)), - }; - - let execute_lens = CodeLens { range, command: Some(execute_command), data: None }; - - lenses.push(execute_lens); - - let profile_command = Command { - title: PROFILE_CODELENS_TITLE.to_string(), - command: PROFILE_COMMAND.into(), - arguments: Some(package_selection_args(workspace, package)), - }; - - let profile_lens = CodeLens { range, command: Some(profile_command), data: None }; - - 
lenses.push(profile_lens); + let internal_command_lenses = [ + (INFO_CODELENS_TITLE, INFO_COMMAND), + (EXECUTE_CODELENS_TITLE, EXECUTE_COMMAND), + (PROFILE_CODELENS_TITLE, PROFILE_COMMAND), + (DEBUG_CODELENS_TITLE, DEBUG_COMMAND), + ] + .map(|(title, command)| { + let command = Command { + title: title.to_string(), + command: command.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + CodeLens { range, command: Some(command), data: None } + }); + + lenses.append(&mut Vec::from(internal_command_lenses)); } } diff --git a/tooling/lsp/src/requests/document_symbol.rs b/tooling/lsp/src/requests/document_symbol.rs new file mode 100644 index 00000000000..67e2505d8fd --- /dev/null +++ b/tooling/lsp/src/requests/document_symbol.rs @@ -0,0 +1,746 @@ +use std::future::{self, Future}; + +use async_lsp::ResponseError; +use fm::{FileId, FileMap, PathString}; +use lsp_types::{ + DocumentSymbol, DocumentSymbolParams, DocumentSymbolResponse, Location, Position, SymbolKind, + TextDocumentPositionParams, +}; +use noirc_errors::Span; +use noirc_frontend::{ + ast::{ + Expression, FunctionReturnType, Ident, LetStatement, NoirFunction, NoirStruct, NoirTrait, + NoirTraitImpl, TraitImplItem, TraitItem, TypeImpl, UnresolvedType, UnresolvedTypeData, + }, + parser::{Item, ItemKind, ParsedSubModule}, + ParsedModule, +}; + +use crate::LspState; + +use super::process_request; + +pub(crate) fn on_document_symbol_request( + state: &mut LspState, + params: DocumentSymbolParams, +) -> impl Future, ResponseError>> { + let Ok(file_path) = params.text_document.uri.to_file_path() else { + return future::ready(Ok(None)); + }; + + let text_document_position_params = TextDocumentPositionParams { + text_document: params.text_document.clone(), + position: Position { line: 0, character: 0 }, + }; + + let result = process_request(state, text_document_position_params, |args| { + args.files.get_file_id(&PathString::from_path(file_path)).map(|file_id| { + let file = args.files.get_file(file_id).unwrap(); + let source = file.source(); + let (parsed_module, _errors) = noirc_frontend::parse_program(source); + + let mut collector = DocumentSymbolCollector::new(file_id, args.files); + let mut symbols = Vec::new(); + collector.collect_in_parsed_module(&parsed_module, &mut symbols); + DocumentSymbolResponse::Nested(symbols) + }) + }); + + future::ready(result) +} + +struct DocumentSymbolCollector<'a> { + file_id: FileId, + files: &'a FileMap, +} + +impl<'a> DocumentSymbolCollector<'a> { + fn new(file_id: FileId, files: &'a FileMap) -> Self { + Self { file_id, files } + } + + fn collect_in_parsed_module( + &mut self, + parsed_module: &ParsedModule, + symbols: &mut Vec, + ) { + for item in &parsed_module.items { + self.collect_in_item(item, symbols); + } + } + + fn collect_in_item(&mut self, item: &Item, symbols: &mut Vec) { + match &item.kind { + ItemKind::Function(noir_function) => { + self.collect_in_noir_function(noir_function, item.span, symbols); + } + ItemKind::Struct(noir_struct) => { + self.collect_in_noir_struct(noir_struct, item.span, symbols); + } + ItemKind::Trait(noir_trait) => { + self.collect_in_noir_trait(noir_trait, item.span, symbols); + } + ItemKind::TraitImpl(noir_trait_impl) => { + self.collect_in_noir_trait_impl(noir_trait_impl, item.span, symbols); + } + ItemKind::Impl(type_impl) => { + self.collect_in_type_impl(type_impl, item.span, symbols); + } + ItemKind::Submodules(parsed_sub_module) => { + self.collect_in_parsed_sub_module(parsed_sub_module, item.span, symbols); + } + 
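// A compact model (hypothetical item kinds, not the real AST) of the recursion used by
// this collector: leaf items map to one symbol each, while a submodule becomes a symbol
// whose children are gathered by walking the submodule's own items.
enum ItemSketch {
    Function { name: String },
    Module { name: String, items: Vec<ItemSketch> },
}

struct SymbolSketch {
    name: String,
    children: Vec<SymbolSketch>,
}

fn collect(item: &ItemSketch, out: &mut Vec<SymbolSketch>) {
    match item {
        ItemSketch::Function { name } => {
            out.push(SymbolSketch { name: name.clone(), children: Vec::new() });
        }
        ItemSketch::Module { name, items } => {
            let mut children = Vec::new();
            for child in items {
                collect(child, &mut children);
            }
            out.push(SymbolSketch { name: name.clone(), children });
        }
    }
}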
ItemKind::Global(let_statement) => { + self.collect_in_global(let_statement, item.span, symbols); + } + ItemKind::Import(..) | ItemKind::TypeAlias(..) | ItemKind::ModuleDecl(..) => (), + } + } + + fn collect_in_noir_function( + &mut self, + noir_function: &NoirFunction, + span: Span, + symbols: &mut Vec, + ) { + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + let Some(selection_location) = self.to_lsp_location(noir_function.name_ident().span()) + else { + return; + }; + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: noir_function.name().to_string(), + detail: Some(noir_function.def.signature()), + kind: SymbolKind::FUNCTION, + tags: None, + deprecated: None, + range: location.range, + selection_range: selection_location.range, + children: None, + }); + } + + fn collect_in_noir_struct( + &mut self, + noir_struct: &NoirStruct, + span: Span, + symbols: &mut Vec, + ) { + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + let Some(selection_location) = self.to_lsp_location(noir_struct.name.span()) else { + return; + }; + + let mut children = Vec::new(); + for (field_name, typ) in &noir_struct.fields { + let span = if let Some(typ) = typ.span { + Span::from(field_name.span().start()..typ.end()) + } else { + field_name.span() + }; + + let Some(field_location) = self.to_lsp_location(span) else { + continue; + }; + + let Some(field_name_location) = self.to_lsp_location(field_name.span()) else { + continue; + }; + + #[allow(deprecated)] + children.push(DocumentSymbol { + name: field_name.to_string(), + detail: None, + kind: SymbolKind::FIELD, + tags: None, + deprecated: None, + range: field_location.range, + selection_range: field_name_location.range, + children: None, + }); + } + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: noir_struct.name.to_string(), + detail: None, + kind: SymbolKind::STRUCT, + tags: None, + deprecated: None, + range: location.range, + selection_range: selection_location.range, + children: Some(children), + }); + } + + fn collect_in_noir_trait( + &mut self, + noir_trait: &NoirTrait, + span: Span, + symbols: &mut Vec, + ) { + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + let Some(selection_location) = self.to_lsp_location(noir_trait.name.span()) else { + return; + }; + + let mut children = Vec::new(); + for item in &noir_trait.items { + self.collect_in_noir_trait_item(item, &mut children); + } + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: noir_trait.name.to_string(), + detail: None, + kind: SymbolKind::INTERFACE, + tags: None, + deprecated: None, + range: location.range, + selection_range: selection_location.range, + children: Some(children), + }); + } + + fn collect_in_noir_trait_item( + &mut self, + trait_item: &TraitItem, + symbols: &mut Vec, + ) { + // Ideally `TraitItem` has a `span` for the entire definition, and we'd use that + // for the `range` property. For now we do our best to find a reasonable span. + match trait_item { + TraitItem::Function { name, parameters, return_type, body, .. } => { + let Some(name_location) = self.to_lsp_location(name.span()) else { + return; + }; + + let mut span = name.span(); + + // If there are parameters, extend the span to include the last parameter. 
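// A minimal stand-in (assumed byte-offset spans, not noirc_errors::Span) for the span
// stretching done in this method: start from the name's span and widen the end over the
// last parameter, the return type and the body, whichever reaches furthest, to
// approximate a span for the whole trait-item definition.
#[derive(Clone, Copy, Debug, PartialEq)]
struct ByteSpan { start: u32, end: u32 }

impl ByteSpan {
    fn extend_to(self, other_end: u32) -> ByteSpan {
        ByteSpan { start: self.start, end: self.end.max(other_end) }
    }
}

fn item_span(
    name: ByteSpan,
    last_param_end: Option<u32>,
    return_type_end: Option<u32>,
    body_end: Option<u32>,
) -> ByteSpan {
    let mut span = name;
    for end in [last_param_end, return_type_end, body_end].into_iter().flatten() {
        span = span.extend_to(end);
    }
    span
}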
+ if let Some((param_name, _param_type)) = parameters.last() { + span = Span::from(span.start()..param_name.span().end()); + } + + // If there's a return type, extend the span to include it + match return_type { + FunctionReturnType::Default(return_type_span) => { + span = Span::from(span.start()..return_type_span.end()); + } + FunctionReturnType::Ty(typ) => { + if let Some(type_span) = typ.span { + span = Span::from(span.start()..type_span.end()); + } + } + } + + // If there's a body, extend the span to include it + if let Some(body) = body { + if let Some(statement) = body.statements.last() { + span = Span::from(span.start()..statement.span.end()); + } + } + + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: name.to_string(), + detail: None, + kind: SymbolKind::METHOD, + tags: None, + deprecated: None, + range: location.range, + selection_range: name_location.range, + children: None, + }); + } + TraitItem::Constant { name, typ, default_value } => { + self.collect_in_constant(name, typ, default_value.as_ref(), symbols); + } + TraitItem::Type { name } => { + self.collect_in_type(name, None, symbols); + } + } + } + + fn collect_in_constant( + &mut self, + name: &Ident, + typ: &UnresolvedType, + default_value: Option<&Expression>, + symbols: &mut Vec, + ) { + let Some(name_location) = self.to_lsp_location(name.span()) else { + return; + }; + + let mut span = name.span(); + + // If there's a type span, extend the span to include it + if let Some(type_span) = typ.span { + span = Span::from(span.start()..type_span.end()); + } + + // If there's a default value, extend the span to include it + if let Some(default_value) = default_value { + span = Span::from(span.start()..default_value.span.end()); + } + + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: name.to_string(), + detail: None, + kind: SymbolKind::CONSTANT, + tags: None, + deprecated: None, + range: location.range, + selection_range: name_location.range, + children: None, + }); + } + + fn collect_in_type( + &mut self, + name: &Ident, + typ: Option<&UnresolvedType>, + symbols: &mut Vec, + ) { + let Some(name_location) = self.to_lsp_location(name.span()) else { + return; + }; + + let span = if let Some(type_span) = typ.and_then(|typ| typ.span) { + Span::from(name.span().start()..type_span.end()) + } else { + name.span() + }; + + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: name.to_string(), + detail: None, + kind: SymbolKind::TYPE_PARAMETER, + tags: None, + deprecated: None, + range: location.range, + selection_range: name_location.range, + children: None, + }); + } + + fn collect_in_noir_trait_impl( + &mut self, + noir_trait_impl: &NoirTraitImpl, + span: Span, + symbols: &mut Vec, + ) { + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + let Some(name_location) = self.to_lsp_location(noir_trait_impl.trait_name.span) else { + return; + }; + + let mut trait_name = String::new(); + trait_name.push_str(&noir_trait_impl.trait_name.to_string()); + if !noir_trait_impl.trait_generics.is_empty() { + trait_name.push('<'); + for (index, generic) in noir_trait_impl.trait_generics.iter().enumerate() { + if index > 0 { + trait_name.push_str(", "); + } + trait_name.push_str(&generic.to_string()); + } + trait_name.push('>'); + } + + let mut children = Vec::new(); + for 
trait_impl_item in &noir_trait_impl.items { + self.collect_in_trait_impl_item(trait_impl_item, &mut children); + } + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: format!("impl {} for {}", trait_name, noir_trait_impl.object_type), + detail: None, + kind: SymbolKind::NAMESPACE, + tags: None, + deprecated: None, + range: location.range, + selection_range: name_location.range, + children: Some(children), + }); + } + + fn collect_in_trait_impl_item( + &mut self, + trait_impl_item: &TraitImplItem, + symbols: &mut Vec, + ) { + match trait_impl_item { + TraitImplItem::Function(noir_function) => { + let span = Span::from( + noir_function.name_ident().span().start()..noir_function.span().end(), + ); + self.collect_in_noir_function(noir_function, span, symbols); + } + TraitImplItem::Constant(name, typ, default_value) => { + self.collect_in_constant(name, typ, Some(default_value), symbols); + } + TraitImplItem::Type { name, alias } => self.collect_in_type(name, Some(alias), symbols), + } + } + + fn collect_in_type_impl( + &mut self, + type_impl: &TypeImpl, + span: Span, + symbols: &mut Vec, + ) { + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + let UnresolvedTypeData::Named(name_path, ..) = &type_impl.object_type.typ else { + return; + }; + + let name = name_path.last_segment(); + + let Some(name_location) = self.to_lsp_location(name.span()) else { + return; + }; + + let mut children = Vec::new(); + for (noir_function, noir_function_span) in &type_impl.methods { + self.collect_in_noir_function(noir_function, *noir_function_span, &mut children); + } + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: name.to_string(), + detail: None, + kind: SymbolKind::NAMESPACE, + tags: None, + deprecated: None, + range: location.range, + selection_range: name_location.range, + children: Some(children), + }); + } + + fn collect_in_parsed_sub_module( + &mut self, + parsed_sub_module: &ParsedSubModule, + span: Span, + symbols: &mut Vec, + ) { + let Some(name_location) = self.to_lsp_location(parsed_sub_module.name.span()) else { + return; + }; + + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + let mut children = Vec::new(); + for item in &parsed_sub_module.contents.items { + self.collect_in_item(item, &mut children); + } + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: parsed_sub_module.name.to_string(), + detail: None, + kind: SymbolKind::MODULE, + tags: None, + deprecated: None, + range: location.range, + selection_range: name_location.range, + children: Some(children), + }); + } + + fn collect_in_global( + &mut self, + global: &LetStatement, + span: Span, + symbols: &mut Vec, + ) { + let Some(name_location) = self.to_lsp_location(global.pattern.span()) else { + return; + }; + + let Some(location) = self.to_lsp_location(span) else { + return; + }; + + #[allow(deprecated)] + symbols.push(DocumentSymbol { + name: global.pattern.to_string(), + detail: None, + kind: SymbolKind::CONSTANT, + tags: None, + deprecated: None, + range: location.range, + selection_range: name_location.range, + children: None, + }); + } + + fn to_lsp_location(&self, span: Span) -> Option { + super::to_lsp_location(self.files, self.file_id, span) + } +} + +#[cfg(test)] +mod document_symbol_tests { + use crate::test_utils; + + use super::*; + use lsp_types::{ + PartialResultParams, Range, SymbolKind, TextDocumentIdentifier, WorkDoneProgressParams, + }; + use tokio::test; + + #[test] + async fn test_document_symbol() { + let (mut state, 
noir_text_document) = test_utils::init_lsp_server("document_symbol").await; + + let response = on_document_symbol_request( + &mut state, + DocumentSymbolParams { + text_document: TextDocumentIdentifier { uri: noir_text_document }, + work_done_progress_params: WorkDoneProgressParams { work_done_token: None }, + partial_result_params: PartialResultParams { partial_result_token: None }, + }, + ) + .await + .expect("Could not execute on_document_symbol_request") + .unwrap(); + + let DocumentSymbolResponse::Nested(symbols) = response else { + panic!("Expected response to be nested"); + }; + + assert_eq!( + symbols, + vec![ + #[allow(deprecated)] + DocumentSymbol { + name: "foo".to_string(), + detail: Some("fn foo(_x: i32)".to_string()), + kind: SymbolKind::FUNCTION, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 0, character: 0 }, + end: Position { line: 2, character: 1 }, + }, + selection_range: Range { + start: Position { line: 0, character: 3 }, + end: Position { line: 0, character: 6 }, + }, + children: None, + }, + #[allow(deprecated)] + DocumentSymbol { + name: "SomeStruct".to_string(), + detail: None, + kind: SymbolKind::STRUCT, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 4, character: 0 }, + end: Position { line: 6, character: 1 }, + }, + selection_range: Range { + start: Position { line: 4, character: 7 }, + end: Position { line: 4, character: 17 }, + }, + children: Some(vec![ + #[allow(deprecated)] + DocumentSymbol { + name: "field".to_string(), + detail: None, + kind: SymbolKind::FIELD, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 5, character: 4 }, + end: Position { line: 5, character: 14 }, + }, + selection_range: Range { + start: Position { line: 5, character: 4 }, + end: Position { line: 5, character: 9 }, + }, + children: None, + }, + ],), + }, + #[allow(deprecated)] + DocumentSymbol { + name: "SomeStruct".to_string(), + detail: None, + kind: SymbolKind::NAMESPACE, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 8, character: 0 }, + end: Position { line: 12, character: 1 }, + }, + selection_range: Range { + start: Position { line: 8, character: 5 }, + end: Position { line: 8, character: 15 }, + }, + children: Some(vec![ + #[allow(deprecated)] + DocumentSymbol { + name: "new".to_string(), + detail: Some("fn new() -> SomeStruct".to_string()), + kind: SymbolKind::FUNCTION, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 9, character: 4 }, + end: Position { line: 11, character: 5 }, + }, + selection_range: Range { + start: Position { line: 9, character: 7 }, + end: Position { line: 9, character: 10 }, + }, + children: None, + }, + ],), + }, + #[allow(deprecated)] + DocumentSymbol { + name: "SomeTrait".to_string(), + detail: None, + kind: SymbolKind::INTERFACE, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 14, character: 0 }, + end: Position { line: 16, character: 1 }, + }, + selection_range: Range { + start: Position { line: 14, character: 6 }, + end: Position { line: 14, character: 15 }, + }, + children: Some(vec![ + #[allow(deprecated)] + DocumentSymbol { + name: "some_method".to_string(), + detail: None, + kind: SymbolKind::METHOD, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 15, character: 7 }, + end: Position { line: 15, character: 25 }, + }, + selection_range: Range { + start: Position { line: 15, character: 7 }, + end: Position { line: 15, character: 18 }, + }, + 
children: None, + }, + ],), + }, + #[allow(deprecated)] + DocumentSymbol { + name: "impl SomeTrait for SomeStruct".to_string(), + detail: None, + kind: SymbolKind::NAMESPACE, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 18, character: 0 }, + end: Position { line: 21, character: 1 }, + }, + selection_range: Range { + start: Position { line: 18, character: 5 }, + end: Position { line: 18, character: 14 }, + }, + children: Some(vec![ + #[allow(deprecated)] + DocumentSymbol { + name: "some_method".to_string(), + detail: Some("fn some_method(_x: i32)".to_string()), + kind: SymbolKind::FUNCTION, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 19, character: 7 }, + end: Position { line: 20, character: 5 }, + }, + selection_range: Range { + start: Position { line: 19, character: 7 }, + end: Position { line: 19, character: 18 }, + }, + children: None, + }, + ],), + }, + #[allow(deprecated)] + DocumentSymbol { + name: "submodule".to_string(), + detail: None, + kind: SymbolKind::MODULE, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 23, character: 0 }, + end: Position { line: 25, character: 1 }, + }, + selection_range: Range { + start: Position { line: 23, character: 4 }, + end: Position { line: 23, character: 13 }, + }, + children: Some(vec![ + #[allow(deprecated)] + DocumentSymbol { + name: "SOME_GLOBAL".to_string(), + detail: None, + kind: SymbolKind::CONSTANT, + tags: None, + deprecated: None, + range: Range { + start: Position { line: 24, character: 4 }, + end: Position { line: 24, character: 27 } + }, + selection_range: Range { + start: Position { line: 24, character: 11 }, + end: Position { line: 24, character: 22 } + }, + children: None + } + ]), + }, + ] + ); + } +} diff --git a/tooling/lsp/src/requests/goto_declaration.rs b/tooling/lsp/src/requests/goto_declaration.rs index 627cd8d203e..bd0f0afb827 100644 --- a/tooling/lsp/src/requests/goto_declaration.rs +++ b/tooling/lsp/src/requests/goto_declaration.rs @@ -20,10 +20,10 @@ fn on_goto_definition_inner( state: &mut LspState, params: GotoDeclarationParams, ) -> Result { - process_request(state, params.text_document_position_params, |location, interner, files| { - interner.get_declaration_location_from(location).and_then(|found_location| { + process_request(state, params.text_document_position_params, |args| { + args.interner.get_declaration_location_from(args.location).and_then(|found_location| { let file_id = found_location.file; - let definition_position = to_lsp_location(files, file_id, found_location.span)?; + let definition_position = to_lsp_location(args.files, file_id, found_location.span)?; let response = GotoDeclarationResponse::from(definition_position).to_owned(); Some(response) }) diff --git a/tooling/lsp/src/requests/goto_definition.rs b/tooling/lsp/src/requests/goto_definition.rs index 3713e8b646a..5e655766024 100644 --- a/tooling/lsp/src/requests/goto_definition.rs +++ b/tooling/lsp/src/requests/goto_definition.rs @@ -29,15 +29,21 @@ fn on_goto_definition_inner( params: GotoDefinitionParams, return_type_location_instead: bool, ) -> Result { - process_request(state, params.text_document_position_params, |location, interner, files| { - interner.get_definition_location_from(location, return_type_location_instead).and_then( - |found_location| { + process_request(state, params.text_document_position_params, |args| { + args.interner + .get_definition_location_from(args.location, return_type_location_instead) + .or_else(|| { + args.interner + 
.reference_at_location(args.location) + .map(|reference| args.interner.reference_location(reference)) + }) + .and_then(|found_location| { let file_id = found_location.file; - let definition_position = to_lsp_location(files, file_id, found_location.span)?; + let definition_position = + to_lsp_location(args.files, file_id, found_location.span)?; let response = GotoDefinitionResponse::from(definition_position).to_owned(); Some(response) - }, - ) + }) }) } @@ -201,4 +207,32 @@ mod goto_definition_tests { async fn goto_for_local_variable() { expect_goto_for_all_references("local_variable", "some_var", 0).await; } + + #[test] + async fn goto_at_struct_definition_finds_same_struct() { + expect_goto( + "go_to_definition", + Position { line: 21, character: 7 }, // "Foo" in "struct Foo" + "src/main.nr", + Range { + start: Position { line: 21, character: 7 }, + end: Position { line: 21, character: 10 }, + }, + ) + .await; + } + + #[test] + async fn goto_at_trait_definition_finds_same_trait() { + expect_goto( + "go_to_definition", + Position { line: 25, character: 6 }, // "Trait" in "trait Trait" + "src/main.nr", + Range { + start: Position { line: 25, character: 6 }, + end: Position { line: 25, character: 11 }, + }, + ) + .await; + } } diff --git a/tooling/lsp/src/requests/hover.rs b/tooling/lsp/src/requests/hover.rs new file mode 100644 index 00000000000..161fd20f555 --- /dev/null +++ b/tooling/lsp/src/requests/hover.rs @@ -0,0 +1,641 @@ +use std::future::{self, Future}; + +use async_lsp::ResponseError; +use lsp_types::{Hover, HoverContents, HoverParams, MarkupContent, MarkupKind}; +use noirc_frontend::{ + ast::Visibility, + graph::CrateId, + hir::def_map::ModuleId, + hir_def::stmt::HirPattern, + macros_api::{NodeInterner, StructId}, + node_interner::{ + DefinitionId, DefinitionKind, FuncId, GlobalId, ReferenceId, TraitId, TypeAliasId, + }, + Generics, Type, +}; + +use crate::LspState; + +use super::{process_request, to_lsp_location, ProcessRequestCallbackArgs}; + +pub(crate) fn on_hover_request( + state: &mut LspState, + params: HoverParams, +) -> impl Future, ResponseError>> { + let result = process_request(state, params.text_document_position_params, |args| { + args.interner.reference_at_location(args.location).map(|reference| { + let location = args.interner.reference_location(reference); + let lsp_location = to_lsp_location(args.files, location.file, location.span); + Hover { + range: lsp_location.map(|location| location.range), + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: format_reference(reference, &args), + }), + } + }) + }); + + future::ready(result) +} + +fn format_reference(reference: ReferenceId, args: &ProcessRequestCallbackArgs) -> String { + match reference { + ReferenceId::Module(id) => format_module(id, args), + ReferenceId::Struct(id) => format_struct(id, args), + ReferenceId::StructMember(id, field_index) => format_struct_member(id, field_index, args), + ReferenceId::Trait(id) => format_trait(id, args), + ReferenceId::Global(id) => format_global(id, args), + ReferenceId::Function(id) => format_function(id, args), + ReferenceId::Alias(id) => format_alias(id, args), + ReferenceId::Local(id) => format_local(id, args), + ReferenceId::Reference(location, _) => { + format_reference(args.interner.find_referenced(location).unwrap(), args) + } + } +} +fn format_module(id: ModuleId, args: &ProcessRequestCallbackArgs) -> String { + let module_attributes = args.interner.module_attributes(&id); + + let mut string = String::new(); + if 
format_parent_module_from_module_id( + &ModuleId { krate: id.krate, local_id: module_attributes.parent }, + args, + &mut string, + ) { + string.push('\n'); + } + string.push_str(" "); + string.push_str("mod "); + string.push_str(&module_attributes.name); + string +} + +fn format_struct(id: StructId, args: &ProcessRequestCallbackArgs) -> String { + let struct_type = args.interner.get_struct(id); + let struct_type = struct_type.borrow(); + + let mut string = String::new(); + if format_parent_module(ReferenceId::Struct(id), args, &mut string) { + string.push('\n'); + } + string.push_str(" "); + string.push_str("struct "); + string.push_str(&struct_type.name.0.contents); + format_generics(&struct_type.generics, &mut string); + string.push_str(" {\n"); + for (field_name, field_type) in struct_type.get_fields_as_written() { + string.push_str(" "); + string.push_str(&field_name); + string.push_str(": "); + string.push_str(&format!("{}", field_type)); + string.push_str(",\n"); + } + string.push_str(" }"); + string +} + +fn format_struct_member( + id: StructId, + field_index: usize, + args: &ProcessRequestCallbackArgs, +) -> String { + let struct_type = args.interner.get_struct(id); + let struct_type = struct_type.borrow(); + let (field_name, field_type) = struct_type.field_at(field_index); + + let mut string = String::new(); + if format_parent_module(ReferenceId::Struct(id), args, &mut string) { + string.push_str("::"); + } + string.push_str(&struct_type.name.0.contents); + string.push('\n'); + string.push_str(" "); + string.push_str(&field_name.0.contents); + string.push_str(": "); + string.push_str(&format!("{}", field_type)); + string +} + +fn format_trait(id: TraitId, args: &ProcessRequestCallbackArgs) -> String { + let a_trait = args.interner.get_trait(id); + + let mut string = String::new(); + if format_parent_module(ReferenceId::Trait(id), args, &mut string) { + string.push('\n'); + } + string.push_str(" "); + string.push_str("trait "); + string.push_str(&a_trait.name.0.contents); + format_generics(&a_trait.generics, &mut string); + string +} + +fn format_global(id: GlobalId, args: &ProcessRequestCallbackArgs) -> String { + let global_info = args.interner.get_global(id); + let definition_id = global_info.definition_id; + let typ = args.interner.definition_type(definition_id); + + let mut string = String::new(); + if format_parent_module(ReferenceId::Global(id), args, &mut string) { + string.push('\n'); + } + string.push_str(" "); + string.push_str("global "); + string.push_str(&global_info.ident.0.contents); + string.push_str(": "); + string.push_str(&format!("{}", typ)); + string +} + +fn format_function(id: FuncId, args: &ProcessRequestCallbackArgs) -> String { + let func_meta = args.interner.function_meta(&id); + let func_name_definition_id = args.interner.definition(func_meta.name.id); + + let mut string = String::new(); + let formatted_parent_module = + format_parent_module(ReferenceId::Function(id), args, &mut string); + let formatted_parent_struct = if let Some(struct_id) = func_meta.struct_id { + let struct_type = args.interner.get_struct(struct_id); + let struct_type = struct_type.borrow(); + if formatted_parent_module { + string.push_str("::"); + } + string.push_str(&struct_type.name.0.contents); + true + } else { + false + }; + if formatted_parent_module || formatted_parent_struct { + string.push('\n'); + } + string.push_str(" "); + string.push_str("fn "); + string.push_str(&func_name_definition_id.name); + format_generics(&func_meta.direct_generics, &mut string); + 
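// A self-contained sketch (the flattened `Param` shape is an assumption) of how the
// parameter list below is rendered for hover text: `self` keeps no type annotation,
// public parameters get a `pub` prefix before their type, and entries are joined with
// ", " so there is no trailing comma.
struct Param {
    name: String,
    typ: String,
    is_self: bool,
    is_public: bool,
}

fn format_params(params: &[Param]) -> String {
    let rendered: Vec<String> = params
        .iter()
        .map(|p| {
            if p.is_self {
                p.name.clone()
            } else if p.is_public {
                format!("{}: pub {}", p.name, p.typ)
            } else {
                format!("{}: {}", p.name, p.typ)
            }
        })
        .collect();
    format!("({})", rendered.join(", "))
}
// For [self, x: i32, y: i32] this yields "(self, x: i32, y: i32)", the same shape the
// hover tests assert for struct methods.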
string.push('('); + let parameters = &func_meta.parameters; + for (index, (pattern, typ, visibility)) in parameters.iter().enumerate() { + format_pattern(pattern, args.interner, &mut string); + if !pattern_is_self(pattern, args.interner) { + string.push_str(": "); + if matches!(visibility, Visibility::Public) { + string.push_str("pub "); + } + string.push_str(&format!("{}", typ)); + } + if index != parameters.len() - 1 { + string.push_str(", "); + } + } + + string.push(')'); + + let return_type = func_meta.return_type(); + match return_type { + Type::Unit => (), + _ => { + string.push_str(" -> "); + string.push_str(&format!("{}", return_type)); + } + } + + string +} + +fn format_alias(id: TypeAliasId, args: &ProcessRequestCallbackArgs) -> String { + let type_alias = args.interner.get_type_alias(id); + let type_alias = type_alias.borrow(); + + let mut string = String::new(); + format_parent_module(ReferenceId::Alias(id), args, &mut string); + string.push('\n'); + string.push_str(" "); + string.push_str("type "); + string.push_str(&type_alias.name.0.contents); + string.push_str(" = "); + string.push_str(&format!("{}", &type_alias.typ)); + string +} + +fn format_local(id: DefinitionId, args: &ProcessRequestCallbackArgs) -> String { + let definition_info = args.interner.definition(id); + let DefinitionKind::Local(expr_id) = definition_info.kind else { + panic!("Expected a local reference to reference a local definition") + }; + let typ = args.interner.definition_type(id); + + let mut string = String::new(); + string.push_str(" "); + if definition_info.comptime { + string.push_str("comptime "); + } + if expr_id.is_some() { + string.push_str("let "); + } + if definition_info.mutable { + if expr_id.is_none() { + string.push_str("let "); + } + string.push_str("mut "); + } + string.push_str(&definition_info.name); + if !matches!(typ, Type::Error) { + string.push_str(": "); + string.push_str(&format!("{}", typ)); + } + string +} + +fn format_generics(generics: &Generics, string: &mut String) { + if generics.is_empty() { + return; + } + + string.push('<'); + for (index, generic) in generics.iter().enumerate() { + string.push_str(&generic.name); + if index != generics.len() - 1 { + string.push_str(", "); + } + } + string.push('>'); +} +fn format_pattern(pattern: &HirPattern, interner: &NodeInterner, string: &mut String) { + match pattern { + HirPattern::Identifier(ident) => { + let definition = interner.definition(ident.id); + string.push_str(&definition.name); + } + HirPattern::Mutable(pattern, _) => { + string.push_str("mut "); + format_pattern(pattern, interner, string); + } + HirPattern::Tuple(..) | HirPattern::Struct(..) => { + string.push('_'); + } + } +} + +fn pattern_is_self(pattern: &HirPattern, interner: &NodeInterner) -> bool { + match pattern { + HirPattern::Identifier(ident) => { + let definition = interner.definition(ident.id); + definition.name == "self" + } + HirPattern::Mutable(pattern, _) => pattern_is_self(pattern, interner), + HirPattern::Tuple(..) | HirPattern::Struct(..) 
=> false, + } +} + +fn format_parent_module( + referenced: ReferenceId, + args: &ProcessRequestCallbackArgs, + string: &mut String, +) -> bool { + let Some(module) = args.interner.reference_module(referenced) else { + return false; + }; + + format_parent_module_from_module_id(module, args, string) +} + +fn format_parent_module_from_module_id( + module: &ModuleId, + args: &ProcessRequestCallbackArgs, + string: &mut String, +) -> bool { + let crate_id = module.krate; + let crate_name = match crate_id { + CrateId::Root(_) => Some(args.root_crate_name.clone()), + CrateId::Crate(_) => args + .root_crate_dependencies + .iter() + .find(|dep| dep.crate_id == crate_id) + .map(|dep| format!("{}", dep.name)), + CrateId::Stdlib(_) => Some("std".to_string()), + CrateId::Dummy => None, + }; + + let wrote_crate = if let Some(crate_name) = crate_name { + string.push_str(" "); + string.push_str(&crate_name); + true + } else { + false + }; + + let Some(module_attributes) = args.interner.try_module_attributes(module) else { + return wrote_crate; + }; + + if wrote_crate { + string.push_str("::"); + } else { + string.push_str(" "); + } + + let mut segments = Vec::new(); + let mut current_attributes = module_attributes; + while let Some(parent_attributes) = args.interner.try_module_attributes(&ModuleId { + krate: module.krate, + local_id: current_attributes.parent, + }) { + segments.push(&parent_attributes.name); + current_attributes = parent_attributes; + } + + for segment in segments.iter().rev() { + string.push_str(segment); + string.push_str("::"); + } + + string.push_str(&module_attributes.name); + + true +} + +#[cfg(test)] +mod hover_tests { + use crate::test_utils; + + use super::*; + use lsp_types::{ + Position, TextDocumentIdentifier, TextDocumentPositionParams, Url, WorkDoneProgressParams, + }; + use tokio::test; + + async fn assert_hover(directory: &str, file: &str, position: Position, expected_text: &str) { + let (mut state, noir_text_document) = test_utils::init_lsp_server(directory).await; + + // noir_text_document is always `src/main.nr` in the workspace directory, so let's go to the workspace dir + let noir_text_document = noir_text_document.to_file_path().unwrap(); + let workspace_dir = noir_text_document.parent().unwrap().parent().unwrap(); + + let file_uri = Url::from_file_path(workspace_dir.join(file)).unwrap(); + + let hover = on_hover_request( + &mut state, + HoverParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri: file_uri }, + position, + }, + work_done_progress_params: WorkDoneProgressParams { work_done_token: None }, + }, + ) + .await + .expect("Could not execute hover") + .unwrap(); + + let HoverContents::Markup(markup) = hover.contents else { + panic!("Expected hover contents to be Markup"); + }; + + assert_eq!(markup.value, expected_text); + } + + #[test] + async fn hover_on_module() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 6, character: 9 }, + r#" one + mod subone"#, + ) + .await; + } + + #[test] + async fn hover_on_struct() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 9, character: 20 }, + r#" one::subone + struct SubOneStruct { + some_field: i32, + some_other_field: Field, + }"#, + ) + .await; + } + + #[test] + async fn hover_on_generic_struct() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 46, character: 17 }, + r#" one::subone + struct GenericStruct { + }"#, + ) + .await; + } + + #[test] + async fn hover_on_struct_member() { + 
assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 9, character: 35 }, + r#" one::subone::SubOneStruct + some_field: i32"#, + ) + .await; + } + + #[test] + async fn hover_on_trait() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 12, character: 17 }, + r#" one::subone + trait SomeTrait"#, + ) + .await; + } + + #[test] + async fn hover_on_global() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 15, character: 25 }, + r#" one::subone + global some_global: Field"#, + ) + .await; + } + + #[test] + async fn hover_on_function() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 3, character: 4 }, + r#" one + fn function_one()"#, + ) + .await; + } + + #[test] + async fn hover_on_local_function() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 2, character: 7 }, + r#" two + fn function_two()"#, + ) + .await; + } + + #[test] + async fn hover_on_struct_method() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 20, character: 6 }, + r#" one::subone::SubOneStruct + fn foo(self, x: i32, y: i32) -> Field"#, + ) + .await; + } + + #[test] + async fn hover_on_local_var() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 25, character: 12 }, + " let regular_var: Field", + ) + .await; + } + + #[test] + async fn hover_on_local_mut_var() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 27, character: 4 }, + " let mut mutable_var: Field", + ) + .await; + } + + #[test] + async fn hover_on_parameter() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 31, character: 12 }, + " some_param: i32", + ) + .await; + } + + #[test] + async fn hover_on_alias() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 34, character: 17 }, + r#" one::subone + type SomeAlias = i32"#, + ) + .await; + } + + #[test] + async fn hover_on_trait_on_call() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 39, character: 17 }, + r#" std::default + trait Default"#, + ) + .await; + } + + #[test] + async fn hover_on_std_module_in_use() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 36, character: 9 }, + r#" std + mod default"#, + ) + .await; + } + + #[test] + async fn hover_on_crate_module_in_call() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 15, character: 17 }, + r#" one + mod subone"#, + ) + .await; + } + + #[test] + async fn hover_on_module_without_crate_or_std_prefix() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 43, character: 4 }, + r#" two + mod other"#, + ) + .await; + } + + #[test] + async fn hover_on_module_with_crate_prefix() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 44, character: 11 }, + r#" two + mod other"#, + ) + .await; + } + + #[test] + async fn hover_on_module_on_struct_constructor() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 19, character: 12 }, + r#" one + mod subone"#, + ) + .await; + } + + #[test] + async fn hover_on_type_inside_generic_arguments() { + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 51, character: 30 }, + r#" one::subone + struct SubOneStruct { + some_field: i32, + some_other_field: Field, + }"#, + ) + .await; + } +} diff --git a/tooling/lsp/src/requests/inlay_hint.rs b/tooling/lsp/src/requests/inlay_hint.rs new file mode 100644 index 00000000000..2afa5fa44fd --- /dev/null +++ 
b/tooling/lsp/src/requests/inlay_hint.rs @@ -0,0 +1,929 @@ +use fm::codespan_files::Files; +use std::future::{self, Future}; + +use async_lsp::ResponseError; +use fm::{FileId, FileMap, PathString}; +use lsp_types::{ + InlayHint, InlayHintKind, InlayHintLabel, InlayHintLabelPart, InlayHintParams, Position, + TextDocumentPositionParams, +}; +use noirc_errors::{Location, Span}; +use noirc_frontend::{ + self, + ast::{ + BlockExpression, Expression, ExpressionKind, Ident, LetStatement, NoirFunction, Pattern, + Statement, StatementKind, TraitImplItem, TraitItem, UnresolvedTypeData, + }, + hir_def::stmt::HirPattern, + macros_api::NodeInterner, + node_interner::ReferenceId, + parser::{Item, ItemKind}, + ParsedModule, Type, TypeBinding, TypeVariable, TypeVariableKind, +}; + +use crate::LspState; + +use super::{process_request, to_lsp_location, InlayHintsOptions}; + +pub(crate) fn on_inlay_hint_request( + state: &mut LspState, + params: InlayHintParams, +) -> impl Future>, ResponseError>> { + let text_document_position_params = TextDocumentPositionParams { + text_document: params.text_document.clone(), + position: Position { line: 0, character: 0 }, + }; + + let options = state.options.inlay_hints; + + let result = process_request(state, text_document_position_params, |args| { + let path = PathString::from_path(params.text_document.uri.to_file_path().unwrap()); + args.files.get_file_id(&path).map(|file_id| { + let file = args.files.get_file(file_id).unwrap(); + let source = file.source(); + let (parsed_module, _errors) = noirc_frontend::parse_program(source); + + let span = range_to_byte_span(args.files, file_id, &params.range) + .map(|range| Span::from(range.start as u32..range.end as u32)); + + let mut collector = + InlayHintCollector::new(args.files, file_id, args.interner, span, options); + collector.collect_in_parsed_module(&parsed_module); + collector.inlay_hints + }) + }); + future::ready(result) +} + +pub(crate) struct InlayHintCollector<'a> { + files: &'a FileMap, + file_id: FileId, + interner: &'a NodeInterner, + span: Option, + options: InlayHintsOptions, + inlay_hints: Vec, +} + +impl<'a> InlayHintCollector<'a> { + fn new( + files: &'a FileMap, + file_id: FileId, + interner: &'a NodeInterner, + span: Option, + options: InlayHintsOptions, + ) -> InlayHintCollector<'a> { + InlayHintCollector { files, file_id, interner, span, options, inlay_hints: Vec::new() } + } + fn collect_in_parsed_module(&mut self, parsed_module: &ParsedModule) { + for item in &parsed_module.items { + self.collect_in_item(item); + } + } + + fn collect_in_item(&mut self, item: &Item) { + if !self.intersects_span(item.span) { + return; + } + + match &item.kind { + ItemKind::Function(noir_function) => self.collect_in_noir_function(noir_function), + ItemKind::Trait(noir_trait) => { + for item in &noir_trait.items { + self.collect_in_trait_item(item); + } + } + ItemKind::TraitImpl(noir_trait_impl) => { + for item in &noir_trait_impl.items { + self.collect_in_trait_impl_item(item); + } + } + ItemKind::Impl(type_impl) => { + for (noir_function, _) in &type_impl.methods { + self.collect_in_noir_function(noir_function); + } + } + ItemKind::Global(let_statement) => self.collect_in_let_statement(let_statement), + ItemKind::Submodules(parsed_submodule) => { + self.collect_in_parsed_module(&parsed_submodule.contents); + } + ItemKind::ModuleDecl(_) => (), + ItemKind::Import(_) => (), + ItemKind::Struct(_) => (), + ItemKind::TypeAlias(_) => (), + } + } + + fn collect_in_trait_item(&mut self, item: &TraitItem) { + match item { +
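+ // Trait items can also contain code to scan: default method bodies and default values of associated constants.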
TraitItem::Function { body, .. } => { + if let Some(body) = body { + self.collect_in_block_expression(body); + } + } + TraitItem::Constant { name: _, typ: _, default_value } => { + if let Some(default_value) = default_value { + self.collect_in_expression(default_value); + } + } + TraitItem::Type { .. } => (), + } + } + + fn collect_in_trait_impl_item(&mut self, item: &TraitImplItem) { + match item { + TraitImplItem::Function(noir_function) => self.collect_in_noir_function(noir_function), + TraitImplItem::Constant(_name, _typ, default_value) => { + self.collect_in_expression(default_value); + } + TraitImplItem::Type { .. } => (), + } + } + + fn collect_in_noir_function(&mut self, noir_function: &NoirFunction) { + self.collect_in_block_expression(&noir_function.def.body); + } + + fn collect_in_let_statement(&mut self, let_statement: &LetStatement) { + // Only show inlay hints for let variables that don't have an explicit type annotation + if let UnresolvedTypeData::Unspecified = let_statement.r#type.typ { + self.collect_in_pattern(&let_statement.pattern); + }; + + self.collect_in_expression(&let_statement.expression); + } + + fn collect_in_block_expression(&mut self, block_expression: &BlockExpression) { + for statement in &block_expression.statements { + self.collect_in_statement(statement); + } + } + + fn collect_in_statement(&mut self, statement: &Statement) { + if !self.intersects_span(statement.span) { + return; + } + + match &statement.kind { + StatementKind::Let(let_statement) => self.collect_in_let_statement(let_statement), + StatementKind::Constrain(constrain_statement) => { + self.collect_in_expression(&constrain_statement.0); + } + StatementKind::Expression(expression) => self.collect_in_expression(expression), + StatementKind::Assign(assign_statement) => { + self.collect_in_expression(&assign_statement.expression); + } + StatementKind::For(for_loop_statement) => { + self.collect_in_ident(&for_loop_statement.identifier); + self.collect_in_expression(&for_loop_statement.block); + } + StatementKind::Comptime(statement) => self.collect_in_statement(statement), + StatementKind::Semi(expression) => self.collect_in_expression(expression), + StatementKind::Break => (), + StatementKind::Continue => (), + StatementKind::Error => (), + } + } + + fn collect_in_expression(&mut self, expression: &Expression) { + if !self.intersects_span(expression.span) { + return; + } + + match &expression.kind { + ExpressionKind::Block(block_expression) => { + self.collect_in_block_expression(block_expression); + } + ExpressionKind::Prefix(prefix_expression) => { + self.collect_in_expression(&prefix_expression.rhs); + } + ExpressionKind::Index(index_expression) => { + self.collect_in_expression(&index_expression.collection); + self.collect_in_expression(&index_expression.index); + } + ExpressionKind::Call(call_expression) => { + self.collect_call_parameter_names( + get_expression_name(&call_expression.func), + call_expression.func.span, + &call_expression.arguments, + ); + + self.collect_in_expression(&call_expression.func); + for arg in &call_expression.arguments { + self.collect_in_expression(arg); + } + } + ExpressionKind::MethodCall(method_call_expression) => { + self.collect_call_parameter_names( + Some(method_call_expression.method_name.to_string()), + method_call_expression.method_name.span(), + &method_call_expression.arguments, + ); + + self.collect_in_expression(&method_call_expression.object); + for arg in &method_call_expression.arguments { + self.collect_in_expression(arg); + } + } + 
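+ // Compound expressions below are walked recursively so nested calls still produce hints; leaf expressions (literals, variables, etc.) need no traversal.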
ExpressionKind::Constructor(constructor_expression) => { + for (_name, expr) in &constructor_expression.fields { + self.collect_in_expression(expr); + } + } + ExpressionKind::MemberAccess(member_access_expression) => { + self.collect_in_expression(&member_access_expression.lhs); + } + ExpressionKind::Cast(cast_expression) => { + self.collect_in_expression(&cast_expression.lhs); + } + ExpressionKind::Infix(infix_expression) => { + self.collect_in_expression(&infix_expression.lhs); + self.collect_in_expression(&infix_expression.rhs); + } + ExpressionKind::If(if_expression) => { + self.collect_in_expression(&if_expression.condition); + self.collect_in_expression(&if_expression.consequence); + if let Some(alternative) = &if_expression.alternative { + self.collect_in_expression(alternative); + } + } + ExpressionKind::Tuple(expressions) => { + for expression in expressions { + self.collect_in_expression(expression); + } + } + ExpressionKind::Lambda(lambda) => self.collect_in_expression(&lambda.body), + ExpressionKind::Parenthesized(parenthesized) => { + self.collect_in_expression(parenthesized); + } + ExpressionKind::Unquote(expression) => { + self.collect_in_expression(expression); + } + ExpressionKind::Comptime(block_expression, _span) => { + self.collect_in_block_expression(block_expression); + } + ExpressionKind::Literal(..) + | ExpressionKind::Variable(..) + | ExpressionKind::Quote(..) + | ExpressionKind::Resolved(..) + | ExpressionKind::Error => (), + } + } + + fn collect_in_pattern(&mut self, pattern: &Pattern) { + if !self.options.type_hints.enabled { + return; + } + + match pattern { + Pattern::Identifier(ident) => { + self.collect_in_ident(ident); + } + Pattern::Mutable(pattern, _span, _is_synthesized) => { + self.collect_in_pattern(pattern); + } + Pattern::Tuple(patterns, _span) => { + for pattern in patterns { + self.collect_in_pattern(pattern); + } + } + Pattern::Struct(_path, patterns, _span) => { + for (_ident, pattern) in patterns { + self.collect_in_pattern(pattern); + } + } + } + } + + fn collect_in_ident(&mut self, ident: &Ident) { + if !self.options.type_hints.enabled { + return; + } + + let span = ident.span(); + let location = Location::new(ident.span(), self.file_id); + if let Some(lsp_location) = to_lsp_location(self.files, self.file_id, span) { + if let Some(referenced) = self.interner.find_referenced(location) { + match referenced { + ReferenceId::Global(global_id) => { + let global_info = self.interner.get_global(global_id); + let definition_id = global_info.definition_id; + let typ = self.interner.definition_type(definition_id); + self.push_type_hint(lsp_location, &typ); + } + ReferenceId::Local(definition_id) => { + let typ = self.interner.definition_type(definition_id); + self.push_type_hint(lsp_location, &typ); + } + ReferenceId::StructMember(struct_id, field_index) => { + let struct_type = self.interner.get_struct(struct_id); + let struct_type = struct_type.borrow(); + let (_field_name, field_type) = struct_type.field_at(field_index); + self.push_type_hint(lsp_location, field_type); + } + ReferenceId::Module(_) + | ReferenceId::Struct(_) + | ReferenceId::Trait(_) + | ReferenceId::Function(_) + | ReferenceId::Alias(_) + | ReferenceId::Reference(..) 
=> (), + } + } + } + + fn push_type_hint(&mut self, location: lsp_types::Location, typ: &Type) { + let position = location.range.end; + + let mut parts = Vec::new(); + parts.push(string_part(": ")); + push_type_parts(typ, &mut parts, self.files); + + self.inlay_hints.push(InlayHint { + position, + label: InlayHintLabel::LabelParts(parts), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }); + } + + fn collect_call_parameter_names( + &mut self, + function_name: Option, + at: Span, + arguments: &[Expression], + ) { + if !self.options.parameter_hints.enabled { + return; + } + + // The `at` span might be the span of a path like `Foo::bar`. + // In order to find the function behind it, we use a span that is just the last char. + let at = Span::single_char(at.end() - 1); + + let referenced = self.interner.find_referenced(Location::new(at, self.file_id)); + if let Some(ReferenceId::Function(func_id)) = referenced { + let func_meta = self.interner.function_meta(&func_id); + + let mut parameters = func_meta.parameters.iter().peekable(); + let mut parameters_count = func_meta.parameters.len(); + + // Skip `self` parameter + if let Some((pattern, _, _)) = parameters.peek() { + if self.is_self_parameter(pattern) { + parameters.next(); + parameters_count -= 1; + } + } + + for (call_argument, (pattern, _, _)) in arguments.iter().zip(parameters) { + let Some(lsp_location) = + to_lsp_location(self.files, self.file_id, call_argument.span) + else { + continue; + }; + + let Some(parameter_name) = self.get_pattern_name(pattern) else { + continue; + }; + + if parameter_name.starts_with('_') { + continue; + } + + if parameters_count == 1 { + if parameter_name.len() == 1 + || parameter_name == "other" + || parameter_name == "value" + { + continue; + } + + if let Some(function_name) = &function_name { + if function_name.ends_with(&parameter_name) { + continue; + } + } + } + + if let Some(call_argument_name) = get_expression_name(call_argument) { + if parameter_name == call_argument_name + || call_argument_name.ends_with(&parameter_name) + { + continue; + } + } + + self.push_parameter_hint(lsp_location.range.start, &parameter_name); + } + } + } + + fn get_pattern_name(&self, pattern: &HirPattern) -> Option { + match pattern { + HirPattern::Identifier(ident) => { + let definition = self.interner.definition(ident.id); + Some(definition.name.clone()) + } + HirPattern::Mutable(pattern, _location) => self.get_pattern_name(pattern), + HirPattern::Tuple(..) | HirPattern::Struct(..) => None, + } + } + + fn push_parameter_hint(&mut self, position: Position, str: &str) { + self.push_text_hint(position, format!("{}: ", str)); + } + + fn push_text_hint(&mut self, position: Position, str: String) { + self.inlay_hints.push(InlayHint { + position, + label: InlayHintLabel::String(str), + kind: Some(InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }); + } + + fn is_self_parameter(&self, pattern: &HirPattern) -> bool { + match pattern { + HirPattern::Identifier(ident) => { + let definition_info = self.interner.definition(ident.id); + definition_info.name == "self" + } + HirPattern::Mutable(pattern, _location) => self.is_self_parameter(pattern), + HirPattern::Tuple(..) | HirPattern::Struct(..)
=> false, + } + } + + fn intersects_span(&self, other_span: Span) -> bool { + self.span.map_or(true, |span| span.intersects(&other_span)) + } +} + +fn string_part(str: impl Into) -> InlayHintLabelPart { + InlayHintLabelPart { value: str.into(), location: None, tooltip: None, command: None } +} + +fn text_part_with_location(str: String, location: Location, files: &FileMap) -> InlayHintLabelPart { + InlayHintLabelPart { + value: str, + location: to_lsp_location(files, location.file, location.span), + tooltip: None, + command: None, + } +} + +fn push_type_parts(typ: &Type, parts: &mut Vec, files: &FileMap) { + match typ { + Type::Array(size, typ) => { + parts.push(string_part("[")); + push_type_parts(typ, parts, files); + parts.push(string_part("; ")); + push_type_parts(size, parts, files); + parts.push(string_part("]")); + } + Type::Slice(typ) => { + parts.push(string_part("[")); + push_type_parts(typ, parts, files); + parts.push(string_part("]")); + } + Type::Tuple(types) => { + parts.push(string_part("(")); + for (index, typ) in types.iter().enumerate() { + push_type_parts(typ, parts, files); + if index != types.len() - 1 { + parts.push(string_part(", ")); + } + } + parts.push(string_part(")")); + } + Type::Struct(struct_type, generics) => { + let struct_type = struct_type.borrow(); + let location = Location::new(struct_type.name.span(), struct_type.location.file); + parts.push(text_part_with_location(struct_type.name.to_string(), location, files)); + if !generics.is_empty() { + parts.push(string_part("<")); + for (index, generic) in generics.iter().enumerate() { + push_type_parts(generic, parts, files); + if index != generics.len() - 1 { + parts.push(string_part(", ")); + } + } + parts.push(string_part(">")); + } + } + Type::Alias(type_alias, generics) => { + let type_alias = type_alias.borrow(); + let location = Location::new(type_alias.name.span(), type_alias.location.file); + parts.push(text_part_with_location(type_alias.name.to_string(), location, files)); + if !generics.is_empty() { + parts.push(string_part("<")); + for (index, generic) in generics.iter().enumerate() { + push_type_parts(generic, parts, files); + if index != generics.len() - 1 { + parts.push(string_part(", ")); + } + } + parts.push(string_part(">")); + } + } + Type::Function(args, return_type, _env) => { + parts.push(string_part("fn(")); + for (index, arg) in args.iter().enumerate() { + push_type_parts(arg, parts, files); + if index != args.len() - 1 { + parts.push(string_part(", ")); + } + } + parts.push(string_part(") -> ")); + push_type_parts(return_type, parts, files); + } + Type::MutableReference(typ) => { + parts.push(string_part("&mut ")); + push_type_parts(typ, parts, files); + } + Type::TypeVariable(var, TypeVariableKind::Normal) => { + push_type_variable_parts(var, parts, files); + } + Type::TypeVariable(binding, TypeVariableKind::Integer) => { + if let TypeBinding::Unbound(_) = &*binding.borrow() { + push_type_parts(&Type::default_int_type(), parts, files); + } else { + push_type_variable_parts(binding, parts, files); + } + } + Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { + if let TypeBinding::Unbound(_) = &*binding.borrow() { + parts.push(string_part("Field")); + } else { + push_type_variable_parts(binding, parts, files); + } + } + Type::TypeVariable(binding, TypeVariableKind::Constant(n)) => { + if let TypeBinding::Unbound(_) = &*binding.borrow() { + // TypeVariableKind::Constant(n) binds to Type::Constant(n) by default, so just show that. 
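+ // (For example, an unbound constant type variable whose default is 3 is rendered simply as "3".)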
+ parts.push(string_part(n.to_string())); + } else { + push_type_variable_parts(binding, parts, files); + } + } + + Type::FieldElement + | Type::Integer(..) + | Type::Bool + | Type::String(..) + | Type::FmtString(..) + | Type::Unit + | Type::TraitAsType(..) + | Type::NamedGeneric(..) + | Type::Forall(..) + | Type::Constant(..) + | Type::Quoted(..) + | Type::Error => { + parts.push(string_part(typ.to_string())); + } + } +} + +fn push_type_variable_parts( + var: &TypeVariable, + parts: &mut Vec, + files: &FileMap, +) { + let var = &*var.borrow(); + match var { + TypeBinding::Bound(typ) => { + push_type_parts(typ, parts, files); + } + TypeBinding::Unbound(..) => { + parts.push(string_part(var.to_string())); + } + } +} + +fn get_expression_name(expression: &Expression) -> Option { + match &expression.kind { + ExpressionKind::Variable(path, _) => Some(path.last_segment().to_string()), + ExpressionKind::Prefix(prefix) => get_expression_name(&prefix.rhs), + ExpressionKind::MemberAccess(member_access) => Some(member_access.rhs.to_string()), + ExpressionKind::Call(call) => get_expression_name(&call.func), + ExpressionKind::MethodCall(method_call) => Some(method_call.method_name.to_string()), + ExpressionKind::Cast(cast) => get_expression_name(&cast.lhs), + ExpressionKind::Parenthesized(expr) => get_expression_name(expr), + ExpressionKind::Constructor(..) + | ExpressionKind::Infix(..) + | ExpressionKind::Index(..) + | ExpressionKind::Block(..) + | ExpressionKind::If(..) + | ExpressionKind::Lambda(..) + | ExpressionKind::Tuple(..) + | ExpressionKind::Quote(..) + | ExpressionKind::Unquote(..) + | ExpressionKind::Comptime(..) + | ExpressionKind::Resolved(..) + | ExpressionKind::Literal(..) + | ExpressionKind::Error => None, + } +} + +// These functions are copied from the codespan_lsp crate, except that they never panic +// (the library will sometimes panic, so functions returning Result are not always accurate) + +fn range_to_byte_span( + files: &FileMap, + file_id: FileId, + range: &lsp_types::Range, +) -> Option> { + Some( + position_to_byte_index(files, file_id, &range.start)? 
+ ..position_to_byte_index(files, file_id, &range.end)?, + ) +} + +fn position_to_byte_index( + files: &FileMap, + file_id: FileId, + position: &lsp_types::Position, +) -> Option { + let Ok(source) = files.source(file_id) else { + return None; + }; + + let Ok(line_span) = files.line_range(file_id, position.line as usize) else { + return None; + }; + let line_str = source.get(line_span.clone())?; + + let byte_offset = character_to_line_offset(line_str, position.character)?; + + Some(line_span.start + byte_offset) +} + +fn character_to_line_offset(line: &str, character: u32) -> Option { + let line_len = line.len(); + let mut character_offset = 0; + + let mut chars = line.chars(); + while let Some(ch) = chars.next() { + if character_offset == character { + let chars_off = chars.as_str().len(); + let ch_off = ch.len_utf8(); + + return Some(line_len - chars_off - ch_off); + } + + character_offset += ch.len_utf16() as u32; + } + + // Handle positions after the last character on the line + if character_offset == character { + Some(line_len) + } else { + None + } +} + +#[cfg(test)] +mod inlay_hints_tests { + use crate::{ + requests::{ParameterHintsOptions, TypeHintsOptions}, + test_utils, + }; + + use super::*; + use lsp_types::{Range, TextDocumentIdentifier, WorkDoneProgressParams}; + use tokio::test; + + async fn get_inlay_hints( + start_line: u32, + end_line: u32, + options: InlayHintsOptions, + ) -> Vec { + let (mut state, noir_text_document) = test_utils::init_lsp_server("inlay_hints").await; + state.options.inlay_hints = options; + + on_inlay_hint_request( + &mut state, + InlayHintParams { + work_done_progress_params: WorkDoneProgressParams { work_done_token: None }, + text_document: TextDocumentIdentifier { uri: noir_text_document }, + range: lsp_types::Range { + start: lsp_types::Position { line: start_line, character: 0 }, + end: lsp_types::Position { line: end_line, character: 0 }, + }, + }, + ) + .await + .expect("Could not execute on_inlay_hint_request") + .unwrap() + } + + fn no_hints() -> InlayHintsOptions { + InlayHintsOptions { + type_hints: TypeHintsOptions { enabled: false }, + parameter_hints: ParameterHintsOptions { enabled: false }, + } + } + + fn type_hints() -> InlayHintsOptions { + InlayHintsOptions { + type_hints: TypeHintsOptions { enabled: true }, + parameter_hints: ParameterHintsOptions { enabled: false }, + } + } + + fn parameter_hints() -> InlayHintsOptions { + InlayHintsOptions { + type_hints: TypeHintsOptions { enabled: false }, + parameter_hints: ParameterHintsOptions { enabled: true }, + } + } + + #[test] + async fn test_do_not_collect_type_hints_if_disabled() { + let inlay_hints = get_inlay_hints(0, 3, no_hints()).await; + assert!(inlay_hints.is_empty()); + } + + #[test] + async fn test_type_inlay_hints_without_location() { + let inlay_hints = get_inlay_hints(0, 3, type_hints()).await; + assert_eq!(inlay_hints.len(), 1); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, Position { line: 1, character: 11 }); + + if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { + assert_eq!(labels.len(), 2); + assert_eq!(labels[0].value, ": "); + assert_eq!(labels[0].location, None); + assert_eq!(labels[1].value, "Field"); + + // Field can't be reached (there's no source code for it) + assert_eq!(labels[1].location, None); + } else { + panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); + } + } + + #[test] + async fn test_type_inlay_hints_with_location() { + let inlay_hints = get_inlay_hints(12, 15, type_hints()).await; + 
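+ // The requested range covers `let foo = make_foo();` in the test program, so a single `: Foo` hint is expected, with its label linking back to `struct Foo`.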
assert_eq!(inlay_hints.len(), 1); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, Position { line: 13, character: 11 }); + + if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { + assert_eq!(labels.len(), 2); + assert_eq!(labels[0].value, ": "); + assert_eq!(labels[0].location, None); + assert_eq!(labels[1].value, "Foo"); + + // Check that it points to "Foo" in `struct Foo` + let location = labels[1].location.clone().expect("Expected a location"); + assert_eq!( + location.range, + Range { + start: Position { line: 4, character: 7 }, + end: Position { line: 4, character: 10 } + } + ); + } else { + panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); + } + } + + #[test] + async fn test_type_inlay_hints_in_for() { + let inlay_hints = get_inlay_hints(16, 18, type_hints()).await; + assert_eq!(inlay_hints.len(), 1); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, Position { line: 17, character: 9 }); + + if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { + assert_eq!(labels.len(), 2); + assert_eq!(labels[0].value, ": "); + assert_eq!(labels[0].location, None); + assert_eq!(labels[1].value, "u32"); + } else { + panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); + } + } + + #[test] + async fn test_type_inlay_hints_in_global() { + let inlay_hints = get_inlay_hints(19, 21, type_hints()).await; + assert_eq!(inlay_hints.len(), 1); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, Position { line: 20, character: 10 }); + + if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { + assert_eq!(labels.len(), 2); + assert_eq!(labels[0].value, ": "); + assert_eq!(labels[0].location, None); + assert_eq!(labels[1].value, "Field"); + } else { + panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); + } + } + + #[test] + async fn test_do_not_panic_when_given_line_is_too_big() { + let inlay_hints = get_inlay_hints(0, 100000, type_hints()).await; + assert!(!inlay_hints.is_empty()); + } + + #[test] + async fn test_do_not_collect_parameter_inlay_hints_if_disabled() { + let inlay_hints = get_inlay_hints(24, 26, no_hints()).await; + assert!(inlay_hints.is_empty()); + } + + #[test] + async fn test_collect_parameter_inlay_hints_in_function_call() { + let inlay_hints = get_inlay_hints(24, 26, parameter_hints()).await; + assert_eq!(inlay_hints.len(), 2); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, Position { line: 25, character: 12 }); + if let InlayHintLabel::String(label) = &inlay_hint.label { + assert_eq!(label, "one: "); + } else { + panic!("Expected InlayHintLabel::String, got {:?}", inlay_hint.label); + } + + let inlay_hint = &inlay_hints[1]; + assert_eq!(inlay_hint.position, Position { line: 25, character: 15 }); + if let InlayHintLabel::String(label) = &inlay_hint.label { + assert_eq!(label, "two: "); + } else { + panic!("Expected InlayHintLabel::String, got {:?}", inlay_hint.label); + } + } + + #[test] + async fn test_collect_parameter_inlay_hints_in_method_call() { + let inlay_hints = get_inlay_hints(36, 39, parameter_hints()).await; + assert_eq!(inlay_hints.len(), 1); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, Position { line: 38, character: 18 }); + if let InlayHintLabel::String(label) = &inlay_hint.label { + assert_eq!(label, "one: "); + } else { + panic!("Expected InlayHintLabel::String, got {:?}", inlay_hint.label); + } + } + + #[test] + async fn 
test_do_not_show_parameter_inlay_hints_if_name_matches_var_name() { + let inlay_hints = get_inlay_hints(41, 45, parameter_hints()).await; + assert!(inlay_hints.is_empty()); + } + + #[test] + async fn test_do_not_show_parameter_inlay_hints_if_name_matches_member_name() { + let inlay_hints = get_inlay_hints(48, 52, parameter_hints()).await; + assert!(inlay_hints.is_empty()); + } + + #[test] + async fn test_do_not_show_parameter_inlay_hints_if_name_matches_call_name() { + let inlay_hints = get_inlay_hints(57, 60, parameter_hints()).await; + assert!(inlay_hints.is_empty()); + } + + #[test] + async fn test_do_not_show_parameter_inlay_hints_if_single_param_name_is_suffix_of_function_name( + ) { + let inlay_hints = get_inlay_hints(64, 67, parameter_hints()).await; + assert!(inlay_hints.is_empty()); + } + + #[test] + async fn test_do_not_show_parameter_inlay_hints_if_param_name_starts_with_underscore() { + let inlay_hints = get_inlay_hints(71, 73, parameter_hints()).await; + assert!(inlay_hints.is_empty()); + } + + #[test] + async fn test_do_not_show_parameter_inlay_hints_if_single_argument_with_single_letter() { + let inlay_hints = get_inlay_hints(77, 79, parameter_hints()).await; + assert!(inlay_hints.is_empty()); + } + + #[test] + async fn test_do_not_show_parameter_inlay_hints_if_param_name_is_suffix_of_arg_name() { + let inlay_hints = get_inlay_hints(89, 92, parameter_hints()).await; + assert!(inlay_hints.is_empty()); + } +} diff --git a/tooling/lsp/src/requests/mod.rs b/tooling/lsp/src/requests/mod.rs index 48299ff7459..4d261c1b50a 100644 --- a/tooling/lsp/src/requests/mod.rs +++ b/tooling/lsp/src/requests/mod.rs @@ -1,5 +1,6 @@ -use std::future::Future; +use std::{collections::HashMap, future::Future}; +use crate::insert_all_files_for_workspace_into_file_manager; use crate::{ parse_diff, resolve_workspace_for_source_path, types::{CodeLensOptions, InitializeParams}, @@ -11,10 +12,9 @@ use lsp_types::{ TextDocumentSyncCapability, TextDocumentSyncKind, TypeDefinitionProviderCapability, Url, WorkDoneProgressOptions, }; -use nargo::insert_all_files_for_workspace_into_file_manager; use nargo_fmt::Config; use noirc_driver::file_manager_with_stdlib; -use noirc_frontend::macros_api::NodeInterner; +use noirc_frontend::{graph::Dependency, macros_api::NodeInterner}; use serde::{Deserialize, Serialize}; use crate::{ @@ -33,8 +33,11 @@ use crate::{ // and params passed in. 
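+// New request handlers for document symbols, hover, and inlay hints are registered below.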
mod code_lens_request; +mod document_symbol; mod goto_declaration; mod goto_definition; +mod hover; +mod inlay_hint; mod profile_run; mod references; mod rename; @@ -43,23 +46,49 @@ mod tests; pub(crate) use { code_lens_request::collect_lenses_for_package, code_lens_request::on_code_lens_request, - goto_declaration::on_goto_declaration_request, goto_definition::on_goto_definition_request, - goto_definition::on_goto_type_definition_request, profile_run::on_profile_run_request, - references::on_references_request, rename::on_prepare_rename_request, - rename::on_rename_request, test_run::on_test_run_request, tests::on_tests_request, + document_symbol::on_document_symbol_request, goto_declaration::on_goto_declaration_request, + goto_definition::on_goto_definition_request, goto_definition::on_goto_type_definition_request, + hover::on_hover_request, inlay_hint::on_inlay_hint_request, + profile_run::on_profile_run_request, references::on_references_request, + rename::on_prepare_rename_request, rename::on_rename_request, test_run::on_test_run_request, + tests::on_tests_request, }; /// LSP client will send initialization request after the server has started. /// [InitializeParams].`initialization_options` will contain the options sent from the client. -#[derive(Debug, Deserialize, Serialize)] -struct LspInitializationOptions { +#[derive(Debug, Deserialize, Serialize, Copy, Clone)] +pub(crate) struct LspInitializationOptions { /// Controls whether code lens is enabled by the server /// By default this will be set to true (enabled). #[serde(rename = "enableCodeLens", default = "default_enable_code_lens")] - enable_code_lens: bool, + pub(crate) enable_code_lens: bool, #[serde(rename = "enableParsingCache", default = "default_enable_parsing_cache")] - enable_parsing_cache: bool, + pub(crate) enable_parsing_cache: bool, + + #[serde(rename = "inlayHints", default = "default_inlay_hints")] + pub(crate) inlay_hints: InlayHintsOptions, +} + +#[derive(Debug, Deserialize, Serialize, Copy, Clone)] +pub(crate) struct InlayHintsOptions { + #[serde(rename = "typeHints", default = "default_type_hints")] + pub(crate) type_hints: TypeHintsOptions, + + #[serde(rename = "parameterHints", default = "default_parameter_hints")] + pub(crate) parameter_hints: ParameterHintsOptions, +} + +#[derive(Debug, Deserialize, Serialize, Copy, Clone)] +pub(crate) struct TypeHintsOptions { + #[serde(rename = "enabled", default = "default_type_hints_enabled")] + pub(crate) enabled: bool, +} + +#[derive(Debug, Deserialize, Serialize, Copy, Clone)] +pub(crate) struct ParameterHintsOptions { + #[serde(rename = "enabled", default = "default_parameter_hints_enabled")] + pub(crate) enabled: bool, } fn default_enable_code_lens() -> bool { @@ -70,11 +99,35 @@ fn default_enable_parsing_cache() -> bool { true } +fn default_inlay_hints() -> InlayHintsOptions { + InlayHintsOptions { + type_hints: default_type_hints(), + parameter_hints: default_parameter_hints(), + } +} + +fn default_type_hints() -> TypeHintsOptions { + TypeHintsOptions { enabled: default_type_hints_enabled() } +} + +fn default_type_hints_enabled() -> bool { + true +} + +fn default_parameter_hints() -> ParameterHintsOptions { + ParameterHintsOptions { enabled: default_parameter_hints_enabled() } +} + +fn default_parameter_hints_enabled() -> bool { + true +} + impl Default for LspInitializationOptions { fn default() -> Self { Self { enable_code_lens: default_enable_code_lens(), enable_parsing_cache: default_enable_parsing_cache(), + inlay_hints: default_inlay_hints(), } } } @@ 
-88,7 +141,7 @@ pub(crate) fn on_initialize( .initialization_options .and_then(|value| serde_json::from_value(value).ok()) .unwrap_or_default(); - state.parsing_cache_enabled = initialization_options.enable_parsing_cache; + state.options = initialization_options; async move { let text_document_sync = TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL); @@ -127,6 +180,25 @@ pub(crate) fn on_initialize( work_done_progress: None, }, })), + hover_provider: Some(lsp_types::OneOf::Right(lsp_types::HoverOptions { + work_done_progress_options: WorkDoneProgressOptions { + work_done_progress: None, + }, + })), + inlay_hint_provider: Some(lsp_types::OneOf::Right(lsp_types::InlayHintOptions { + work_done_progress_options: WorkDoneProgressOptions { + work_done_progress: None, + }, + resolve_provider: None, + })), + document_symbol_provider: Some(lsp_types::OneOf::Right( + lsp_types::DocumentSymbolOptions { + work_done_progress_options: WorkDoneProgressOptions { + work_done_progress: None, + }, + label: Some("Noir".to_string()), + }, + )), }, server_info: None, }) @@ -264,26 +336,42 @@ pub(crate) fn on_shutdown( async { Ok(()) } } +pub(crate) struct ProcessRequestCallbackArgs<'a> { + location: noirc_errors::Location, + files: &'a FileMap, + interner: &'a NodeInterner, + interners: &'a HashMap, + root_crate_name: String, + root_crate_dependencies: &'a Vec, +} + pub(crate) fn process_request( state: &mut LspState, text_document_position_params: TextDocumentPositionParams, callback: F, ) -> Result where - F: FnOnce(noirc_errors::Location, &NodeInterner, &FileMap) -> T, + F: FnOnce(ProcessRequestCallbackArgs) -> T, { let file_path = text_document_position_params.text_document.uri.to_file_path().map_err(|_| { ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") })?; - let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); - let package = workspace.members.first().unwrap(); + let workspace = + resolve_workspace_for_source_path(file_path.as_path(), &state.root_path).unwrap(); + let package = crate::workspace_package_for_file(&workspace, &file_path).ok_or_else(|| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file") + })?; let package_root_path: String = package.root_dir.as_os_str().to_string_lossy().into(); let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + insert_all_files_for_workspace_into_file_manager( + state, + &workspace, + &mut workspace_file_manager, + ); let parsed_files = parse_diff(&workspace_file_manager, state); let (mut context, crate_id) = @@ -294,7 +382,7 @@ where interner = def_interner; } else { // We ignore the warnings and errors produced by compilation while resolving the definition - let _ = noirc_driver::check_crate(&mut context, crate_id, false, false, false, None); + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false, None); interner = &context.def_interner; } @@ -306,7 +394,88 @@ where &text_document_position_params.position, )?; - Ok(callback(location, interner, files)) + Ok(callback(ProcessRequestCallbackArgs { + location, + files, + interner, + interners: &state.cached_definitions, + root_crate_name: package.name.to_string(), + root_crate_dependencies: &context.crate_graph[context.root_crate_id()].dependencies, + })) +} +pub(crate) fn find_all_references_in_workspace( + location: noirc_errors::Location, + interner: &NodeInterner, + cached_interners: 
&HashMap, + files: &FileMap, + include_declaration: bool, + include_self_type_name: bool, +) -> Option> { + // First find the node that's referenced by the given location, if any + let referenced = interner.find_referenced(location); + + if let Some(referenced) = referenced { + // If we found the referenced node, find its location + let referenced_location = interner.reference_location(referenced); + + // Now we find all references that point to this location, in all interners + // (there's one interner per package, and all interners in a workspace rely on the + // same FileManager so a Location/FileId in one package is the same as in another package) + let mut locations = find_all_references( + referenced_location, + interner, + files, + include_declaration, + include_self_type_name, + ); + for interner in cached_interners.values() { + locations.extend(find_all_references( + referenced_location, + interner, + files, + include_declaration, + include_self_type_name, + )); + } + + // The LSP client usually removes duplicate loctions, but we do it here just in case they don't + locations.sort_by_key(|location| { + ( + location.uri.to_string(), + location.range.start.line, + location.range.start.character, + location.range.end.line, + location.range.end.character, + ) + }); + locations.dedup(); + + if locations.is_empty() { + None + } else { + Some(locations) + } + } else { + None + } +} + +pub(crate) fn find_all_references( + referenced_location: noirc_errors::Location, + interner: &NodeInterner, + files: &FileMap, + include_declaration: bool, + include_self_type_name: bool, +) -> Vec { + interner + .find_all_references(referenced_location, include_declaration, include_self_type_name) + .map(|locations| { + locations + .iter() + .filter_map(|location| to_lsp_location(files, location.file, location.span)) + .collect() + }) + .unwrap_or_default() } #[cfg(test)] diff --git a/tooling/lsp/src/requests/profile_run.rs b/tooling/lsp/src/requests/profile_run.rs index 57bc3299455..d3b7743557a 100644 --- a/tooling/lsp/src/requests/profile_run.rs +++ b/tooling/lsp/src/requests/profile_run.rs @@ -3,9 +3,10 @@ use std::{ future::{self, Future}, }; +use crate::insert_all_files_for_workspace_into_file_manager; use acvm::acir::circuit::ExpressionWidth; use async_lsp::{ErrorCode, ResponseError}; -use nargo::{insert_all_files_for_workspace_into_file_manager, ops::report_errors}; +use nargo::ops::report_errors; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_artifacts::debug::DebugArtifact; use noirc_driver::{ @@ -53,7 +54,11 @@ fn on_profile_run_request_inner( })?; let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + insert_all_files_for_workspace_into_file_manager( + state, + &workspace, + &mut workspace_file_manager, + ); let parsed_files = parse_diff(&workspace_file_manager, state); // Since we filtered on crate name, this should be the only item in the iterator diff --git a/tooling/lsp/src/requests/references.rs b/tooling/lsp/src/requests/references.rs index f8c23632936..375e0b69aed 100644 --- a/tooling/lsp/src/requests/references.rs +++ b/tooling/lsp/src/requests/references.rs @@ -5,32 +5,34 @@ use lsp_types::{Location, ReferenceParams}; use crate::LspState; -use super::{process_request, to_lsp_location}; +use super::{find_all_references_in_workspace, process_request}; pub(crate) fn on_references_request( state: &mut LspState, params: 
ReferenceParams, ) -> impl Future>, ResponseError>> { - let result = - process_request(state, params.text_document_position, |location, interner, files| { - interner.find_all_references(location, params.context.include_declaration, true).map( - |locations| { - locations - .iter() - .filter_map(|location| to_lsp_location(files, location.file, location.span)) - .collect() - }, - ) - }); + let include_declaration = params.context.include_declaration; + let result = process_request(state, params.text_document_position, |args| { + find_all_references_in_workspace( + args.location, + args.interner, + args.interners, + args.files, + include_declaration, + true, + ) + }); future::ready(result) } #[cfg(test)] mod references_tests { use super::*; + use crate::notifications; use crate::test_utils::{self, search_in_file}; use lsp_types::{ - PartialResultParams, ReferenceContext, TextDocumentPositionParams, WorkDoneProgressParams, + PartialResultParams, Position, Range, ReferenceContext, TextDocumentPositionParams, Url, + WorkDoneProgressParams, }; use tokio::test; @@ -91,4 +93,79 @@ mod references_tests { async fn test_on_references_request_without_including_declaration() { check_references_succeeds("rename_function", "another_function", 0, false).await; } + + #[test] + async fn test_on_references_request_works_accross_workspace_packages() { + let (mut state, noir_text_document) = test_utils::init_lsp_server("workspace").await; + + // noir_text_document is always `src/main.nr` in the workspace directory, so let's go to the workspace dir + let noir_text_document = noir_text_document.to_file_path().unwrap(); + let workspace_dir = noir_text_document.parent().unwrap().parent().unwrap(); + + // Let's check that we can find references to `function_one` by doing that in the package "one" + // and getting results in the package "two" too. 
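+ // (All packages in a workspace share the same FileManager, so locations gathered from different interners can be merged.)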
+ let one_lib = Url::from_file_path(workspace_dir.join("one/src/lib.nr")).unwrap(); + let two_lib = Url::from_file_path(workspace_dir.join("two/src/lib.nr")).unwrap(); + + // We call this to open the document, so that the entire workspace is analyzed + let only_process_document_uri_package = false; + let output_diagnostics = true; + + notifications::process_workspace_for_noir_document( + &mut state, + one_lib.clone(), + only_process_document_uri_package, + output_diagnostics, + ) + .unwrap(); + + let params = ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: lsp_types::TextDocumentIdentifier { uri: one_lib.clone() }, + position: Position { line: 0, character: 7 }, + }, + work_done_progress_params: WorkDoneProgressParams { work_done_token: None }, + partial_result_params: PartialResultParams { partial_result_token: None }, + context: ReferenceContext { include_declaration: true }, + }; + + let mut locations = on_references_request(&mut state, params) + .await + .expect("Could not execute on_references_request") + .unwrap(); + + // The definition, a use in "two", and a call in "two" + assert_eq!(locations.len(), 3); + + locations.sort_by_cached_key(|location| { + (location.uri.to_file_path().unwrap(), location.range.start.line) + }); + + assert_eq!(locations[0].uri, one_lib); + assert_eq!( + locations[0].range, + Range { + start: Position { line: 0, character: 7 }, + end: Position { line: 0, character: 19 }, + } + ); + + assert_eq!(locations[1].uri, two_lib); + assert_eq!( + locations[1].range, + Range { + start: Position { line: 0, character: 9 }, + end: Position { line: 0, character: 21 }, + } + ); + + assert_eq!(locations[2].uri, two_lib); + assert_eq!( + locations[2].range, + Range { + start: Position { line: 3, character: 4 }, + end: Position { line: 3, character: 16 }, + } + ); + } } diff --git a/tooling/lsp/src/requests/rename.rs b/tooling/lsp/src/requests/rename.rs index 906a5cbcaab..84956681167 100644 --- a/tooling/lsp/src/requests/rename.rs +++ b/tooling/lsp/src/requests/rename.rs @@ -11,14 +11,14 @@ use noirc_frontend::node_interner::ReferenceId; use crate::LspState; -use super::{process_request, to_lsp_location}; +use super::{find_all_references_in_workspace, process_request}; pub(crate) fn on_prepare_rename_request( state: &mut LspState, params: TextDocumentPositionParams, ) -> impl Future, ResponseError>> { - let result = process_request(state, params, |location, interner, _| { - let reference_id = interner.reference_at_location(location); + let result = process_request(state, params, |args| { + let reference_id = args.interner.reference_at_location(args.location); let rename_possible = match reference_id { // Rename shouldn't be possible when triggered on top of "Self" Some(ReferenceId::Reference(_, true /* is self type name */)) => false, @@ -34,41 +34,36 @@ pub(crate) fn on_rename_request( state: &mut LspState, params: RenameParams, ) -> impl Future, ResponseError>> { - let result = - process_request(state, params.text_document_position, |location, interner, files| { - let rename_changes = - interner.find_all_references(location, true, false).map(|locations| { - let rs = locations.iter().fold( - HashMap::new(), - |mut acc: HashMap>, location| { - let file_id = location.file; - let span = location.span; - - let Some(lsp_location) = to_lsp_location(files, file_id, span) else { - return acc; - }; - - let edit = TextEdit { - range: lsp_location.range, - new_text: params.new_name.clone(), - }; - - 
acc.entry(lsp_location.uri).or_default().push(edit); - - acc - }, - ); - rs - }); - - let response = WorkspaceEdit { - changes: rename_changes, - document_changes: None, - change_annotations: None, - }; - - Some(response) + let result = process_request(state, params.text_document_position, |args| { + let rename_changes = find_all_references_in_workspace( + args.location, + args.interner, + args.interners, + args.files, + true, + false, + ) + .map(|locations| { + let rs = locations.iter().fold( + HashMap::new(), + |mut acc: HashMap>, location| { + let edit = + TextEdit { range: location.range, new_text: params.new_name.clone() }; + acc.entry(location.uri.clone()).or_default().push(edit); + acc + }, + ); + rs }); + + let response = WorkspaceEdit { + changes: rename_changes, + document_changes: None, + change_annotations: None, + }; + + Some(response) + }); future::ready(result) } @@ -175,6 +170,11 @@ mod rename_tests { check_rename_succeeds("rename_function_use", "some_function").await; } + #[test] + async fn test_rename_method() { + check_rename_succeeds("rename_function", "some_method").await; + } + #[test] async fn test_rename_struct() { check_rename_succeeds("rename_struct", "Foo").await; diff --git a/tooling/lsp/src/requests/test_run.rs b/tooling/lsp/src/requests/test_run.rs index b4b9b62d6b6..bf4d9763faf 100644 --- a/tooling/lsp/src/requests/test_run.rs +++ b/tooling/lsp/src/requests/test_run.rs @@ -1,10 +1,8 @@ use std::future::{self, Future}; +use crate::insert_all_files_for_workspace_into_file_manager; use async_lsp::{ErrorCode, ResponseError}; -use nargo::{ - insert_all_files_for_workspace_into_file_manager, - ops::{run_test, TestStatus}, -}; +use nargo::ops::{run_test, TestStatus}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ check_crate, file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, @@ -51,7 +49,11 @@ fn on_test_run_request_inner( })?; let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + insert_all_files_for_workspace_into_file_manager( + state, + &workspace, + &mut workspace_file_manager, + ); let parsed_files = parse_diff(&workspace_file_manager, state); // Since we filtered on crate name, this should be the only item in the iterator @@ -59,7 +61,7 @@ fn on_test_run_request_inner( Some(package) => { let (mut context, crate_id) = crate::prepare_package(&workspace_file_manager, &parsed_files, package); - if check_crate(&mut context, crate_id, false, false, false, None).is_err() { + if check_crate(&mut context, crate_id, false, false, None).is_err() { let result = NargoTestRunResult { id: params.id.clone(), result: "error".to_string(), diff --git a/tooling/lsp/src/requests/tests.rs b/tooling/lsp/src/requests/tests.rs index fb8b845df04..20b96029696 100644 --- a/tooling/lsp/src/requests/tests.rs +++ b/tooling/lsp/src/requests/tests.rs @@ -1,8 +1,8 @@ use std::future::{self, Future}; +use crate::insert_all_files_for_workspace_into_file_manager; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; use lsp_types::{LogMessageParams, MessageType}; -use nargo::insert_all_files_for_workspace_into_file_manager; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{check_crate, file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING}; @@ -51,7 +51,11 @@ fn on_tests_request_inner( })?; let mut workspace_file_manager = 
file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + insert_all_files_for_workspace_into_file_manager( + state, + &workspace, + &mut workspace_file_manager, + ); let parsed_files = parse_diff(&workspace_file_manager, state); let package_tests: Vec<_> = workspace @@ -61,7 +65,7 @@ fn on_tests_request_inner( crate::prepare_package(&workspace_file_manager, &parsed_files, package); // We ignore the warnings and errors produced by compilation for producing tests // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false, false, None); + let _ = check_crate(&mut context, crate_id, false, false, None); // We don't add test headings for a package if it contains no `#[test]` functions get_package_tests_in_crate(&context, &crate_id, &package.name) diff --git a/tooling/lsp/src/types.rs b/tooling/lsp/src/types.rs index 57eb2dd3618..fa3234cf3bb 100644 --- a/tooling/lsp/src/types.rs +++ b/tooling/lsp/src/types.rs @@ -1,7 +1,7 @@ use fm::FileId; use lsp_types::{ - DeclarationCapability, DefinitionOptions, OneOf, ReferencesOptions, RenameOptions, - TypeDefinitionProviderCapability, + DeclarationCapability, DefinitionOptions, DocumentSymbolOptions, HoverOptions, + InlayHintOptions, OneOf, ReferencesOptions, RenameOptions, TypeDefinitionProviderCapability, }; use noirc_driver::DebugFile; use noirc_errors::{debug_info::OpCodesCount, Location}; @@ -144,6 +144,18 @@ pub(crate) struct ServerCapabilities { /// The server provides references support. #[serde(skip_serializing_if = "Option::is_none")] pub(crate) references_provider: Option>, + + /// The server provides hover support. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) hover_provider: Option>, + + /// The server provides inlay hints support. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) inlay_hint_provider: Option>, + + /// The server provides document symbol support. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) document_symbol_provider: Option>, } #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] diff --git a/tooling/lsp/test_programs/document_symbol/Nargo.toml b/tooling/lsp/test_programs/document_symbol/Nargo.toml new file mode 100644 index 00000000000..367b145f045 --- /dev/null +++ b/tooling/lsp/test_programs/document_symbol/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "document_symbol" +type = "bin" +authors = [""] + +[dependencies] diff --git a/tooling/lsp/test_programs/document_symbol/src/main.nr b/tooling/lsp/test_programs/document_symbol/src/main.nr new file mode 100644 index 00000000000..39b2c7fff12 --- /dev/null +++ b/tooling/lsp/test_programs/document_symbol/src/main.nr @@ -0,0 +1,26 @@ +fn foo(_x: i32) { + let _ = 1; +} + +struct SomeStruct { + field: i32, +} + +impl SomeStruct { + fn new() -> SomeStruct { + SomeStruct { field: 0 } + } +} + +trait SomeTrait { + fn some_method(x: U); +} + +impl SomeTrait for SomeStruct { + fn some_method(_x: i32) { + } +} + +mod submodule { + global SOME_GLOBAL = 1; +} diff --git a/tooling/lsp/test_programs/go_to_definition/src/main.nr b/tooling/lsp/test_programs/go_to_definition/src/main.nr index 76a367259b5..9223fdc0bd3 100644 --- a/tooling/lsp/test_programs/go_to_definition/src/main.nr +++ b/tooling/lsp/test_programs/go_to_definition/src/main.nr @@ -18,3 +18,12 @@ fn main() { bar::baz(); bar::inline::qux(); } + +struct Foo { + +} + +trait Trait { + +} + diff --git a/tooling/lsp/test_programs/inlay_hints/Nargo.toml b/tooling/lsp/test_programs/inlay_hints/Nargo.toml new file mode 100644 index 00000000000..89f8eda14be --- /dev/null +++ b/tooling/lsp/test_programs/inlay_hints/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "inlay_hints" +type = "bin" +authors = [""] + +[dependencies] diff --git a/tooling/lsp/test_programs/inlay_hints/src/main.nr b/tooling/lsp/test_programs/inlay_hints/src/main.nr new file mode 100644 index 00000000000..2b53f8de339 --- /dev/null +++ b/tooling/lsp/test_programs/inlay_hints/src/main.nr @@ -0,0 +1,94 @@ +fn main() { + let var = 0; +} + +struct Foo { + +} + +fn make_foo() -> Foo { + Foo {} +} + +fn foo() { + let foo = make_foo(); +} + +fn test_for() { + for i in 0..10 {} +} + +global var = 0; + +fn test_fn(one: i32, two: i32) {} + +fn call_test_fn() { + test_fn(1, 2); // Should show parameter names +} + +struct SomeStruct { + one: i32, +} + +impl SomeStruct { + fn some_method(self, one: i32) {} +} + +fn call_method() { + let s = SomeStruct { one: 1 }; + s.some_method(1); // Should show parameter names +} + +fn call_where_name_matches() { + let one = 1; + let two = 2; + test_fn(one, two); // Should not show parameter names (names match) +} + +fn call_where_member_name_matches() { + let s = SomeStruct { one: 1 }; + let two = 2; + test_fn(s.one, two); // Should not show parameter names (member name matches) +} + +fn one() -> i32 { + 1 +} + +fn call_where_call_matches_name() { + let two = 2; + test_fn(one(), two); // Should not show parameter names (call name matches) +} + +fn with_arg(arg: i32) {} + +fn call_with_arg() { + let x = 1; + with_arg(x); // Should not show parameter names ("arg" is a suffix of "with_arg") +} + +fn with_underscore(_x: i32) {} + +fn call_with_underscore() { + with_underscore(1); // Should not show parameter names (param name starts with underscore) +} + +fn one_arg_with_one_char(x: i32) {} + +fn call_one_arg_with_one_char() { + one_arg_with_one_char(1); // Should not show parameter names (only one param and it's a single 
letter) +} + +fn one_arg_with_obvious_name(other: i32) {} + +fn call_one_arg_with_obvious_name() { + one_arg_with_obvious_name(1); // Should not show parameter names (only one param and it's an obvious name) +} + +fn yet_another_function(name: i32) {} + +fn call_yet_another_function() { + let some_name = 1; + yet_another_function(some_name) // Should not show parameter names ("name" is a suffix of "some_name") +} + diff --git a/tooling/lsp/test_programs/rename_function/src/main.nr b/tooling/lsp/test_programs/rename_function/src/main.nr index 7a70084276e..e77b50c0b26 100644 --- a/tooling/lsp/test_programs/rename_function/src/main.nr +++ b/tooling/lsp/test_programs/rename_function/src/main.nr @@ -25,3 +25,16 @@ use foo::some_other_function as bar; fn x() { bar(); } + +struct SomeStruct { + +} + +impl SomeStruct { + fn some_method(self) {} +} + +fn y() { + let some_struct = SomeStruct {}; + some_struct.some_method(); +} diff --git a/tooling/lsp/test_programs/workspace/Nargo.toml b/tooling/lsp/test_programs/workspace/Nargo.toml new file mode 100644 index 00000000000..d0a0badc295 --- /dev/null +++ b/tooling/lsp/test_programs/workspace/Nargo.toml @@ -0,0 +1,2 @@ +[workspace] +members = ["one", "two"] diff --git a/tooling/lsp/test_programs/workspace/one/Nargo.toml b/tooling/lsp/test_programs/workspace/one/Nargo.toml new file mode 100644 index 00000000000..39838d73362 --- /dev/null +++ b/tooling/lsp/test_programs/workspace/one/Nargo.toml @@ -0,0 +1,4 @@ +[package] +name = "one" +authors = [] +type = "lib" diff --git a/tooling/lsp/test_programs/workspace/one/src/lib.nr b/tooling/lsp/test_programs/workspace/one/src/lib.nr new file mode 100644 index 00000000000..61f282fa2a7 --- /dev/null +++ b/tooling/lsp/test_programs/workspace/one/src/lib.nr @@ -0,0 +1,25 @@ +pub fn function_one() {} + +mod subone { + struct SubOneStruct { + some_field: i32, + some_other_field: Field, + } + + impl SubOneStruct { + fn foo(self, x: i32, y: i32) -> Field { + 0 + } + } + + trait SomeTrait { + } + + global some_global = 2; + + type SomeAlias = i32; + + struct GenericStruct { + + } +} diff --git a/tooling/lsp/test_programs/workspace/two/Nargo.toml b/tooling/lsp/test_programs/workspace/two/Nargo.toml new file mode 100644 index 00000000000..26d99b65df1 --- /dev/null +++ b/tooling/lsp/test_programs/workspace/two/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "two" +authors = [] +type = "lib" + +[dependencies] +one = { path = "../one" } \ No newline at end of file diff --git a/tooling/lsp/test_programs/workspace/two/src/lib.nr b/tooling/lsp/test_programs/workspace/two/src/lib.nr new file mode 100644 index 00000000000..3f0f0f117b7 --- /dev/null +++ b/tooling/lsp/test_programs/workspace/two/src/lib.nr @@ -0,0 +1,53 @@ +use one::function_one; + +pub fn function_two() { + function_one() +} + +use one::subone; + +fn use_struct() { + let _ = subone::SubOneStruct { some_field: 0, some_other_field: 2 }; +} + +use one::subone::SomeTrait; + +fn use_global() { + let _ = one::subone::some_global; +} + +fn use_struct_method() { + let s = subone::SubOneStruct { some_field: 0, some_other_field: 2 }; + s.foo(0, 1); +} + +fn use_local_var() { + let regular_var = 0; + let _ = regular_var; + let mut mutable_var = 0; + mutable_var = 1; +} + +fn use_parameter(some_param: i32) { + let _ = some_param; +} + +use one::subone::SomeAlias; + +use std::default::Default; + +fn use_impl_method() { + let _: i32 = Default::default(); +} + +mod other; +use other::another_function; +use crate::other::other_function; + +use one::subone::GenericStruct; + +use 
std::collections::bounded_vec::BoundedVec; + +fn instantiate_generic() { + let x: BoundedVec = BoundedVec::new(); +} diff --git a/tooling/lsp/test_programs/workspace/two/src/other.nr b/tooling/lsp/test_programs/workspace/two/src/other.nr new file mode 100644 index 00000000000..4d2fffcee80 --- /dev/null +++ b/tooling/lsp/test_programs/workspace/two/src/other.nr @@ -0,0 +1,2 @@ +fn other_function() {} +fn another_function() {} diff --git a/tooling/nargo/src/package.rs b/tooling/nargo/src/package.rs index 44f0a3504f7..f55ca5550a3 100644 --- a/tooling/nargo/src/package.rs +++ b/tooling/nargo/src/package.rs @@ -1,5 +1,6 @@ use std::{collections::BTreeMap, fmt::Display, path::PathBuf}; +use acvm::acir::circuit::ExpressionWidth; use noirc_frontend::graph::CrateName; use crate::constants::PROVER_INPUT_FILE; @@ -51,6 +52,7 @@ pub struct Package { pub entry_path: PathBuf, pub name: CrateName, pub dependencies: BTreeMap, + pub expression_width: Option, } impl Package { diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 9dfa0dfe861..74e07efb5c1 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -35,6 +35,7 @@ fn main() { generate_noir_test_failure_tests(&mut test_file, &test_dir); generate_compile_success_empty_tests(&mut test_file, &test_dir); generate_compile_success_contract_tests(&mut test_file, &test_dir); + generate_compile_success_no_bug_tests(&mut test_file, &test_dir); generate_compile_failure_tests(&mut test_file, &test_dir); } @@ -58,21 +59,6 @@ const IGNORED_BRILLIG_TESTS: [&str; 11] = [ "is_unconstrained", ]; -/// Certain features are only available in the elaborator. -/// We skip these tests for non-elaborator code since they are not -/// expected to work there. This can be removed once the old code is removed. 
-const IGNORED_NEW_FEATURE_TESTS: [&str; 9] = [ - "macros", - "wildcard_type", - "type_definition_annotation", - "numeric_generics_explicit", - "derive_impl", - "comptime_traits", - "comptime_slice_methods", - "unary_operator_overloading", - "unquote_multiple_items_from_annotation", -]; - fn read_test_cases( test_data_dir: &Path, test_sub_dir: &str, @@ -133,19 +119,6 @@ fn generate_execution_success_tests(test_file: &mut File, test_data_dir: &Path) nargo.assert().success();"#, ); - if !IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { - generate_test_case( - test_file, - test_type, - &format!("legacy_{test_name}"), - &test_dir, - r#" - nargo.arg("execute").arg("--force").arg("--use-legacy"); - - nargo.assert().success();"#, - ); - } - if !IGNORED_BRILLIG_TESTS.contains(&test_name.as_str()) { generate_test_case( test_file, @@ -177,17 +150,6 @@ fn generate_execution_failure_tests(test_file: &mut File, test_data_dir: &Path) nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, ); - - generate_test_case( - test_file, - test_type, - &format!("legacy_{test_name}"), - &test_dir, - r#" - nargo.arg("execute").arg("--force").arg("--use-legacy"); - - nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, - ); } } @@ -207,17 +169,6 @@ fn generate_noir_test_success_tests(test_file: &mut File, test_data_dir: &Path) nargo.assert().success();"#, ); - - generate_test_case( - test_file, - test_type, - &format!("legacy_{test_name}"), - &test_dir, - r#" - nargo.arg("test").arg("--use-legacy"); - - nargo.assert().success();"#, - ); } } @@ -236,17 +187,6 @@ fn generate_noir_test_failure_tests(test_file: &mut File, test_data_dir: &Path) nargo.assert().failure();"#, ); - - generate_test_case( - test_file, - test_type, - &format!("legacy_{test_name}"), - &test_dir, - r#" - nargo.arg("test").arg("--use-legacy"); - - nargo.assert().failure();"#, - ); } } @@ -283,21 +223,6 @@ fn generate_compile_success_empty_tests(test_file: &mut File, test_data_dir: &Pa {assert_zero_opcodes}"#, ), ); - - if !IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { - generate_test_case( - test_file, - test_type, - &format!("legacy_{test_name}"), - &test_dir, - &format!( - r#" - nargo.arg("info").arg("--json").arg("--force").arg("--use-legacy"); - - {assert_zero_opcodes}"#, - ), - ); - } } } @@ -314,19 +239,26 @@ fn generate_compile_success_contract_tests(test_file: &mut File, test_data_dir: &test_dir, r#" nargo.arg("compile").arg("--force"); - nargo.assert().success();"#, ); + } +} + +/// Generate tests for checking that the contract compiles and there are no "bugs" in stderr +fn generate_compile_success_no_bug_tests(test_file: &mut File, test_data_dir: &Path) { + let test_type = "compile_success_no_bug"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); generate_test_case( test_file, test_type, - &format!("legacy_{test_name}"), + &test_name, &test_dir, r#" - nargo.arg("compile").arg("--force").arg("--use-legacy"); - - nargo.assert().success();"#, + nargo.arg("compile").arg("--force"); + nargo.assert().success().stderr(predicate::str::contains("bug:").not());"#, ); } } @@ -346,18 +278,5 @@ fn generate_compile_failure_tests(test_file: &mut File, test_data_dir: &Path) { nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, ); - - if 
!IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { - generate_test_case( - test_file, - test_type, - &format!("legacy_{test_name}"), - &test_dir, - r#" - nargo.arg("compile").arg("--force").arg("--use-legacy"); - - nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, - ); - } } } diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs index 95726492418..d40bae1ecfd 100644 --- a/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/tooling/nargo_cli/src/cli/check_cmd.rs @@ -87,7 +87,6 @@ fn check_package( compile_options.deny_warnings, compile_options.disable_macros, compile_options.silence_warnings, - compile_options.use_legacy, compile_options.debug_comptime_in_file.as_deref(), )?; @@ -161,17 +160,10 @@ pub(crate) fn check_crate_and_report_errors( deny_warnings: bool, disable_macros: bool, silence_warnings: bool, - use_legacy: bool, debug_comptime_in_file: Option<&str>, ) -> Result<(), CompileError> { - let result = check_crate( - context, - crate_id, - deny_warnings, - disable_macros, - use_legacy, - debug_comptime_in_file, - ); + let result = + check_crate(context, crate_id, deny_warnings, disable_macros, debug_comptime_in_file); report_errors(result, &context.file_manager, deny_warnings, silence_warnings) } diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index e83b1728c93..a2877ebdeac 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -2,6 +2,7 @@ use std::io::Write; use std::path::Path; use std::time::Duration; +use acvm::acir::circuit::ExpressionWidth; use fm::FileManager; use nargo::ops::{collect_errors, compile_contract, compile_program, report_errors}; use nargo::package::Package; @@ -190,7 +191,11 @@ fn compile_programs( compile_options, load_cached_program(package), )?; - let program = nargo::ops::transform_program(program, compile_options.expression_width); + + let target_width = + get_target_width(package.expression_width, compile_options.expression_width); + let program = nargo::ops::transform_program(program, target_width); + save_program_to_file( &program.clone().into(), &package.name, @@ -216,8 +221,9 @@ fn compiled_contracts( .map(|package| { let (contract, warnings) = compile_contract(file_manager, parsed_files, package, compile_options)?; - let contract = - nargo::ops::transform_contract(contract, compile_options.expression_width); + let target_width = + get_target_width(package.expression_width, compile_options.expression_width); + let contract = nargo::ops::transform_contract(contract, target_width); save_contract(contract, package, target_dir, compile_options.show_artifact_paths); Ok(((), warnings)) }) @@ -243,3 +249,21 @@ fn save_contract( println!("Saved contract artifact to: {}", artifact_path.display()); } } + +/// Default expression width used for Noir compilation. +/// The ACVM native type `ExpressionWidth` has its own default which should always be unbounded, +/// while we can sometimes expect the compilation target width to change. +/// Thus, we set it separately here rather than trying to alter the default derivation of the type. +const DEFAULT_EXPRESSION_WIDTH: ExpressionWidth = ExpressionWidth::Bounded { width: 4 }; + +/// If a target width was not specified in the CLI we can safely override the default. 
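[Editorial note, not part of the patch] A minimal sketch of how the precedence implemented by `get_target_width` (added just below) is expected to resolve. It assumes the generics stripped from this hunk are `Option<ExpressionWidth>` for both parameters and `ExpressionWidth` for the return type, and that `ExpressionWidth` derives `PartialEq`/`Debug` so `assert_eq!` can be used; treat it as an illustration placed inside the nargo_cli crate, where the pub(crate) function is visible.

    use acvm::acir::circuit::ExpressionWidth;

    fn expression_width_precedence() {
        // A width supplied through the CLI compile options always wins.
        assert_eq!(
            get_target_width(
                Some(ExpressionWidth::Bounded { width: 3 }), // from the package's Nargo.toml
                Some(ExpressionWidth::Bounded { width: 8 }), // from the CLI
            ),
            ExpressionWidth::Bounded { width: 8 },
        );
        // Without a CLI value, the manifest's `expression_width` is used.
        assert_eq!(
            get_target_width(Some(ExpressionWidth::Bounded { width: 3 }), None),
            ExpressionWidth::Bounded { width: 3 },
        );
        // With neither, DEFAULT_EXPRESSION_WIDTH (Bounded { width: 4 }) applies.
        assert_eq!(get_target_width(None, None), ExpressionWidth::Bounded { width: 4 });
    }

On the manifest side, the nargo_toml changes later in this patch accept the value as a string (the new parse_package_expression_width_toml test sets expression_width = "3" under [package]), which is what ultimately supplies the first argument here.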
+pub(crate) fn get_target_width( + package_default_width: Option, + compile_options_width: Option, +) -> ExpressionWidth { + if let (Some(manifest_default_width), None) = (package_default_width, compile_options_width) { + manifest_default_width + } else { + compile_options_width.unwrap_or(DEFAULT_EXPRESSION_WIDTH) + } +} diff --git a/tooling/nargo_cli/src/cli/debug_cmd.rs b/tooling/nargo_cli/src/cli/debug_cmd.rs index 778009bf791..311af9b9db0 100644 --- a/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -1,6 +1,6 @@ use std::path::PathBuf; -use acvm::acir::native_types::{WitnessMap, WitnessStack}; +use acvm::acir::native_types::WitnessStack; use acvm::FieldElement; use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; @@ -15,7 +15,6 @@ use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_artifacts::debug::DebugArtifact; use noirc_driver::{ file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, }; @@ -23,6 +22,7 @@ use noirc_frontend::debug::DebugInstrumenter; use noirc_frontend::graph::CrateName; use noirc_frontend::hir::ParsedFiles; +use super::compile_cmd::get_target_width; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; use crate::errors::CliError; @@ -81,8 +81,10 @@ pub(crate) fn run(args: DebugCommand, config: NargoConfig) -> Result<(), CliErro args.compile_options.clone(), )?; - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); + let target_width = + get_target_width(package.expression_width, args.compile_options.expression_width); + + let compiled_program = nargo::ops::transform_program(compiled_program, target_width); run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) } @@ -169,10 +171,10 @@ fn run_async( runtime.block_on(async { println!("[{}] Starting debugger", package.name); - let (return_value, solved_witness) = + let (return_value, witness_stack) = debug_program_and_decode(program, package, prover_name)?; - if let Some(solved_witness) = solved_witness { + if let Some(solved_witness_stack) = witness_stack { println!("[{}] Circuit witness successfully solved", package.name); if let Some(return_value) = return_value { @@ -180,11 +182,8 @@ fn run_async( } if let Some(witness_name) = witness_name { - let witness_path = save_witness_to_dir( - WitnessStack::from(solved_witness), - witness_name, - target_dir, - )?; + let witness_path = + save_witness_to_dir(solved_witness_stack, witness_name, target_dir)?; println!("[{}] Witness saved to {}", package.name, witness_path.display()); } @@ -200,38 +199,32 @@ fn debug_program_and_decode( program: CompiledProgram, package: &Package, prover_name: &str, -) -> Result<(Option, Option>), CliError> { +) -> Result<(Option, Option>), CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &program.abi)?; - let solved_witness = debug_program(&program, &inputs_map)?; - - match solved_witness { - Some(witness) => { - let (_, return_value) = program.abi.decode(&witness)?; - Ok((return_value, Some(witness))) + let program_abi = program.abi.clone(); + let witness_stack = debug_program(program, &inputs_map)?; + + match witness_stack { 
+ Some(witness_stack) => { + let main_witness = &witness_stack + .peek() + .expect("Should have at least one witness on the stack") + .witness; + let (_, return_value) = program_abi.decode(main_witness)?; + Ok((return_value, Some(witness_stack))) } None => Ok((None, None)), } } pub(crate) fn debug_program( - compiled_program: &CompiledProgram, + compiled_program: CompiledProgram, inputs_map: &InputMap, -) -> Result>, CliError> { +) -> Result>, CliError> { let initial_witness = compiled_program.abi.encode(inputs_map, None)?; - let debug_artifact = DebugArtifact { - debug_symbols: compiled_program.debug.clone(), - file_map: compiled_program.file_map.clone(), - }; - - noir_debugger::debug_circuit( - &Bn254BlackBoxSolver, - &compiled_program.program.functions[0], - debug_artifact, - initial_witness, - &compiled_program.program.unconstrained_functions, - ) - .map_err(CliError::from) + noir_debugger::run_repl_session(&Bn254BlackBoxSolver, compiled_program, initial_witness) + .map_err(CliError::from) } diff --git a/tooling/nargo_cli/src/cli/export_cmd.rs b/tooling/nargo_cli/src/cli/export_cmd.rs index 105190c653f..1b7ba97d68d 100644 --- a/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/tooling/nargo_cli/src/cli/export_cmd.rs @@ -89,7 +89,6 @@ fn compile_exported_functions( compile_options.deny_warnings, compile_options.disable_macros, compile_options.silence_warnings, - compile_options.use_legacy, compile_options.debug_comptime_in_file.as_deref(), )?; diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index 3759fb31c76..a6395d1c8c9 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -16,7 +16,9 @@ use serde::Serialize; use crate::errors::CliError; use super::{ - compile_cmd::compile_workspace_full, fs::program::read_program_from_file, NargoConfig, + compile_cmd::{compile_workspace_full, get_target_width}, + fs::program::read_program_from_file, + NargoConfig, }; /// Provides detailed information on each of a program's function (represented by a single circuit) @@ -84,11 +86,9 @@ pub(crate) fn run(args: InfoCommand, config: NargoConfig) -> Result<(), CliError .into_iter() .par_bridge() .map(|(package, program)| { - count_opcodes_and_gates_in_program( - program, - &package, - args.compile_options.expression_width, - ) + let target_width = + get_target_width(package.expression_width, args.compile_options.expression_width); + count_opcodes_and_gates_in_program(program, &package, target_width) }) .collect(); diff --git a/tooling/nargo_cli/src/cli/mod.rs b/tooling/nargo_cli/src/cli/mod.rs index 485ccc7abaf..10ec38ad1d5 100644 --- a/tooling/nargo_cli/src/cli/mod.rs +++ b/tooling/nargo_cli/src/cli/mod.rs @@ -63,7 +63,6 @@ enum NargoCommand { Execute(execute_cmd::ExecuteCommand), #[command(hide = true)] // Hidden while the feature is being built out Export(export_cmd::ExportCommand), - #[command(hide = true)] // Hidden while the feature is being built out Debug(debug_cmd::DebugCommand), Test(test_cmd::TestCommand), Info(info_cmd::InfoCommand), diff --git a/tooling/nargo_cli/src/cli/test_cmd.rs b/tooling/nargo_cli/src/cli/test_cmd.rs index 27c66c956d9..c8848e2e304 100644 --- a/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/tooling/nargo_cli/src/cli/test_cmd.rs @@ -176,7 +176,6 @@ fn run_test + Default>( crate_id, compile_options.deny_warnings, compile_options.disable_macros, - compile_options.use_legacy, compile_options.debug_comptime_in_file.as_deref(), ) .expect("Any errors should have occurred when collecting test 
functions"); @@ -244,7 +243,6 @@ fn get_tests_in_package( compile_options.deny_warnings, compile_options.disable_macros, compile_options.silence_warnings, - compile_options.use_legacy, compile_options.debug_comptime_in_file.as_deref(), )?; diff --git a/tooling/nargo_cli/tests/stdlib-tests.rs b/tooling/nargo_cli/tests/stdlib-tests.rs index 10711b6d011..c4cc792438e 100644 --- a/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/tooling/nargo_cli/tests/stdlib-tests.rs @@ -27,12 +27,13 @@ fn run_stdlib_tests() { entry_path: PathBuf::from("main.nr"), name: "stdlib".parse().unwrap(), dependencies: BTreeMap::new(), + expression_width: None, }; let (mut context, dummy_crate_id) = prepare_package(&file_manager, &parsed_files, &dummy_package); - let result = check_crate(&mut context, dummy_crate_id, false, false, false, None); + let result = check_crate(&mut context, dummy_crate_id, false, false, None); report_errors(result, &context.file_manager, true, false) .expect("Error encountered while compiling standard library"); diff --git a/tooling/nargo_fmt/src/rewrite/imports.rs b/tooling/nargo_fmt/src/rewrite/imports.rs index 564ef3fa370..025d354259e 100644 --- a/tooling/nargo_fmt/src/rewrite/imports.rs +++ b/tooling/nargo_fmt/src/rewrite/imports.rs @@ -14,6 +14,7 @@ pub(crate) enum UseSegment { List(Vec), Dep, Crate, + Super, } impl UseSegment { @@ -50,6 +51,7 @@ impl UseSegment { } UseSegment::Dep => "dep".into(), UseSegment::Crate => "crate".into(), + UseSegment::Super => "super".into(), } } } @@ -66,6 +68,7 @@ impl UseTree { match use_tree.prefix.kind { ast::PathKind::Crate => result.path.push(UseSegment::Crate), ast::PathKind::Dep => result.path.push(UseSegment::Dep), + ast::PathKind::Super => result.path.push(UseSegment::Super), ast::PathKind::Plain => {} }; diff --git a/tooling/nargo_fmt/tests/expected/use_super.nr b/tooling/nargo_fmt/tests/expected/use_super.nr new file mode 100644 index 00000000000..91fbe7a9df1 --- /dev/null +++ b/tooling/nargo_fmt/tests/expected/use_super.nr @@ -0,0 +1,5 @@ +fn some_func() {} + +mod foo { + use super::some_func; +} diff --git a/tooling/nargo_fmt/tests/input/use_super.nr b/tooling/nargo_fmt/tests/input/use_super.nr new file mode 100644 index 00000000000..a3b7d4cb4e2 --- /dev/null +++ b/tooling/nargo_fmt/tests/input/use_super.nr @@ -0,0 +1,7 @@ +fn some_func() { + +} + +mod foo { + use super::some_func; +} diff --git a/tooling/nargo_toml/Cargo.toml b/tooling/nargo_toml/Cargo.toml index 574972d99e7..7c9faa4562a 100644 --- a/tooling/nargo_toml/Cargo.toml +++ b/tooling/nargo_toml/Cargo.toml @@ -18,6 +18,7 @@ serde.workspace = true thiserror.workspace = true toml.workspace = true url.workspace = true +noirc_driver.workspace = true semver = "1.0.20" [dev-dependencies] diff --git a/tooling/nargo_toml/src/errors.rs b/tooling/nargo_toml/src/errors.rs index 77fe77bcdbb..1ee8e90c8e5 100644 --- a/tooling/nargo_toml/src/errors.rs +++ b/tooling/nargo_toml/src/errors.rs @@ -72,6 +72,9 @@ pub enum ManifestError { #[error("Cyclic package dependency found when processing {cycle}")] CyclicDependency { cycle: String }, + + #[error("Failed to parse expression width with the following error: {0}")] + ParseExpressionWidth(String), } #[allow(clippy::enum_variant_names)] diff --git a/tooling/nargo_toml/src/lib.rs b/tooling/nargo_toml/src/lib.rs index 985cb30dc24..c0d8c7997fd 100644 --- a/tooling/nargo_toml/src/lib.rs +++ b/tooling/nargo_toml/src/lib.rs @@ -14,6 +14,7 @@ use nargo::{ package::{Dependency, Package, PackageType}, workspace::Workspace, }; +use 
noirc_driver::parse_expression_width; use noirc_frontend::graph::CrateName; use serde::Deserialize; @@ -199,6 +200,16 @@ impl PackageConfig { })?; } + let expression_width = self + .package + .expression_width + .as_ref() + .map(|expression_width| { + parse_expression_width(expression_width) + .map_err(|err| ManifestError::ParseExpressionWidth(err.to_string())) + }) + .map_or(Ok(None), |res| res.map(Some))?; + Ok(Package { version: self.package.version.clone(), compiler_required_version: self.package.compiler_version.clone(), @@ -207,6 +218,7 @@ impl PackageConfig { package_type, name, dependencies, + expression_width, }) } } @@ -275,6 +287,7 @@ struct PackageMetadata { // so you will not need to supply an ACIR and compiler version compiler_version: Option, license: Option, + expression_width: Option, } #[derive(Debug, Deserialize, Clone)] @@ -531,3 +544,18 @@ fn parse_workspace_default_member_toml() { assert!(Config::try_from(String::from(src)).is_ok()); assert!(Config::try_from(src).is_ok()); } + +#[test] +fn parse_package_expression_width_toml() { + let src = r#" + [package] + name = "test" + version = "0.1.0" + type = "bin" + authors = [""] + expression_width = "3" + "#; + + assert!(Config::try_from(String::from(src)).is_ok()); + assert!(Config::try_from(src).is_ok()); +} diff --git a/tooling/nargo_toml/src/semver.rs b/tooling/nargo_toml/src/semver.rs index 7c6e2a18b31..253ac82aa34 100644 --- a/tooling/nargo_toml/src/semver.rs +++ b/tooling/nargo_toml/src/semver.rs @@ -89,6 +89,7 @@ mod tests { name: CrateName::from_str("test").unwrap(), dependencies: BTreeMap::new(), version: Some("1.0".to_string()), + expression_width: None, }; if let Err(err) = semver_check_package(&package, &compiler_version) { panic!("semver check should have passed. compiler version is 0.1.0 and required version from the package is 0.1.0\n error: {err:?}") @@ -120,6 +121,7 @@ mod tests { name: CrateName::from_str("test").unwrap(), dependencies: BTreeMap::new(), version: Some("1.0".to_string()), + expression_width: None, }; let valid_dependency = Package { @@ -130,6 +132,7 @@ mod tests { name: CrateName::from_str("good_dependency").unwrap(), dependencies: BTreeMap::new(), version: Some("1.0".to_string()), + expression_width: None, }; let invalid_dependency = Package { compiler_required_version: Some("0.2.0".to_string()), @@ -139,6 +142,7 @@ mod tests { name: CrateName::from_str("bad_dependency").unwrap(), dependencies: BTreeMap::new(), version: Some("1.0".to_string()), + expression_width: None, }; package.dependencies.insert( @@ -179,6 +183,7 @@ mod tests { name: CrateName::from_str("test").unwrap(), dependencies: BTreeMap::new(), version: Some("1.0".to_string()), + expression_width: None, }; if let Err(err) = semver_check_package(&package, &compiler_version) { @@ -198,6 +203,7 @@ mod tests { name: CrateName::from_str("test").unwrap(), dependencies: BTreeMap::new(), version: Some("1.0".to_string()), + expression_width: None, }; if let Err(err) = semver_check_package(&package, &compiler_version) { diff --git a/tooling/noir_codegen/package.json b/tooling/noir_codegen/package.json index eb9694a3b0c..fab6b8466d9 100644 --- a/tooling/noir_codegen/package.json +++ b/tooling/noir_codegen/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.31.0", + "version": "0.32.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/tooling/noir_js/package.json b/tooling/noir_js/package.json index 3bb2ab5826f..a1b2e175688 100644 --- 
a/tooling/noir_js/package.json +++ b/tooling/noir_js/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.31.0", + "version": "0.32.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 12793d70545..06c40d59a6a 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.31.0", + "version": "0.32.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/tooling/noir_js_types/package.json b/tooling/noir_js_types/package.json index 0de1a1fd4be..8548486d58c 100644 --- a/tooling/noir_js_types/package.json +++ b/tooling/noir_js_types/package.json @@ -4,7 +4,7 @@ "The Noir Team " ], "packageManager": "yarn@3.5.1", - "version": "0.31.0", + "version": "0.32.0", "license": "(MIT OR Apache-2.0)", "homepage": "https://noir-lang.org/", "repository": { diff --git a/tooling/noirc_abi_wasm/package.json b/tooling/noirc_abi_wasm/package.json index b33bb159fcd..05588da37a4 100644 --- a/tooling/noirc_abi_wasm/package.json +++ b/tooling/noirc_abi_wasm/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.31.0", + "version": "0.32.0", "license": "(MIT OR Apache-2.0)", "homepage": "https://noir-lang.org/", "repository": { diff --git a/tooling/profiler/src/cli/gates_flamegraph_cmd.rs b/tooling/profiler/src/cli/gates_flamegraph_cmd.rs index 154ac38f4bb..98e89e42015 100644 --- a/tooling/profiler/src/cli/gates_flamegraph_cmd.rs +++ b/tooling/profiler/src/cli/gates_flamegraph_cmd.rs @@ -19,6 +19,9 @@ pub(crate) struct GatesFlamegraphCommand { #[clap(long, short)] backend_path: String, + #[arg(trailing_var_arg = true, allow_hyphen_values = true)] + backend_extra_args: Vec, + /// The output folder for the flamegraph svg files #[clap(long, short)] output: String, @@ -27,7 +30,10 @@ pub(crate) struct GatesFlamegraphCommand { pub(crate) fn run(args: GatesFlamegraphCommand) -> eyre::Result<()> { run_with_provider( &PathBuf::from(args.artifact_path), - &BackendGatesProvider { backend_path: PathBuf::from(args.backend_path) }, + &BackendGatesProvider { + backend_path: PathBuf::from(args.backend_path), + extra_args: args.backend_extra_args, + }, &InfernoFlamegraphGenerator { count_name: "gates".to_string() }, &PathBuf::from(args.output), ) diff --git a/tooling/profiler/src/gates_provider.rs b/tooling/profiler/src/gates_provider.rs index caed2666426..f96b1292987 100644 --- a/tooling/profiler/src/gates_provider.rs +++ b/tooling/profiler/src/gates_provider.rs @@ -10,12 +10,20 @@ pub(crate) trait GatesProvider { pub(crate) struct BackendGatesProvider { pub(crate) backend_path: PathBuf, + pub(crate) extra_args: Vec, } impl GatesProvider for BackendGatesProvider { fn get_gates(&self, artifact_path: &Path) -> eyre::Result { - let backend_gates_response = - Command::new(&self.backend_path).arg("gates").arg("-b").arg(artifact_path).output()?; + let mut backend_gates_cmd = Command::new(&self.backend_path); + + backend_gates_cmd.arg("gates").arg("-b").arg(artifact_path); + + for arg in &self.extra_args { + backend_gates_cmd.arg(arg); + } + + let backend_gates_response = backend_gates_cmd.output()?; // Parse the backend gates command stdout as json let backend_gates_response: BackendGatesResponse = diff --git a/yarn.lock b/yarn.lock index 
dc033a7344f..5a442d77b30 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1747,52 +1747,6 @@ __metadata: languageName: node linkType: hard -"@chainsafe/as-sha256@npm:^0.3.1": - version: 0.3.1 - resolution: "@chainsafe/as-sha256@npm:0.3.1" - checksum: 58ea733be1657b0e31dbf48b0dba862da0833df34a81c1460c7352f04ce90874f70003cbf34d0afb9e5e53a33ee2d63a261a8b12462be85b2ba0a6f7f13d6150 - languageName: node - linkType: hard - -"@chainsafe/persistent-merkle-tree@npm:^0.4.2": - version: 0.4.2 - resolution: "@chainsafe/persistent-merkle-tree@npm:0.4.2" - dependencies: - "@chainsafe/as-sha256": ^0.3.1 - checksum: f9cfcb2132a243992709715dbd28186ab48c7c0c696f29d30857693cca5526bf753974a505ef68ffd5623bbdbcaa10f9083f4dd40bf99eb6408e451cc26a1a9e - languageName: node - linkType: hard - -"@chainsafe/persistent-merkle-tree@npm:^0.5.0": - version: 0.5.0 - resolution: "@chainsafe/persistent-merkle-tree@npm:0.5.0" - dependencies: - "@chainsafe/as-sha256": ^0.3.1 - checksum: 2c67203da776c79cd3a6132e2d672fe132393b2e63dc71604e3134acc8c0ec25cc5e431051545939ea0f7c5ff2066fb806b9e5cab974ca085d046226a1671f7d - languageName: node - linkType: hard - -"@chainsafe/ssz@npm:^0.10.0": - version: 0.10.2 - resolution: "@chainsafe/ssz@npm:0.10.2" - dependencies: - "@chainsafe/as-sha256": ^0.3.1 - "@chainsafe/persistent-merkle-tree": ^0.5.0 - checksum: 6bb70cf741d0a19dd0b28b3f6f067b96fa39f556e2eefa6ac745b21db9c3b3a8393dc3cca8ff4a6ce065ed71ddc3fb1b2b390a92004b9d01067c26e2558e5503 - languageName: node - linkType: hard - -"@chainsafe/ssz@npm:^0.9.2": - version: 0.9.4 - resolution: "@chainsafe/ssz@npm:0.9.4" - dependencies: - "@chainsafe/as-sha256": ^0.3.1 - "@chainsafe/persistent-merkle-tree": ^0.4.2 - case: ^1.6.3 - checksum: c6eaedeae9e5618b3c666ff4507a27647f665a8dcf17d5ca86da4ed4788c5a93868f256d0005467d184fdf35ec03f323517ec2e55ec42492d769540a2ec396bc - languageName: node - linkType: hard - "@colors/colors@npm:1.5.0": version: 1.5.0 resolution: "@colors/colors@npm:1.5.0" @@ -3501,7 +3455,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/abi@npm:5.7.0, @ethersproject/abi@npm:^5.1.2, @ethersproject/abi@npm:^5.7.0": +"@ethersproject/abi@npm:^5.1.2": version: 5.7.0 resolution: "@ethersproject/abi@npm:5.7.0" dependencies: @@ -3518,7 +3472,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/abstract-provider@npm:5.7.0, @ethersproject/abstract-provider@npm:^5.7.0": +"@ethersproject/abstract-provider@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/abstract-provider@npm:5.7.0" dependencies: @@ -3533,7 +3487,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/abstract-signer@npm:5.7.0, @ethersproject/abstract-signer@npm:^5.7.0": +"@ethersproject/abstract-signer@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/abstract-signer@npm:5.7.0" dependencies: @@ -3546,7 +3500,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/address@npm:5.7.0, @ethersproject/address@npm:^5.7.0": +"@ethersproject/address@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/address@npm:5.7.0" dependencies: @@ -3559,7 +3513,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/base64@npm:5.7.0, @ethersproject/base64@npm:^5.7.0": +"@ethersproject/base64@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/base64@npm:5.7.0" dependencies: @@ -3568,17 +3522,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/basex@npm:5.7.0, @ethersproject/basex@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/basex@npm:5.7.0" - dependencies: - "@ethersproject/bytes": 
^5.7.0 - "@ethersproject/properties": ^5.7.0 - checksum: 326087b7e1f3787b5fe6cd1cf2b4b5abfafbc355a45e88e22e5e9d6c845b613ffc5301d629b28d5c4d5e2bfe9ec424e6782c804956dff79be05f0098cb5817de - languageName: node - linkType: hard - -"@ethersproject/bignumber@npm:5.7.0, @ethersproject/bignumber@npm:^5.7.0": +"@ethersproject/bignumber@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/bignumber@npm:5.7.0" dependencies: @@ -3589,7 +3533,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/bytes@npm:5.7.0, @ethersproject/bytes@npm:^5.7.0": +"@ethersproject/bytes@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/bytes@npm:5.7.0" dependencies: @@ -3598,7 +3542,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/constants@npm:5.7.0, @ethersproject/constants@npm:^5.7.0": +"@ethersproject/constants@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/constants@npm:5.7.0" dependencies: @@ -3607,25 +3551,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/contracts@npm:5.7.0": - version: 5.7.0 - resolution: "@ethersproject/contracts@npm:5.7.0" - dependencies: - "@ethersproject/abi": ^5.7.0 - "@ethersproject/abstract-provider": ^5.7.0 - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/address": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/constants": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - checksum: 6ccf1121cba01b31e02f8c507cb971ab6bfed85706484a9ec09878ef1594a62215f43c4fdef8f4a4875b99c4a800bc95e3be69b1803f8ce479e07634b5a740c0 - languageName: node - linkType: hard - -"@ethersproject/hash@npm:5.7.0, @ethersproject/hash@npm:^5.7.0": +"@ethersproject/hash@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/hash@npm:5.7.0" dependencies: @@ -3642,48 +3568,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/hdnode@npm:5.7.0, @ethersproject/hdnode@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/hdnode@npm:5.7.0" - dependencies: - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/basex": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/pbkdf2": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/sha2": ^5.7.0 - "@ethersproject/signing-key": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - "@ethersproject/wordlists": ^5.7.0 - checksum: bfe5ca2d89a42de73655f853170ef4766b933c5f481cddad709b3aca18823275b096e572f92d1602a052f80b426edde44ad6b9d028799775a7dad4a5bbed2133 - languageName: node - linkType: hard - -"@ethersproject/json-wallets@npm:5.7.0, @ethersproject/json-wallets@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/json-wallets@npm:5.7.0" - dependencies: - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/address": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/hdnode": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/pbkdf2": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/random": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - aes-js: 3.0.0 - scrypt-js: 3.0.1 - checksum: f583458d22db62efaaf94d38dd243482776a45bf90f9f3882fbad5aa0b8fd288b41eb7c1ff8ec0b99c9b751088e43d6173530db64dd33c59f9d8daa8d7ad5aa2 - languageName: node - linkType: hard - -"@ethersproject/keccak256@npm:5.7.0, @ethersproject/keccak256@npm:^5.7.0": 
+"@ethersproject/keccak256@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/keccak256@npm:5.7.0" dependencies: @@ -3693,14 +3578,14 @@ __metadata: languageName: node linkType: hard -"@ethersproject/logger@npm:5.7.0, @ethersproject/logger@npm:^5.7.0": +"@ethersproject/logger@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/logger@npm:5.7.0" checksum: 075ab2f605f1fd0813f2e39c3308f77b44a67732b36e712d9bc085f22a84aac4da4f71b39bee50fe78da3e1c812673fadc41180c9970fe5e486e91ea17befe0d languageName: node linkType: hard -"@ethersproject/networks@npm:5.7.1, @ethersproject/networks@npm:^5.7.0": +"@ethersproject/networks@npm:^5.7.0": version: 5.7.1 resolution: "@ethersproject/networks@npm:5.7.1" dependencies: @@ -3709,17 +3594,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/pbkdf2@npm:5.7.0, @ethersproject/pbkdf2@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/pbkdf2@npm:5.7.0" - dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/sha2": ^5.7.0 - checksum: b895adb9e35a8a127e794f7aadc31a2424ef355a70e51cde10d457e3e888bb8102373199a540cf61f2d6b9a32e47358f9c65b47d559f42bf8e596b5fd67901e9 - languageName: node - linkType: hard - -"@ethersproject/properties@npm:5.7.0, @ethersproject/properties@npm:^5.7.0": +"@ethersproject/properties@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/properties@npm:5.7.0" dependencies: @@ -3728,45 +3603,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/providers@npm:5.7.2, @ethersproject/providers@npm:^5.7.1, @ethersproject/providers@npm:^5.7.2": - version: 5.7.2 - resolution: "@ethersproject/providers@npm:5.7.2" - dependencies: - "@ethersproject/abstract-provider": ^5.7.0 - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/address": ^5.7.0 - "@ethersproject/base64": ^5.7.0 - "@ethersproject/basex": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/constants": ^5.7.0 - "@ethersproject/hash": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/networks": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/random": ^5.7.0 - "@ethersproject/rlp": ^5.7.0 - "@ethersproject/sha2": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - "@ethersproject/web": ^5.7.0 - bech32: 1.1.4 - ws: 7.4.6 - checksum: 1754c731a5ca6782ae9677f4a9cd8b6246c4ef21a966c9a01b133750f3c578431ec43ec254e699969c4a0f87e84463ded50f96b415600aabd37d2056aee58c19 - languageName: node - linkType: hard - -"@ethersproject/random@npm:5.7.0, @ethersproject/random@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/random@npm:5.7.0" - dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - checksum: 017829c91cff6c76470852855108115b0b52c611b6be817ed1948d56ba42d6677803ec2012aa5ae298a7660024156a64c11fcf544e235e239ab3f89f0fff7345 - languageName: node - linkType: hard - -"@ethersproject/rlp@npm:5.7.0, @ethersproject/rlp@npm:^5.7.0": +"@ethersproject/rlp@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/rlp@npm:5.7.0" dependencies: @@ -3776,18 +3613,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/sha2@npm:5.7.0, @ethersproject/sha2@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/sha2@npm:5.7.0" - dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - hash.js: 1.1.7 - checksum: 09321057c022effbff4cc2d9b9558228690b5dd916329d75c4b1ffe32ba3d24b480a367a7cc92d0f0c0b1c896814d03351ae4630e2f1f7160be2bcfbde435dbc - languageName: node - linkType: hard - 
-"@ethersproject/signing-key@npm:5.7.0, @ethersproject/signing-key@npm:^5.7.0": +"@ethersproject/signing-key@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/signing-key@npm:5.7.0" dependencies: @@ -3801,21 +3627,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/solidity@npm:5.7.0": - version: 5.7.0 - resolution: "@ethersproject/solidity@npm:5.7.0" - dependencies: - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/sha2": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - checksum: 9a02f37f801c96068c3e7721f83719d060175bc4e80439fe060e92bd7acfcb6ac1330c7e71c49f4c2535ca1308f2acdcb01e00133129aac00581724c2d6293f3 - languageName: node - linkType: hard - -"@ethersproject/strings@npm:5.7.0, @ethersproject/strings@npm:^5.7.0": +"@ethersproject/strings@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/strings@npm:5.7.0" dependencies: @@ -3826,7 +3638,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/transactions@npm:5.7.0, @ethersproject/transactions@npm:^5.7.0": +"@ethersproject/transactions@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/transactions@npm:5.7.0" dependencies: @@ -3843,41 +3655,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/units@npm:5.7.0": - version: 5.7.0 - resolution: "@ethersproject/units@npm:5.7.0" - dependencies: - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/constants": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - checksum: 304714f848cd32e57df31bf545f7ad35c2a72adae957198b28cbc62166daa929322a07bff6e9c9ac4577ab6aa0de0546b065ed1b2d20b19e25748b7d475cb0fc - languageName: node - linkType: hard - -"@ethersproject/wallet@npm:5.7.0": - version: 5.7.0 - resolution: "@ethersproject/wallet@npm:5.7.0" - dependencies: - "@ethersproject/abstract-provider": ^5.7.0 - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/address": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/hash": ^5.7.0 - "@ethersproject/hdnode": ^5.7.0 - "@ethersproject/json-wallets": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/random": ^5.7.0 - "@ethersproject/signing-key": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - "@ethersproject/wordlists": ^5.7.0 - checksum: a4009bf7331eddab38e3015b5e9101ef92de7f705b00a6196b997db0e5635b6d83561674d46c90c6f77b87c0500fe4a6b0183ba13749efc22db59c99deb82fbd - languageName: node - linkType: hard - -"@ethersproject/web@npm:5.7.1, @ethersproject/web@npm:^5.7.0": +"@ethersproject/web@npm:^5.7.0": version: 5.7.1 resolution: "@ethersproject/web@npm:5.7.1" dependencies: @@ -3890,19 +3668,6 @@ __metadata: languageName: node linkType: hard -"@ethersproject/wordlists@npm:5.7.0, @ethersproject/wordlists@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/wordlists@npm:5.7.0" - dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/hash": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - checksum: 30eb6eb0731f9ef5faa44bf9c0c6e950bcaaef61e4d2d9ce0ae6d341f4e2d6d1f4ab4f8880bfce03b7aac4b862fb740e1421170cfbf8e2aafc359277d49e6e97 - languageName: node - linkType: hard - "@fastify/busboy@npm:^2.0.0": version: 2.1.0 resolution: "@fastify/busboy@npm:2.1.0" @@ -4548,161 +4313,117 @@ __metadata: languageName: unknown linkType: soft -"@nomicfoundation/ethereumjs-block@npm:5.0.2": - version: 5.0.2 - 
resolution: "@nomicfoundation/ethereumjs-block@npm:5.0.2" - dependencies: - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-trie": 6.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - ethereum-cryptography: 0.1.3 - ethers: ^5.7.1 - checksum: 7ff744f44a01f1c059ca7812a1cfc8089f87aa506af6cb39c78331dca71b32993cbd6fa05ad03f8c4f4fab73bb998a927af69e0d8ff01ae192ee5931606e09f5 +"@nomicfoundation/edr-darwin-arm64@npm:0.4.2": + version: 0.4.2 + resolution: "@nomicfoundation/edr-darwin-arm64@npm:0.4.2" + checksum: 7835e998c2ef83924efac0694bb4392f6abf770dc7f935dd28abc1a291f830cade14750d83a46a3205338e4ddff943dda60a9849317cf42edd38d7a2ce843588 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-blockchain@npm:7.0.2": - version: 7.0.2 - resolution: "@nomicfoundation/ethereumjs-blockchain@npm:7.0.2" - dependencies: - "@nomicfoundation/ethereumjs-block": 5.0.2 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-ethash": 3.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-trie": 6.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - abstract-level: ^1.0.3 - debug: ^4.3.3 - ethereum-cryptography: 0.1.3 - level: ^8.0.0 - lru-cache: ^5.1.1 - memory-level: ^1.0.0 - checksum: b7e440dcd73e32aa72d13bfd28cb472773c9c60ea808a884131bf7eb3f42286ad594a0864215f599332d800f3fe1f772fff4b138d2dcaa8f41e4d8389bff33e7 +"@nomicfoundation/edr-darwin-x64@npm:0.4.2": + version: 0.4.2 + resolution: "@nomicfoundation/edr-darwin-x64@npm:0.4.2" + checksum: 94daa26610621e85cb025feb37bb93e9b89c59f908bf3eae70720d2b86632dbb1236420ae3ae6f685d563ba52519d5f860e68ccd898fa1fced831961dea2c08a languageName: node linkType: hard -"@nomicfoundation/ethereumjs-common@npm:4.0.2": - version: 4.0.2 - resolution: "@nomicfoundation/ethereumjs-common@npm:4.0.2" - dependencies: - "@nomicfoundation/ethereumjs-util": 9.0.2 - crc-32: ^1.2.0 - checksum: f0d84704d6254d374299c19884312bd5666974b4b6f342d3f10bc76e549de78d20e45a53d25fbdc146268a52335497127e4f069126da7c60ac933a158e704887 +"@nomicfoundation/edr-linux-arm64-gnu@npm:0.4.2": + version: 0.4.2 + resolution: "@nomicfoundation/edr-linux-arm64-gnu@npm:0.4.2" + checksum: a7181e237f6ece8bd97e0f75972044dbf584c506bbac5bef586d9f7d627a2c07a279a2d892837bbedc80ea3dfb39fa66becc297238b5d715a942eed2a50745cd languageName: node linkType: hard -"@nomicfoundation/ethereumjs-ethash@npm:3.0.2": - version: 3.0.2 - resolution: "@nomicfoundation/ethereumjs-ethash@npm:3.0.2" - dependencies: - "@nomicfoundation/ethereumjs-block": 5.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - abstract-level: ^1.0.3 - bigint-crypto-utils: ^3.0.23 - ethereum-cryptography: 0.1.3 - checksum: e4011e4019dd9b92f7eeebfc1e6c9a9685c52d8fd0ee4f28f03e50048a23b600c714490827f59fdce497b3afb503b3fd2ebf6815ff307e9949c3efeff1403278 +"@nomicfoundation/edr-linux-arm64-musl@npm:0.4.2": + version: 0.4.2 + resolution: "@nomicfoundation/edr-linux-arm64-musl@npm:0.4.2" + checksum: 5a849484b7a104a7e1497774c4117afc58f64d57d30889d4f6f676dddb5c695192c0789b8be0b71171a2af770167a28aa301ae3ece7a2a156d82d94388639b66 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-evm@npm:2.0.2": - version: 2.0.2 - resolution: "@nomicfoundation/ethereumjs-evm@npm:2.0.2" - dependencies: - "@ethersproject/providers": ^5.7.1 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - 
"@nomicfoundation/ethereumjs-util": 9.0.2 - debug: ^4.3.3 - ethereum-cryptography: 0.1.3 - mcl-wasm: ^0.7.1 - rustbn.js: ~0.2.0 - checksum: a23cf570836ddc147606b02df568069de946108e640f902358fef67e589f6b371d856056ee44299d9b4e3497f8ae25faa45e6b18fefd90e9b222dc6a761d85f0 +"@nomicfoundation/edr-linux-x64-gnu@npm:0.4.2": + version: 0.4.2 + resolution: "@nomicfoundation/edr-linux-x64-gnu@npm:0.4.2" + checksum: 0520dd9a583976fd0f49dfe6c23227f03cd811a395dc5eed1a2922b4358d7c71fdcfea8f389d4a0e23b4ec53e1435959a544380f94e48122a75f94a42b177ac7 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-rlp@npm:5.0.2": - version: 5.0.2 - resolution: "@nomicfoundation/ethereumjs-rlp@npm:5.0.2" - bin: - rlp: bin/rlp - checksum: a74434cadefca9aa8754607cc1ad7bb4bbea4ee61c6214918e60a5bbee83206850346eb64e39fd1fe97f854c7ec0163e01148c0c881dda23881938f0645a0ef2 +"@nomicfoundation/edr-linux-x64-musl@npm:0.4.2": + version: 0.4.2 + resolution: "@nomicfoundation/edr-linux-x64-musl@npm:0.4.2" + checksum: 80c3b4346d8c27539bc005b09db233dedd8930310d1a049827661e69a8e03be9cbac27eb620a6ae9bfd46a2fbe22f83cee5af8d9e63178925d74d9c656246708 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-statemanager@npm:2.0.2": - version: 2.0.2 - resolution: "@nomicfoundation/ethereumjs-statemanager@npm:2.0.2" - dependencies: - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - debug: ^4.3.3 - ethereum-cryptography: 0.1.3 - ethers: ^5.7.1 - js-sdsl: ^4.1.4 - checksum: 3ab6578e252e53609afd98d8ba42a99f182dcf80252f23ed9a5e0471023ffb2502130f85fc47fa7c94cd149f9be799ed9a0942ca52a143405be9267f4ad94e64 +"@nomicfoundation/edr-win32-x64-msvc@npm:0.4.2": + version: 0.4.2 + resolution: "@nomicfoundation/edr-win32-x64-msvc@npm:0.4.2" + checksum: 736fb866fd5c2708560cbd5ae72815b5fc96e650cd74bc8bab0a1cb0e8baede4f595fdceb445c159814a6a7e8e691de227a5db49f61b3cd0ddfafd5715b397ab languageName: node linkType: hard -"@nomicfoundation/ethereumjs-trie@npm:6.0.2": - version: 6.0.2 - resolution: "@nomicfoundation/ethereumjs-trie@npm:6.0.2" +"@nomicfoundation/edr@npm:^0.4.1": + version: 0.4.2 + resolution: "@nomicfoundation/edr@npm:0.4.2" dependencies: - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - "@types/readable-stream": ^2.3.13 - ethereum-cryptography: 0.1.3 - readable-stream: ^3.6.0 - checksum: d4da918d333851b9f2cce7dbd25ab5753e0accd43d562d98fd991b168b6a08d1794528f0ade40fe5617c84900378376fe6256cdbe52c8d66bf4c53293bbc7c40 + "@nomicfoundation/edr-darwin-arm64": 0.4.2 + "@nomicfoundation/edr-darwin-x64": 0.4.2 + "@nomicfoundation/edr-linux-arm64-gnu": 0.4.2 + "@nomicfoundation/edr-linux-arm64-musl": 0.4.2 + "@nomicfoundation/edr-linux-x64-gnu": 0.4.2 + "@nomicfoundation/edr-linux-x64-musl": 0.4.2 + "@nomicfoundation/edr-win32-x64-msvc": 0.4.2 + checksum: 8c8457257b59ed9a29d88b7492e98e974d24e8318903e876a14dc0f6d5dc77948cd9053937d9730f54f920ba82ce3d244cab518d068359bcc20df88623f171ef languageName: node linkType: hard -"@nomicfoundation/ethereumjs-tx@npm:5.0.2": - version: 5.0.2 - resolution: "@nomicfoundation/ethereumjs-tx@npm:5.0.2" +"@nomicfoundation/ethereumjs-common@npm:4.0.4": + version: 4.0.4 + resolution: "@nomicfoundation/ethereumjs-common@npm:4.0.4" dependencies: - "@chainsafe/ssz": ^0.9.2 - "@ethersproject/providers": ^5.7.2 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - ethereum-cryptography: 0.1.3 - checksum: 
0bbcea75786b2ccb559afe2ecc9866fb4566a9f157b6ffba4f50960d14f4b3da2e86e273f6fadda9b860e67cfcabf589970fb951b328cb5f900a585cd21842a2 + "@nomicfoundation/ethereumjs-util": 9.0.4 + checksum: ce3f6e4ae15b976efdb7ccda27e19aadb62b5ffee209f9503e68b4fd8633715d4d697c0cc10ccd35f5e4e977edd05100d0f214e28880ec64fff77341dc34fcdf languageName: node linkType: hard -"@nomicfoundation/ethereumjs-util@npm:9.0.2": - version: 9.0.2 - resolution: "@nomicfoundation/ethereumjs-util@npm:9.0.2" +"@nomicfoundation/ethereumjs-rlp@npm:5.0.4": + version: 5.0.4 + resolution: "@nomicfoundation/ethereumjs-rlp@npm:5.0.4" + bin: + rlp: bin/rlp.cjs + checksum: ee2c2e5776c73801dc5ed636f4988b599b4563c2d0037da542ea57eb237c69dd1ac555f6bcb5e06f70515b6459779ba0d68252a6e105132b4659ab4bf62919b0 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-tx@npm:5.0.4": + version: 5.0.4 + resolution: "@nomicfoundation/ethereumjs-tx@npm:5.0.4" dependencies: - "@chainsafe/ssz": ^0.10.0 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 + "@nomicfoundation/ethereumjs-common": 4.0.4 + "@nomicfoundation/ethereumjs-rlp": 5.0.4 + "@nomicfoundation/ethereumjs-util": 9.0.4 ethereum-cryptography: 0.1.3 - checksum: 3a08f7b88079ef9f53b43da9bdcb8195498fd3d3911c2feee2571f4d1204656053f058b2f650471c86f7d2d0ba2f814768c7cfb0f266eede41c848356afc4900 + peerDependencies: + c-kzg: ^2.1.2 + peerDependenciesMeta: + c-kzg: + optional: true + checksum: 0f1c87716682ccbcf4d92ffc6cf8ab557e658b90319d82be3219a091a736859f8803c73c98e4863682e3e86d264751c472d33ff6d3c3daf4e75b5f01d0af8fa3 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-vm@npm:7.0.2": - version: 7.0.2 - resolution: "@nomicfoundation/ethereumjs-vm@npm:7.0.2" - dependencies: - "@nomicfoundation/ethereumjs-block": 5.0.2 - "@nomicfoundation/ethereumjs-blockchain": 7.0.2 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-evm": 2.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-statemanager": 2.0.2 - "@nomicfoundation/ethereumjs-trie": 6.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - debug: ^4.3.3 +"@nomicfoundation/ethereumjs-util@npm:9.0.4": + version: 9.0.4 + resolution: "@nomicfoundation/ethereumjs-util@npm:9.0.4" + dependencies: + "@nomicfoundation/ethereumjs-rlp": 5.0.4 ethereum-cryptography: 0.1.3 - mcl-wasm: ^0.7.1 - rustbn.js: ~0.2.0 - checksum: 1c25ba4d0644cadb8a2b0241a4bb02e578bfd7f70e3492b855c2ab5c120cb159cb8f7486f84dc1597884bd1697feedbfb5feb66e91352afb51f3694fd8e4a043 + peerDependencies: + c-kzg: ^2.1.2 + peerDependenciesMeta: + c-kzg: + optional: true + checksum: 754439f72b11cad2d8986707ad020077dcc763c4055f73e2668a0b4cadb22aa4407faa9b3c587d9eb5b97ac337afbe037eb642bc1d5a16197284f83db3462cbe languageName: node linkType: hard @@ -6127,16 +5848,6 @@ __metadata: languageName: node linkType: hard -"@types/readable-stream@npm:^2.3.13": - version: 2.3.15 - resolution: "@types/readable-stream@npm:2.3.15" - dependencies: - "@types/node": "*" - safe-buffer: ~5.1.1 - checksum: ec36f525cad09b6c65a1dafcb5ad99b9e2ed824ec49b7aa23180ac427e5d35b8a0706193ecd79ab4253a283ad485ba03d5917a98daaaa144f0ea34f4823e9d82 - languageName: node - linkType: hard - "@types/readable-stream@npm:^4": version: 4.0.10 resolution: "@types/readable-stream@npm:4.0.10" @@ -7047,21 +6758,6 @@ __metadata: languageName: node linkType: hard -"abstract-level@npm:^1.0.0, abstract-level@npm:^1.0.2, abstract-level@npm:^1.0.3": - version: 1.0.3 - resolution: "abstract-level@npm:1.0.3" - dependencies: - buffer: ^6.0.3 - catering: ^2.1.0 
- is-buffer: ^2.0.5 - level-supports: ^4.0.0 - level-transcoder: ^1.0.1 - module-error: ^1.0.1 - queue-microtask: ^1.2.3 - checksum: 70d61a3924526ebc257b138992052f9ff571a6cee5a7660836e37a1cc7081273c3acf465dd2f5e1897b38dc743a6fd9dba14a5d8a2a9d39e5787cd3da99f301d - languageName: node - linkType: hard - "abstract-leveldown@npm:~0.12.0, abstract-leveldown@npm:~0.12.1": version: 0.12.4 resolution: "abstract-leveldown@npm:0.12.4" @@ -7136,13 +6832,6 @@ __metadata: languageName: node linkType: hard -"aes-js@npm:3.0.0": - version: 3.0.0 - resolution: "aes-js@npm:3.0.0" - checksum: 251e26d533cd1a915b44896b17d5ed68c24a02484cfdd2e74ec700a309267db96651ea4eb657bf20aac32a3baa61f6e34edf8e2fec2de440a655da9942d334b8 - languageName: node - linkType: hard - "aes-js@npm:4.0.0-beta.5": version: 4.0.0-beta.5 resolution: "aes-js@npm:4.0.0-beta.5" @@ -7773,13 +7462,6 @@ __metadata: languageName: node linkType: hard -"bech32@npm:1.1.4": - version: 1.1.4 - resolution: "bech32@npm:1.1.4" - checksum: 0e98db619191548390d6f09ff68b0253ba7ae6a55db93dfdbb070ba234c1fd3308c0606fbcc95fad50437227b10011e2698b89f0181f6e7f845c499bd14d0f4b - languageName: node - linkType: hard - "big.js@npm:^5.2.2": version: 5.2.2 resolution: "big.js@npm:5.2.2" @@ -7787,13 +7469,6 @@ __metadata: languageName: node linkType: hard -"bigint-crypto-utils@npm:^3.0.23": - version: 3.3.0 - resolution: "bigint-crypto-utils@npm:3.3.0" - checksum: 9598ce57b23f776c8936d44114c9f051e62b5fa654915b664784cbcbacc5aa0485f4479571c51ff58008abb1210c0d6a234853742f07cf84bda890f2a1e01000 - languageName: node - linkType: hard - "binary-extensions@npm:^2.0.0": version: 2.2.0 resolution: "binary-extensions@npm:2.2.0" @@ -7896,7 +7571,7 @@ __metadata: languageName: node linkType: hard -"boxen@npm:^5.0.0": +"boxen@npm:^5.0.0, boxen@npm:^5.1.2": version: 5.1.2 resolution: "boxen@npm:5.1.2" dependencies: @@ -7979,18 +7654,6 @@ __metadata: languageName: node linkType: hard -"browser-level@npm:^1.0.1": - version: 1.0.1 - resolution: "browser-level@npm:1.0.1" - dependencies: - abstract-level: ^1.0.2 - catering: ^2.1.1 - module-error: ^1.0.2 - run-parallel-limit: ^1.1.0 - checksum: 67fbc77ce832940bfa25073eccff279f512ad56f545deb996a5b23b02316f5e76f4a79d381acc27eda983f5c9a2566aaf9c97e4fdd0748288c4407307537a29b - languageName: node - linkType: hard - "browser-stdout@npm:1.3.1": version: 1.3.1 resolution: "browser-stdout@npm:1.3.1" @@ -8263,20 +7926,6 @@ __metadata: languageName: node linkType: hard -"case@npm:^1.6.3": - version: 1.6.3 - resolution: "case@npm:1.6.3" - checksum: febe73278f910b0d28aab7efd6f51c235f9aa9e296148edb56dfb83fd58faa88308c30ce9a0122b6e53e0362c44f4407105bd5ef89c46860fc2b184e540fd68d - languageName: node - linkType: hard - -"catering@npm:^2.1.0, catering@npm:^2.1.1": - version: 2.1.1 - resolution: "catering@npm:2.1.1" - checksum: 205daefa69c935b0c19f3d8f2e0a520dd69aebe9bda55902958003f7c9cff8f967dfb90071b421bd6eb618576f657a89d2bc0986872c9bc04bbd66655e9d4bd6 - languageName: node - linkType: hard - "ccount@npm:^1.0.0": version: 1.1.0 resolution: "ccount@npm:1.1.0" @@ -8566,20 +8215,6 @@ __metadata: languageName: node linkType: hard -"classic-level@npm:^1.2.0": - version: 1.3.0 - resolution: "classic-level@npm:1.3.0" - dependencies: - abstract-level: ^1.0.2 - catering: ^2.1.0 - module-error: ^1.0.1 - napi-macros: ^2.2.2 - node-gyp: latest - node-gyp-build: ^4.3.0 - checksum: 773da48aef52a041115d413fee8340b357a4da2eb505764f327183b155edd7cc9d24819eb4f707c83dbdae8588024f5dddeb322125567c59d5d1f6f16334cdb9 - languageName: node - linkType: hard - "clean-css@npm:^5.2.2, 
clean-css@npm:^5.3.0, clean-css@npm:^5.3.2, clean-css@npm:~5.3.2": version: 5.3.3 resolution: "clean-css@npm:5.3.3" @@ -8882,13 +8517,6 @@ __metadata: languageName: node linkType: hard -"commander@npm:3.0.2": - version: 3.0.2 - resolution: "commander@npm:3.0.2" - checksum: 6d14ad030d1904428139487ed31febcb04c1604db2b8d9fae711f60ee6718828dc0e11602249e91c8a97b0e721e9c6d53edbc166bad3cde1596851d59a8f824d - languageName: node - linkType: hard - "commander@npm:^10.0.0, commander@npm:^10.0.1": version: 10.0.1 resolution: "commander@npm:10.0.1" @@ -8924,7 +8552,7 @@ __metadata: languageName: node linkType: hard -"commander@npm:^8.3.0": +"commander@npm:^8.1.0, commander@npm:^8.3.0": version: 8.3.0 resolution: "commander@npm:8.3.0" checksum: 0f82321821fc27b83bd409510bb9deeebcfa799ff0bf5d102128b500b7af22872c0c92cb6a0ebc5a4cf19c6b550fba9cedfa7329d18c6442a625f851377bacf0 @@ -9232,15 +8860,6 @@ __metadata: languageName: node linkType: hard -"crc-32@npm:^1.2.0": - version: 1.2.2 - resolution: "crc-32@npm:1.2.2" - bin: - crc32: bin/crc32.njs - checksum: ad2d0ad0cbd465b75dcaeeff0600f8195b686816ab5f3ba4c6e052a07f728c3e70df2e3ca9fd3d4484dc4ba70586e161ca5a2334ec8bf5a41bf022a6103ff243 - languageName: node - linkType: hard - "create-hash@npm:^1.1.0, create-hash@npm:^1.1.2, create-hash@npm:^1.2.0": version: 1.2.0 resolution: "create-hash@npm:1.2.0" @@ -9670,7 +9289,7 @@ __metadata: languageName: node linkType: hard -"debug@npm:4, debug@npm:4.3.4, debug@npm:^4.0.0, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.2.0, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4": +"debug@npm:4, debug@npm:4.3.4, debug@npm:^4.0.0, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.2.0, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.4": version: 4.3.4 resolution: "debug@npm:4.3.4" dependencies: @@ -10923,44 +10542,6 @@ __metadata: languageName: node linkType: hard -"ethers@npm:^5.7.1": - version: 5.7.2 - resolution: "ethers@npm:5.7.2" - dependencies: - "@ethersproject/abi": 5.7.0 - "@ethersproject/abstract-provider": 5.7.0 - "@ethersproject/abstract-signer": 5.7.0 - "@ethersproject/address": 5.7.0 - "@ethersproject/base64": 5.7.0 - "@ethersproject/basex": 5.7.0 - "@ethersproject/bignumber": 5.7.0 - "@ethersproject/bytes": 5.7.0 - "@ethersproject/constants": 5.7.0 - "@ethersproject/contracts": 5.7.0 - "@ethersproject/hash": 5.7.0 - "@ethersproject/hdnode": 5.7.0 - "@ethersproject/json-wallets": 5.7.0 - "@ethersproject/keccak256": 5.7.0 - "@ethersproject/logger": 5.7.0 - "@ethersproject/networks": 5.7.1 - "@ethersproject/pbkdf2": 5.7.0 - "@ethersproject/properties": 5.7.0 - "@ethersproject/providers": 5.7.2 - "@ethersproject/random": 5.7.0 - "@ethersproject/rlp": 5.7.0 - "@ethersproject/sha2": 5.7.0 - "@ethersproject/signing-key": 5.7.0 - "@ethersproject/solidity": 5.7.0 - "@ethersproject/strings": 5.7.0 - "@ethersproject/transactions": 5.7.0 - "@ethersproject/units": 5.7.0 - "@ethersproject/wallet": 5.7.0 - "@ethersproject/web": 5.7.1 - "@ethersproject/wordlists": 5.7.0 - checksum: b7c08cf3e257185a7946117dbbf764433b7ba0e77c27298dec6088b3bc871aff711462b0621930c56880ff0a7ceb8b1d3a361ffa259f93377b48e34107f62553 - languageName: node - linkType: hard - "ethers@npm:^6.7.1": version: 6.9.0 resolution: "ethers@npm:6.9.0" @@ -11558,19 +11139,6 @@ __metadata: languageName: node linkType: hard -"fs-extra@npm:^0.30.0": - version: 0.30.0 - resolution: "fs-extra@npm:0.30.0" - dependencies: - graceful-fs: ^4.1.2 - jsonfile: ^2.1.0 - klaw: ^1.0.0 - path-is-absolute: ^1.0.0 - rimraf: ^2.2.8 - checksum: 
6edfd65fc813baa27f1603778c0f5ec11f8c5006a20b920437813ee2023eba18aeec8bef1c89b2e6c84f9fc90fdc7c916f4a700466c8c69d22a35d018f2570f0 - languageName: node - linkType: hard - "fs-extra@npm:^10.0.0, fs-extra@npm:^10.1.0": version: 10.1.0 resolution: "fs-extra@npm:10.1.0" @@ -11704,13 +11272,6 @@ __metadata: languageName: node linkType: hard -"functional-red-black-tree@npm:^1.0.1": - version: 1.0.1 - resolution: "functional-red-black-tree@npm:1.0.1" - checksum: ca6c170f37640e2d94297da8bb4bf27a1d12bea3e00e6a3e007fd7aa32e37e000f5772acf941b4e4f3cf1c95c3752033d0c509af157ad8f526e7f00723b9eb9f - languageName: node - linkType: hard - "fwd-stream@npm:^1.0.4": version: 1.0.4 resolution: "fwd-stream@npm:1.0.4" @@ -12051,7 +11612,7 @@ __metadata: languageName: node linkType: hard -"graceful-fs@npm:^4.1.11, graceful-fs@npm:^4.1.2, graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.1.9, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.4, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": +"graceful-fs@npm:^4.1.11, graceful-fs@npm:^4.1.2, graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.4, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": version: 4.2.11 resolution: "graceful-fs@npm:4.2.11" checksum: ac85f94da92d8eb6b7f5a8b20ce65e43d66761c55ce85ac96df6865308390da45a8d3f0296dd3a663de65d30ba497bd46c696cc1e248c72b13d6d567138a4fc7 @@ -12093,22 +11654,16 @@ __metadata: languageName: node linkType: hard -"hardhat@npm:^2.17.4": - version: 2.19.2 - resolution: "hardhat@npm:2.19.2" +"hardhat@npm:^2.22.6": + version: 2.22.6 + resolution: "hardhat@npm:2.22.6" dependencies: "@ethersproject/abi": ^5.1.2 "@metamask/eth-sig-util": ^4.0.0 - "@nomicfoundation/ethereumjs-block": 5.0.2 - "@nomicfoundation/ethereumjs-blockchain": 7.0.2 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-evm": 2.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-statemanager": 2.0.2 - "@nomicfoundation/ethereumjs-trie": 6.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - "@nomicfoundation/ethereumjs-vm": 7.0.2 + "@nomicfoundation/edr": ^0.4.1 + "@nomicfoundation/ethereumjs-common": 4.0.4 + "@nomicfoundation/ethereumjs-tx": 5.0.4 + "@nomicfoundation/ethereumjs-util": 9.0.4 "@nomicfoundation/solidity-analyzer": ^0.1.0 "@sentry/node": ^5.18.1 "@types/bn.js": ^5.1.0 @@ -12116,6 +11671,7 @@ __metadata: adm-zip: ^0.4.16 aggregate-error: ^3.0.0 ansi-escapes: ^4.3.0 + boxen: ^5.1.2 chalk: ^2.4.2 chokidar: ^3.4.0 ci-info: ^2.0.0 @@ -12138,7 +11694,7 @@ __metadata: raw-body: ^2.4.1 resolve: 1.17.0 semver: ^6.3.0 - solc: 0.7.3 + solc: 0.8.26 source-map-support: ^0.5.13 stacktrace-parser: ^0.1.10 tsort: 0.0.1 @@ -12155,7 +11711,7 @@ __metadata: optional: true bin: hardhat: internal/cli/bootstrap.js - checksum: 0b5499890e46750ca8c51bbe1205599b1424a2e5293b40c9f7cb56320d56b9935fbd4e276de370e07664ae81fa57dc7ab227bf2b2363f5732ef9f06df1a9a6d9 + checksum: 5aec1824db3575d63754de18c2629bcd820bc836d836f8a6346bcd9aa2ae4c397e090c43ea482ee765b704e018001015b5c84c5ded301a6a1144129c1a4c509b languageName: node linkType: hard @@ -13120,7 +12676,7 @@ __metadata: eslint: ^8.57.0 eslint-plugin-prettier: ^5.1.3 ethers: ^6.7.1 - hardhat: ^2.17.4 + hardhat: ^2.22.6 prettier: 3.2.5 smol-toml: ^1.1.2 toml: ^3.0.0 @@ -13248,7 +12804,7 @@ __metadata: languageName: node linkType: hard -"is-buffer@npm:^2.0.0, is-buffer@npm:^2.0.5": +"is-buffer@npm:^2.0.0": version: 2.0.5 resolution: "is-buffer@npm:2.0.5" checksum: 
764c9ad8b523a9f5a32af29bdf772b08eb48c04d2ad0a7240916ac2688c983bf5f8504bf25b35e66240edeb9d9085461f9b5dae1f3d2861c6b06a65fe983de42 @@ -13801,13 +13357,6 @@ __metadata: languageName: node linkType: hard -"js-sdsl@npm:^4.1.4": - version: 4.4.2 - resolution: "js-sdsl@npm:4.4.2" - checksum: ba705adc1788bf3c6f6c8e5077824f2bb4f0acab5a984420ce5cc492c7fff3daddc26335ad2c9a67d4f5e3241ec790f9e5b72a625adcf20cf321d2fd85e62b8b - languageName: node - linkType: hard - "js-sha3@npm:0.8.0": version: 0.8.0 resolution: "js-sha3@npm:0.8.0" @@ -13944,18 +13493,6 @@ __metadata: languageName: node linkType: hard -"jsonfile@npm:^2.1.0": - version: 2.4.0 - resolution: "jsonfile@npm:2.4.0" - dependencies: - graceful-fs: ^4.1.6 - dependenciesMeta: - graceful-fs: - optional: true - checksum: f5064aabbc9e35530dc471d8b203ae1f40dbe949ddde4391c6f6a6d310619a15f0efdae5587df594d1d70c555193aaeee9d2ed4aec9ffd5767bd5e4e62d49c3d - languageName: node - linkType: hard - "jsonfile@npm:^4.0.0": version: 4.0.0 resolution: "jsonfile@npm:4.0.0" @@ -14054,18 +13591,6 @@ __metadata: languageName: node linkType: hard -"klaw@npm:^1.0.0": - version: 1.3.1 - resolution: "klaw@npm:1.3.1" - dependencies: - graceful-fs: ^4.1.9 - dependenciesMeta: - graceful-fs: - optional: true - checksum: 8f69e4797c26e7c3f2426bfa85f38a3da3c2cb1b4c6bd850d2377aed440d41ce9d806f2885c2e2e224372c56af4b1d43b8a499adecf9a05e7373dc6b8b7c52e4 - languageName: node - linkType: hard - "kleur@npm:^3.0.3": version: 3.0.3 resolution: "kleur@npm:3.0.3" @@ -14267,33 +13792,6 @@ __metadata: languageName: node linkType: hard -"level-supports@npm:^4.0.0": - version: 4.0.1 - resolution: "level-supports@npm:4.0.1" - checksum: d4552b42bb8cdeada07b0f6356c7a90fefe76279147331f291aceae26e3e56d5f927b09ce921647c0230bfe03ddfbdcef332be921e5c2194421ae2bfa3cf6368 - languageName: node - linkType: hard - -"level-transcoder@npm:^1.0.1": - version: 1.0.1 - resolution: "level-transcoder@npm:1.0.1" - dependencies: - buffer: ^6.0.3 - module-error: ^1.0.1 - checksum: 304f08d802faf3491a533b6d87ad8be3cabfd27f2713bbe9d4c633bf50fcb9460eab5a6776bf015e101ead7ba1c1853e05e7f341112f17a9d0cb37ee5a421a25 - languageName: node - linkType: hard - -"level@npm:^8.0.0": - version: 8.0.0 - resolution: "level@npm:8.0.0" - dependencies: - browser-level: ^1.0.1 - classic-level: ^1.2.0 - checksum: 13eb25bd71bfdca6cd714d1233adf9da97de9a8a4bf9f28d62a390b5c96d0250abaf983eb90eb8c4e89c7a985bb330750683d106f12670e5ea8fba1d7e608a1f - languageName: node - linkType: hard - "levelup@npm:^0.18.2": version: 0.18.6 resolution: "levelup@npm:0.18.6" @@ -14719,13 +14217,6 @@ __metadata: languageName: node linkType: hard -"mcl-wasm@npm:^0.7.1": - version: 0.7.9 - resolution: "mcl-wasm@npm:0.7.9" - checksum: 6b6ed5084156b98b2db70b223e1ba2c01953970b48a2e0c4ea3eeb9296610e6b3bfb2a2cce9e92e2d7ad61778b5f5a630e705e663835e915ba188c174a0a37fa - languageName: node - linkType: hard - "md5.js@npm:^1.3.4": version: 1.3.5 resolution: "md5.js@npm:1.3.5" @@ -15087,17 +14578,6 @@ __metadata: languageName: node linkType: hard -"memory-level@npm:^1.0.0": - version: 1.0.0 - resolution: "memory-level@npm:1.0.0" - dependencies: - abstract-level: ^1.0.0 - functional-red-black-tree: ^1.0.1 - module-error: ^1.0.1 - checksum: 80b1b7aedaf936e754adbcd7b9303018c3684fb32f9992fd967c448f145d177f16c724fbba9ed3c3590a9475fd563151eae664d69b83d2ad48714852e9fc5c72 - languageName: node - linkType: hard - "memorystream@npm:^0.3.1": version: 0.3.1 resolution: "memorystream@npm:0.3.1" @@ -15945,13 +15425,6 @@ __metadata: languageName: node linkType: hard -"module-error@npm:^1.0.1, 
module-error@npm:^1.0.2": - version: 1.0.2 - resolution: "module-error@npm:1.0.2" - checksum: 5d653e35bd55b3e95f8aee2cdac108082ea892e71b8f651be92cde43e4ee86abee4fa8bd7fc3fe5e68b63926d42f63c54cd17b87a560c31f18739295575a3962 - languageName: node - linkType: hard - "mrmime@npm:^1.0.0": version: 1.0.1 resolution: "mrmime@npm:1.0.1" @@ -16017,13 +15490,6 @@ __metadata: languageName: node linkType: hard -"napi-macros@npm:^2.2.2": - version: 2.2.2 - resolution: "napi-macros@npm:2.2.2" - checksum: c6f9bd71cdbbc37ddc3535aa5be481238641d89585b8a3f4d301cb89abf459e2d294810432bb7d12056d1f9350b1a0899a5afcf460237a3da6c398cf0fec7629 - languageName: node - linkType: hard - "natural-compare@npm:^1.4.0": version: 1.4.0 resolution: "natural-compare@npm:1.4.0" @@ -16126,7 +15592,7 @@ __metadata: languageName: node linkType: hard -"node-gyp-build@npm:^4.2.0, node-gyp-build@npm:^4.3.0": +"node-gyp-build@npm:^4.2.0": version: 4.7.1 resolution: "node-gyp-build@npm:4.7.1" bin: @@ -17686,7 +17152,7 @@ __metadata: languageName: node linkType: hard -"queue-microtask@npm:^1.2.2, queue-microtask@npm:^1.2.3": +"queue-microtask@npm:^1.2.2": version: 1.2.3 resolution: "queue-microtask@npm:1.2.3" checksum: b676f8c040cdc5b12723ad2f91414d267605b26419d5c821ff03befa817ddd10e238d22b25d604920340fd73efd8ba795465a0377c4adf45a4a41e4234e42dc4 @@ -18421,7 +17887,7 @@ __metadata: languageName: node linkType: hard -"require-from-string@npm:^2.0.0, require-from-string@npm:^2.0.2": +"require-from-string@npm:^2.0.2": version: 2.0.2 resolution: "require-from-string@npm:2.0.2" checksum: a03ef6895445f33a4015300c426699bc66b2b044ba7b670aa238610381b56d3f07c686251740d575e22f4c87531ba662d06937508f0f3c0f1ddc04db3130560b @@ -18589,17 +18055,6 @@ __metadata: languageName: node linkType: hard -"rimraf@npm:^2.2.8": - version: 2.7.1 - resolution: "rimraf@npm:2.7.1" - dependencies: - glob: ^7.1.3 - bin: - rimraf: ./bin.js - checksum: cdc7f6eacb17927f2a075117a823e1c5951792c6498ebcce81ca8203454a811d4cf8900314154d3259bb8f0b42ab17f67396a8694a54cae3283326e57ad250cd - languageName: node - linkType: hard - "rimraf@npm:^3.0.2": version: 3.0.2 resolution: "rimraf@npm:3.0.2" @@ -18725,15 +18180,6 @@ __metadata: languageName: node linkType: hard -"run-parallel-limit@npm:^1.1.0": - version: 1.1.0 - resolution: "run-parallel-limit@npm:1.1.0" - dependencies: - queue-microtask: ^1.2.2 - checksum: 672c3b87e7f939c684b9965222b361421db0930223ed1e43ebf0e7e48ccc1a022ea4de080bef4d5468434e2577c33b7681e3f03b7593fdc49ad250a55381123c - languageName: node - linkType: hard - "run-parallel@npm:^1.1.9": version: 1.2.0 resolution: "run-parallel@npm:1.2.0" @@ -18743,13 +18189,6 @@ __metadata: languageName: node linkType: hard -"rustbn.js@npm:~0.2.0": - version: 0.2.0 - resolution: "rustbn.js@npm:0.2.0" - checksum: 2148e7ba34e70682907ee29df4784639e6eb025481b2c91249403b7ec57181980161868d9aa24822a5075dd1bb5a180dfedc77309e5f0d27b6301f9b563af99a - languageName: node - linkType: hard - "rxjs@npm:^7.5.4": version: 7.8.1 resolution: "rxjs@npm:7.8.1" @@ -18841,7 +18280,7 @@ __metadata: languageName: node linkType: hard -"scrypt-js@npm:3.0.1, scrypt-js@npm:^3.0.0": +"scrypt-js@npm:^3.0.0": version: 3.0.1 resolution: "scrypt-js@npm:3.0.1" checksum: b7c7d1a68d6ca946f2fbb0778e0c4ec63c65501b54023b2af7d7e9f48fdb6c6580d6f7675cd53bda5944c5ebc057560d5a6365079752546865defb3b79dea454 @@ -19333,22 +18772,20 @@ __metadata: languageName: node linkType: hard -"solc@npm:0.7.3": - version: 0.7.3 - resolution: "solc@npm:0.7.3" +"solc@npm:0.8.26": + version: 0.8.26 + resolution: "solc@npm:0.8.26" dependencies: 
command-exists: ^1.2.8 - commander: 3.0.2 + commander: ^8.1.0 follow-redirects: ^1.12.1 - fs-extra: ^0.30.0 js-sha3: 0.8.0 memorystream: ^0.3.1 - require-from-string: ^2.0.0 semver: ^5.5.0 tmp: 0.0.33 bin: - solcjs: solcjs - checksum: 2d8eb16c6d8f648213c94dc8d977cffe5099cba7d41c82d92d769ef71ae8320a985065ce3d6c306440a85f8e8d2b27fb30bdd3ac38f69e5c1fa0ab8a3fb2f217 + solcjs: solc.js + checksum: e3eaeac76e60676377b357af8f3919d4c8c6a74b74112b49279fe8c74a3dfa1de8afe4788689fc307453bde336edc8572988d2cf9e909f84d870420eb640400c languageName: node linkType: hard @@ -21629,21 +21066,6 @@ __metadata: languageName: node linkType: hard -"ws@npm:7.4.6": - version: 7.4.6 - resolution: "ws@npm:7.4.6" - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - checksum: 3a990b32ed08c72070d5e8913e14dfcd831919205be52a3ff0b4cdd998c8d554f167c9df3841605cde8b11d607768cacab3e823c58c96a5c08c987e093eb767a - languageName: node - linkType: hard - "ws@npm:8.16.0, ws@npm:^8.16.0": version: 8.16.0 resolution: "ws@npm:8.16.0" From 863d96d1d13b48e3baa0e28bf8d5505c4f4c0fcd Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Tue, 23 Jul 2024 21:37:03 +0000 Subject: [PATCH 2/4] remove duplicate multi_scalar_mul_slice --- noir_stdlib/src/embedded_curve_ops.nr | 3 --- 1 file changed, 3 deletions(-) diff --git a/noir_stdlib/src/embedded_curve_ops.nr b/noir_stdlib/src/embedded_curve_ops.nr index 9324802c2f8..6b70b6ddef0 100644 --- a/noir_stdlib/src/embedded_curve_ops.nr +++ b/noir_stdlib/src/embedded_curve_ops.nr @@ -105,9 +105,6 @@ fn multi_scalar_mul_array_return(points: [EmbeddedCurvePoint; N], sc #[foreign(multi_scalar_mul)] pub(crate) fn multi_scalar_mul_slice(points: [EmbeddedCurvePoint], scalars: [EmbeddedCurveScalar]) -> [Field; 3] {} -#[foreign(multi_scalar_mul)] -pub(crate) fn multi_scalar_mul_slice(points: [EmbeddedCurvePoint], scalars: [EmbeddedCurveScalar]) -> [Field; 3] {} - // docs:start:fixed_base_scalar_mul pub fn fixed_base_scalar_mul(scalar: EmbeddedCurveScalar) -> EmbeddedCurvePoint // docs:end:fixed_base_scalar_mul From 3a3831e82dd52733c2b07980db065cf4fc8c1124 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Fri, 26 Jul 2024 16:02:46 +0000 Subject: [PATCH 3/4] chore: bump bb --- scripts/install_bb.sh | 2 +- tooling/noir_js_backend_barretenberg/package.json | 2 +- yarn.lock | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/scripts/install_bb.sh b/scripts/install_bb.sh index 95dcfdda880..8a9ae768674 100755 --- a/scripts/install_bb.sh +++ b/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.46.1" +VERSION="0.47.0" BBUP_PATH=~/.bb/bbup diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 06c40d59a6a..b770b4f65fd 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -41,7 +41,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.46.1", + "@aztec/bb.js": "0.47.0", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/yarn.lock b/yarn.lock index 5a442d77b30..1a21dcff815 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,9 +221,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.46.1": - version: 0.46.1 - resolution: "@aztec/bb.js@npm:0.46.1" +"@aztec/bb.js@npm:0.47.0": + version: 0.47.0 + resolution: "@aztec/bb.js@npm:0.47.0" dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -231,7 +231,7 @@ __metadata: tslib: ^2.4.0 bin: bb.js: dest/node/main.js - checksum: 9475388f994e430ab3282a2c9769cd116f334358049955ed520f467d1abec8237bfeae7fa2fed9cd292f24c09c466e32e2af2d0a5cec2d10cc0c727728d96b0d + checksum: e59261fd08fefece756b6ee469bd5756347aa657f36a6b245fa18a6985016d117a5f2c409b49fefe19d1a7324e138c62a390298bc7599c11e8f807c93df84b20 languageName: node linkType: hard @@ -4161,7 +4161,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.46.1 + "@aztec/bb.js": 0.47.0 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3 From c7559b2277a21a289b31a1f5247c2b50ad310513 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Tue, 30 Jul 2024 15:24:25 +0000 Subject: [PATCH 4/4] bump bb to 0.47.1 now that releases have been fixed --- scripts/install_bb.sh | 2 +- tooling/noir_js_backend_barretenberg/package.json | 2 +- yarn.lock | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/scripts/install_bb.sh b/scripts/install_bb.sh index 8a9ae768674..65a449be543 100755 --- a/scripts/install_bb.sh +++ b/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.47.0" +VERSION="0.47.1" BBUP_PATH=~/.bb/bbup diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index b770b4f65fd..aeca5fe543f 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -41,7 +41,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.47.0", + "@aztec/bb.js": "0.47.1", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/yarn.lock b/yarn.lock index 1a21dcff815..40d6ccc55e6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,9 +221,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.47.0": - version: 0.47.0 - resolution: "@aztec/bb.js@npm:0.47.0" +"@aztec/bb.js@npm:0.47.1": + version: 0.47.1 + resolution: "@aztec/bb.js@npm:0.47.1" dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -231,7 +231,7 @@ __metadata: tslib: ^2.4.0 bin: bb.js: dest/node/main.js - checksum: e59261fd08fefece756b6ee469bd5756347aa657f36a6b245fa18a6985016d117a5f2c409b49fefe19d1a7324e138c62a390298bc7599c11e8f807c93df84b20 + checksum: fa06d2ab58b2a23bacc578df7654f5c7eb90553229fc9730aaaf7479bc96b39f10f24a4f3a7eae8f73df3cdd8a3ffb07627cad61dff9896cabdb275ce5b6f09b languageName: node linkType: hard @@ -4161,7 +4161,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.47.0 + "@aztec/bb.js": 0.47.1 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3