From 230709dc0576d9f8e368ca4cb80ff8a80fec1a68 Mon Sep 17 00:00:00 2001
From: AztecBot
Date: Tue, 12 Mar 2024 18:16:07 +0000
Subject: [PATCH] feat: Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5134)

Automated pull of development from the [noir](https://github.com/noir-lang/noir) programming language, a dependency of Aztec.

BEGIN_COMMIT_OVERRIDE
chore: generalise `FunctionVisibility` to `ItemVisibility` (https://github.com/noir-lang/noir/pull/4495)
fix: Dynamic assert messages in brillig (https://github.com/noir-lang/noir/pull/4531)
chore: organize the `blackbox_solver` crate (https://github.com/noir-lang/noir/pull/4519)
fix(acir_gen): More granular element sizes array check (https://github.com/noir-lang/noir/pull/4528)
chore: Release Noir(0.25.0) (https://github.com/noir-lang/noir/pull/4352)
chore: document big integers (https://github.com/noir-lang/noir/pull/4487)
fix: Add `follow_bindings` to follow `Type::Alias` links (https://github.com/noir-lang/noir/pull/4521)
fix: Fix brillig slowdown when assigning arrays in loops (https://github.com/noir-lang/noir/pull/4472)
chore: Move `check_method_signatures` to type checking phase (https://github.com/noir-lang/noir/pull/4516)
chore(ci): fix JS publishing workflow checking out inconsistent commits (https://github.com/noir-lang/noir/pull/4493)
fix(ssa): Handle mergers of slices returned from calls (https://github.com/noir-lang/noir/pull/4496)
chore: Add HashMap docs (https://github.com/noir-lang/noir/pull/4457)
chore: custom hash for eddsa (https://github.com/noir-lang/noir/pull/4440)
chore: update various dependencies (https://github.com/noir-lang/noir/pull/4513)
fix: Allow type aliases in main (https://github.com/noir-lang/noir/pull/4505)
chore: add `ModuleDeclaration` struct (https://github.com/noir-lang/noir/pull/4512)
fix: Force src impl for == on slices (https://github.com/noir-lang/noir/pull/4507)
chore: pass `import_directive` by reference (https://github.com/noir-lang/noir/pull/4511)
feat: Track stack frames and their variables in the debugger (https://github.com/noir-lang/noir/pull/4188)
chore: add regression test for issue 4449 (https://github.com/noir-lang/noir/pull/4503)
chore: pass macro processors by reference (https://github.com/noir-lang/noir/pull/4501)
chore: bump bb to 0.26.3 (https://github.com/noir-lang/noir/pull/4488)
fix: handling of gh deps in noir_wasm (https://github.com/noir-lang/noir/pull/4499)
fix: iterative flattening pass (https://github.com/noir-lang/noir/pull/4492)
chore: Move templated code for assert_message into the stdlib (https://github.com/noir-lang/noir/pull/4475)
chore: pull out separate function for compiling and running a test
chore: update cargo deny config (https://github.com/noir-lang/noir/pull/4486)
feat: run tests in parallel in `nargo test` (https://github.com/noir-lang/noir/pull/4484)
END_COMMIT_OVERRIDE

---------

Co-authored-by: TomAFrench
Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com>
---
 .aztec-sync-commit | 2 +-
 .github/JS_PUBLISH_FAILED.md | 2 +-
 .github/workflows/publish-es-packages.yml | 8 +-
 .release-please-manifest.json | 4 +-
 CHANGELOG.md | 77 +
 Cargo.lock | 322 ++-
 Cargo.toml | 21 +-
 acvm-repo/CHANGELOG.md | 64 +
 acvm-repo/acir/Cargo.toml | 2 +-
 acvm-repo/acir_field/Cargo.toml | 2 +-
 acvm-repo/acvm/Cargo.toml | 2 +-
 acvm-repo/acvm_js/Cargo.toml | 2 +-
 acvm-repo/acvm_js/package.json | 2 +-
 acvm-repo/blackbox_solver/Cargo.toml | 2 +-
 acvm-repo/blackbox_solver/src/ecdsa/mod.rs | 22 +
 acvm-repo/blackbox_solver/src/ecdsa/secp256k1.rs | 137 ++
.../blackbox_solver/src/ecdsa/secp256r1.rs | 133 ++ acvm-repo/blackbox_solver/src/hash.rs | 126 ++ acvm-repo/blackbox_solver/src/lib.rs | 446 +---- acvm-repo/bn254_blackbox_solver/Cargo.toml | 4 +- acvm-repo/brillig/Cargo.toml | 2 +- acvm-repo/brillig_vm/Cargo.toml | 2 +- aztec_macros/src/lib.rs | 1781 +---------------- .../compute_note_hash_and_nullifier.rs | 195 ++ aztec_macros/src/transforms/events.rs | 178 ++ aztec_macros/src/transforms/functions.rs | 707 +++++++ aztec_macros/src/transforms/mod.rs | 4 + aztec_macros/src/transforms/storage.rs | 346 ++++ aztec_macros/src/utils/ast_utils.rs | 183 ++ aztec_macros/src/utils/checks.rs | 22 + aztec_macros/src/utils/constants.rs | 3 + aztec_macros/src/utils/errors.rs | 69 + aztec_macros/src/utils/hir_utils.rs | 118 ++ aztec_macros/src/utils/mod.rs | 5 + compiler/noirc_driver/Cargo.toml | 1 - compiler/noirc_driver/src/contract.rs | 31 +- compiler/noirc_driver/src/lib.rs | 30 +- compiler/noirc_errors/src/debug_info.rs | 14 +- .../src/brillig/brillig_gen/brillig_block.rs | 32 +- .../noirc_evaluator/src/brillig/brillig_ir.rs | 39 +- compiler/noirc_evaluator/src/ssa.rs | 3 +- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 8 +- .../src/ssa/function_builder/mod.rs | 19 +- .../src/ssa/opt/flatten_cfg.rs | 543 ++--- .../ssa/opt/flatten_cfg/capacity_tracker.rs | 32 +- .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 5 + compiler/noirc_frontend/src/ast/expression.rs | 25 +- compiler/noirc_frontend/src/ast/mod.rs | 4 +- compiler/noirc_frontend/src/ast/statement.rs | 11 + compiler/noirc_frontend/src/debug/mod.rs | 104 +- .../src/hir/def_collector/dc_crate.rs | 191 +- .../src/hir/def_collector/dc_mod.rs | 57 +- .../src/hir/def_collector/errors.rs | 20 - .../src/hir/def_map/item_scope.rs | 13 +- .../noirc_frontend/src/hir/def_map/mod.rs | 12 +- .../src/hir/def_map/namespace.rs | 11 +- .../src/hir/resolution/errors.rs | 19 - .../src/hir/resolution/import.rs | 6 +- .../src/hir/resolution/resolver.rs | 157 +- .../src/hir/resolution/traits.rs | 17 +- .../src/hir/type_check/errors.rs | 37 +- .../noirc_frontend/src/hir/type_check/expr.rs | 12 +- .../noirc_frontend/src/hir/type_check/mod.rs | 177 +- .../noirc_frontend/src/hir_def/function.rs | 14 +- compiler/noirc_frontend/src/hir_def/types.rs | 16 +- compiler/noirc_frontend/src/lexer/token.rs | 3 - compiler/noirc_frontend/src/lib.rs | 17 +- .../src/monomorphization/ast.rs | 5 +- .../src/monomorphization/debug.rs | 2 +- .../src/monomorphization/debug_types.rs | 54 +- .../src/monomorphization/mod.rs | 25 +- compiler/noirc_frontend/src/node_interner.rs | 27 +- compiler/noirc_frontend/src/parser/mod.rs | 18 +- compiler/noirc_frontend/src/parser/parser.rs | 10 +- .../src/parser/parser/function.rs | 22 +- .../src/parser/parser/traits.rs | 10 +- compiler/noirc_frontend/src/tests.rs | 27 +- compiler/noirc_printable_type/src/lib.rs | 8 +- compiler/wasm/package.json | 2 +- compiler/wasm/src/noir/package.ts | 7 +- compiler/wasm/src/types/noir_artifact.ts | 11 +- .../wasm/test/fixtures/deps/lib-c/src/lib.nr | 2 +- .../test/fixtures/deps/lib-c/src/module.nr | 2 +- .../fixtures/deps/lib-c/src/module/foo.nr | 2 +- .../test/fixtures/noir-contract/src/main.nr | 3 +- cspell.json | 2 + deny.toml | 13 +- docs/.markdownlint.json | 3 + docs/docs/noir/standard_library/bigint.md | 102 + .../standard_library/containers/hashmap.md | 270 +++ .../cryptographic_primitives/eddsa.mdx | 8 + docs/scripts/codegen_nargo_reference.sh | 2 +- .../explainers/explainer-oracle.md | 57 + .../explainers/explainer-recursion.md | 176 ++ 
.../getting_started/_category_.json | 5 + .../hello_noir/_category_.json | 5 + .../getting_started/hello_noir/index.md | 142 ++ .../hello_noir/project_breakdown.md | 199 ++ .../installation/_category_.json | 6 + .../getting_started/installation/index.md | 48 + .../installation/other_install_methods.md | 254 +++ .../getting_started/tooling/_category_.json | 6 + .../getting_started/tooling/index.mdx | 38 + .../tooling/language_server.md | 43 + .../getting_started/tooling/testing.md | 62 + .../version-v0.25.0/how_to/_category_.json | 5 + .../version-v0.25.0/how_to/how-to-oracles.md | 280 +++ .../how_to/how-to-recursion.md | 179 ++ .../how_to/how-to-solidity-verifier.md | 231 +++ .../version-v0.25.0/how_to/merkle-proof.mdx | 48 + .../how_to/using-devcontainers.mdx | 110 + docs/versioned_docs/version-v0.25.0/index.mdx | 67 + .../version-v0.25.0/migration_notes.md | 105 + .../noir/concepts/_category_.json | 6 + .../version-v0.25.0/noir/concepts/assert.md | 45 + .../version-v0.25.0/noir/concepts/comments.md | 33 + .../noir/concepts/control_flow.md | 45 + .../version-v0.25.0/noir/concepts/data_bus.md | 21 + .../noir/concepts/data_types/_category_.json | 5 + .../noir/concepts/data_types/arrays.md | 251 +++ .../noir/concepts/data_types/booleans.md | 31 + .../noir/concepts/data_types/fields.md | 192 ++ .../concepts/data_types/function_types.md | 26 + .../noir/concepts/data_types/index.md | 110 + .../noir/concepts/data_types/integers.md | 155 ++ .../noir/concepts/data_types/references.md | 23 + .../noir/concepts/data_types/slices.mdx | 147 ++ .../noir/concepts/data_types/strings.md | 80 + .../noir/concepts/data_types/structs.md | 70 + .../noir/concepts/data_types/tuples.md | 48 + .../version-v0.25.0/noir/concepts/distinct.md | 64 + .../noir/concepts/functions.md | 226 +++ .../version-v0.25.0/noir/concepts/generics.md | 106 + .../version-v0.25.0/noir/concepts/globals.md | 72 + .../version-v0.25.0/noir/concepts/lambdas.md | 81 + .../noir/concepts/mutability.md | 121 ++ .../version-v0.25.0/noir/concepts/ops.md | 98 + .../version-v0.25.0/noir/concepts/oracles.md | 23 + .../noir/concepts/shadowing.md | 44 + .../version-v0.25.0/noir/concepts/traits.md | 389 ++++ .../noir/concepts/unconstrained.md | 95 + .../modules_packages_crates/_category_.json | 6 + .../crates_and_packages.md | 43 + .../modules_packages_crates/dependencies.md | 124 ++ .../noir/modules_packages_crates/modules.md | 105 + .../modules_packages_crates/workspaces.md | 40 + .../noir/standard_library/_category_.json | 6 + .../noir/standard_library/black_box_fns.md | 31 + .../noir/standard_library/bn254.md | 46 + .../standard_library/containers/boundedvec.md | 326 +++ .../standard_library/containers/hashmap.md | 569 ++++++ .../noir/standard_library/containers/index.md | 5 + .../noir/standard_library/containers/vec.mdx | 151 ++ .../cryptographic_primitives/_category_.json | 5 + .../cryptographic_primitives/ec_primitives.md | 102 + .../ecdsa_sig_verification.mdx | 60 + .../cryptographic_primitives/eddsa.mdx | 37 + .../cryptographic_primitives/hashes.mdx | 251 +++ .../cryptographic_primitives/index.md | 14 + .../cryptographic_primitives/scalar.mdx | 33 + .../cryptographic_primitives/schnorr.mdx | 45 + .../noir/standard_library/logging.md | 78 + .../noir/standard_library/merkle_trees.md | 58 + .../noir/standard_library/options.md | 101 + .../noir/standard_library/recursion.md | 88 + .../noir/standard_library/traits.md | 399 ++++ .../noir/standard_library/zeroed.md | 25 + .../version-v0.25.0/reference/_category_.json | 5 + 
.../reference/nargo_commands.md | 380 ++++ .../version-v0.25.0/tutorials/noirjs_app.md | 279 +++ .../version-v0.25.0-sidebars.json | 83 + flake.nix | 2 +- noir_stdlib/src/bigint.nr | 2 + noir_stdlib/src/collections/map.nr | 112 +- noir_stdlib/src/eddsa.nr | 31 +- noir_stdlib/src/hash/mimc.nr | 50 +- noir_stdlib/src/hash/poseidon.nr | 76 + noir_stdlib/src/hash/poseidon2.nr | 39 +- noir_stdlib/src/internal.nr | 12 + noir_stdlib/src/lib.nr | 1 + noirc_macros/Cargo.toml | 14 - noirc_macros/src/lib.rs | 73 - .../assert_msg_runtime/src/main.nr | 2 +- .../brillig_assert_msg_runtime/src/main.nr | 2 +- .../brillig_mut_ref_from_acir/src/main.nr | 2 +- .../contract_with_impl/src/main.nr | 2 +- .../simple_contract/src/main.nr | 5 +- .../execution_success/bigint/src/main.nr | 12 + .../brillig_cow_assign/Nargo.toml | 7 + .../brillig_cow_assign/Prover.toml | 2 + .../brillig_cow_assign/src/main.nr | 23 + .../brillig_cow_regression/src/main.nr | 42 +- .../double_verify_proof/Nargo.toml | 4 +- .../double_verify_proof/src/main.nr | 1 - .../double_verify_proof_recursive/Nargo.toml | 5 + .../double_verify_proof_recursive/Prover.toml | 5 + .../double_verify_proof_recursive/src/main.nr | 29 + .../execution_success/eddsa/src/main.nr | 10 +- .../execution_success/hashmap/src/main.nr | 182 +- .../execution_success/hashmap/src/utils.nr | 12 +- .../poseidon_bn254_hash/src/main.nr | 2 +- .../regression_4449/Nargo.toml | 6 + .../regression_4449/Prover.toml | 3 + .../regression_4449/src/main.nr | 14 + .../execution_success/slices/src/main.nr | 33 + .../Nargo.toml | 5 + .../test_libraries/exporting_lib/src/lib.nr | 1 - tooling/acvm_cli/Cargo.toml | 38 + tooling/acvm_cli/src/cli/execute_cmd.rs | 78 + tooling/acvm_cli/src/cli/fs/inputs.rs | 54 + tooling/acvm_cli/src/cli/fs/mod.rs | 2 + tooling/acvm_cli/src/cli/fs/witness.rs | 36 + tooling/acvm_cli/src/cli/mod.rs | 41 + tooling/acvm_cli/src/errors.rs | 52 + tooling/acvm_cli/src/main.rs | 36 + tooling/bb_abstraction_leaks/build.rs | 2 +- tooling/debugger/ignored-tests.txt | 10 +- tooling/debugger/src/context.rs | 107 +- tooling/debugger/src/dap.rs | 111 +- tooling/debugger/src/foreign_calls.rs | 46 +- tooling/debugger/src/repl.rs | 14 +- tooling/debugger/src/source_code_printer.rs | 10 +- tooling/debugger/tests/debug.rs | 2 +- tooling/lsp/src/requests/test_run.rs | 2 +- tooling/nargo/src/artifacts/contract.rs | 10 +- tooling/nargo/src/artifacts/debug.rs | 10 +- tooling/nargo/src/artifacts/debug_vars.rs | 96 +- tooling/nargo/src/ops/test.rs | 14 +- tooling/nargo_cli/src/cli/test_cmd.rs | 139 +- tooling/noir_codegen/package.json | 2 +- tooling/noir_js/package.json | 2 +- .../noir_js_backend_barretenberg/package.json | 4 +- tooling/noir_js_types/package.json | 2 +- tooling/noirc_abi_wasm/package.json | 2 +- yarn.lock | 10 +- 235 files changed, 13555 insertions(+), 3680 deletions(-) create mode 100644 acvm-repo/blackbox_solver/src/ecdsa/mod.rs create mode 100644 acvm-repo/blackbox_solver/src/ecdsa/secp256k1.rs create mode 100644 acvm-repo/blackbox_solver/src/ecdsa/secp256r1.rs create mode 100644 acvm-repo/blackbox_solver/src/hash.rs create mode 100644 aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs create mode 100644 aztec_macros/src/transforms/events.rs create mode 100644 aztec_macros/src/transforms/functions.rs create mode 100644 aztec_macros/src/transforms/mod.rs create mode 100644 aztec_macros/src/transforms/storage.rs create mode 100644 aztec_macros/src/utils/ast_utils.rs create mode 100644 aztec_macros/src/utils/checks.rs create mode 100644 
aztec_macros/src/utils/constants.rs create mode 100644 aztec_macros/src/utils/errors.rs create mode 100644 aztec_macros/src/utils/hir_utils.rs create mode 100644 aztec_macros/src/utils/mod.rs create mode 100644 docs/.markdownlint.json create mode 100644 docs/docs/noir/standard_library/bigint.md create mode 100644 docs/docs/noir/standard_library/containers/hashmap.md create mode 100644 docs/versioned_docs/version-v0.25.0/explainers/explainer-oracle.md create mode 100644 docs/versioned_docs/version-v0.25.0/explainers/explainer-recursion.md create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/index.md create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/project_breakdown.md create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/installation/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/installation/index.md create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/installation/other_install_methods.md create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/tooling/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/tooling/index.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/tooling/language_server.md create mode 100644 docs/versioned_docs/version-v0.25.0/getting_started/tooling/testing.md create mode 100644 docs/versioned_docs/version-v0.25.0/how_to/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/how_to/how-to-oracles.md create mode 100644 docs/versioned_docs/version-v0.25.0/how_to/how-to-recursion.md create mode 100644 docs/versioned_docs/version-v0.25.0/how_to/how-to-solidity-verifier.md create mode 100644 docs/versioned_docs/version-v0.25.0/how_to/merkle-proof.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/how_to/using-devcontainers.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/index.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/migration_notes.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/assert.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/comments.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/control_flow.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_bus.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/arrays.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/booleans.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/fields.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/function_types.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/index.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/integers.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/references.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/slices.mdx create mode 100644 
docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/strings.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/structs.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/tuples.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/distinct.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/functions.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/generics.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/globals.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/lambdas.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/mutability.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/ops.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/oracles.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/shadowing.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/traits.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/concepts/unconstrained.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/crates_and_packages.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/dependencies.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/modules.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/workspaces.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/black_box_fns.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/bn254.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/boundedvec.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/hashmap.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/index.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/vec.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/ec_primitives.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/eddsa.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/hashes.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/index.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/scalar.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/schnorr.mdx create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/logging.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/merkle_trees.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/options.md create mode 100644 
docs/versioned_docs/version-v0.25.0/noir/standard_library/recursion.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/traits.md create mode 100644 docs/versioned_docs/version-v0.25.0/noir/standard_library/zeroed.md create mode 100644 docs/versioned_docs/version-v0.25.0/reference/_category_.json create mode 100644 docs/versioned_docs/version-v0.25.0/reference/nargo_commands.md create mode 100644 docs/versioned_docs/version-v0.25.0/tutorials/noirjs_app.md create mode 100644 docs/versioned_sidebars/version-v0.25.0-sidebars.json create mode 100644 noir_stdlib/src/internal.nr delete mode 100644 noirc_macros/Cargo.toml delete mode 100644 noirc_macros/src/lib.rs create mode 100644 test_programs/execution_success/brillig_cow_assign/Nargo.toml create mode 100644 test_programs/execution_success/brillig_cow_assign/Prover.toml create mode 100644 test_programs/execution_success/brillig_cow_assign/src/main.nr create mode 100644 test_programs/execution_success/double_verify_proof_recursive/Nargo.toml create mode 100644 test_programs/execution_success/double_verify_proof_recursive/Prover.toml create mode 100644 test_programs/execution_success/double_verify_proof_recursive/src/main.nr create mode 100644 test_programs/execution_success/regression_4449/Nargo.toml create mode 100644 test_programs/execution_success/regression_4449/Prover.toml create mode 100644 test_programs/execution_success/regression_4449/src/main.nr create mode 100644 test_programs/noir_test_failure/should_fail_suite_with_one_failure/Nargo.toml create mode 100644 tooling/acvm_cli/Cargo.toml create mode 100644 tooling/acvm_cli/src/cli/execute_cmd.rs create mode 100644 tooling/acvm_cli/src/cli/fs/inputs.rs create mode 100644 tooling/acvm_cli/src/cli/fs/mod.rs create mode 100644 tooling/acvm_cli/src/cli/fs/witness.rs create mode 100644 tooling/acvm_cli/src/cli/mod.rs create mode 100644 tooling/acvm_cli/src/errors.rs create mode 100644 tooling/acvm_cli/src/main.rs diff --git a/.aztec-sync-commit b/.aztec-sync-commit index 6841c89b691..5a1cd9c70bd 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -7ff9b71d8d87fc93ae7dbd8ba63f5176b0cd17be +58e15edf7fd3d32267b0aed883fc84f6cee327c9 diff --git a/.github/JS_PUBLISH_FAILED.md b/.github/JS_PUBLISH_FAILED.md index 5b9f79aac1f..9adba2776c8 100644 --- a/.github/JS_PUBLISH_FAILED.md +++ b/.github/JS_PUBLISH_FAILED.md @@ -1,6 +1,6 @@ --- title: "JS packages failed to publish" -assignees: TomAFrench kevaundray savio-sou +assignees: TomAFrench kevaundray Savio-Sou labels: js --- diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index f72a97b2684..470db3b78f7 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -20,7 +20,9 @@ jobs: steps: - name: Checkout Noir repo uses: actions/checkout@v4 - + with: + ref: ${{ inputs.noir-ref }} + - name: Setup toolchain uses: dtolnay/rust-toolchain@1.73.0 @@ -87,6 +89,8 @@ jobs: steps: - name: Checkout sources uses: actions/checkout@v4 + with: + ref: ${{ inputs.noir-ref }} - name: Setup toolchain uses: dtolnay/rust-toolchain@1.73.0 @@ -164,4 +168,4 @@ jobs: WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} with: update_existing: true - filename: .github/JS_PUBLISH_FAILED.md \ No newline at end of file + filename: .github/JS_PUBLISH_FAILED.md diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 8916585d7f1..b38234ca0b9 100644 --- 
a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "0.24.0", - "acvm-repo": "0.40.0" + ".": "0.25.0", + "acvm-repo": "0.41.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index e9b2dfb48a5..cdbccf768ca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,82 @@ # Changelog +## [0.25.0](https://github.com/noir-lang/noir/compare/v0.24.0...v0.25.0) (2024-03-11) + + +### ⚠ BREAKING CHANGES + +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) +* reserve `unchecked` keyword ([#4432](https://github.com/noir-lang/noir/issues/4432)) +* Remove empty value from bounded vec ([#4431](https://github.com/noir-lang/noir/issues/4431)) +* Ban Fields in for loop indices and bitwise ops ([#4376](https://github.com/noir-lang/noir/issues/4376)) +* bump msrv to 1.73.0 ([#4406](https://github.com/noir-lang/noir/issues/4406)) +* **ci:** Bump MSRV to 1.72.1 and enforce that ACVM can be published using updated lockfile ([#4385](https://github.com/noir-lang/noir/issues/4385)) +* Restrict bit sizes ([#4235](https://github.com/noir-lang/noir/issues/4235)) +* move noir out of yarn-project (https://github.com/AztecProtocol/aztec-packages/pull/4479) +* note type ids (https://github.com/AztecProtocol/aztec-packages/pull/4500) + +### Features + +* Add eddsa_poseidon_to_pub function to stdlib with test + docs ([#4473](https://github.com/noir-lang/noir/issues/4473)) ([00d2c32](https://github.com/noir-lang/noir/commit/00d2c32e58176cc5de3574c8435a54d415c4a5fa)) +* Add HashMap to the stdlib ([#4242](https://github.com/noir-lang/noir/issues/4242)) ([650ffc5](https://github.com/noir-lang/noir/commit/650ffc5053cdca4b6ad2e027fa1f4fd90ef64871)) +* Add option to set max memory for bb.js ([#4227](https://github.com/noir-lang/noir/issues/4227)) ([8a6b131](https://github.com/noir-lang/noir/commit/8a6b131402892a570bc2de6f5869de73b0bd979e)) +* Add overflow and underflow checks for unsigned integers in brillig ([#4445](https://github.com/noir-lang/noir/issues/4445)) ([21fc4b8](https://github.com/noir-lang/noir/commit/21fc4b85763dccae6dce0a46a318718c3c913471)) +* Add poseidon2 opcode implementation for acvm/brillig, and Noir ([#4398](https://github.com/noir-lang/noir/issues/4398)) ([10e8292](https://github.com/noir-lang/noir/commit/10e82920798380f50046e52db4a20ca205191ab7)) +* Added cast opcode and cast calldata (https://github.com/AztecProtocol/aztec-packages/pull/4423) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Allow type aliases to reference other aliases ([#4353](https://github.com/noir-lang/noir/issues/4353)) ([c44ef14](https://github.com/noir-lang/noir/commit/c44ef14847a436733206b6dd9590a7ab214ecd97)) +* Backpropagate constants in ACIR during optimization ([#3926](https://github.com/noir-lang/noir/issues/3926)) ([aad0da0](https://github.com/noir-lang/noir/commit/aad0da024c69663f42e6913e674682d5864b26ae)) +* **ci:** Use wasm-opt when compiling wasm packages ([#4334](https://github.com/noir-lang/noir/issues/4334)) ([e382921](https://github.com/noir-lang/noir/commit/e3829213d8411f84e117a14b43816967925095e0)) +* DAP Preflight and debugger compilation options ([#4185](https://github.com/noir-lang/noir/issues/4185)) ([e0ad0b2](https://github.com/noir-lang/noir/commit/e0ad0b2b31f6d46be75d23aec6a82850a9c4bd75)) +* Expose separate functions to compile programs vs contracts in `noir_wasm` ([#4413](https://github.com/noir-lang/noir/issues/4413)) 
([7cd5fdb](https://github.com/noir-lang/noir/commit/7cd5fdb3d2a53475b7c8681231d517cab30f9f9b)) +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* Note type ids (https://github.com/AztecProtocol/aztec-packages/pull/4500) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Restrict bit sizes ([#4235](https://github.com/noir-lang/noir/issues/4235)) ([1048f81](https://github.com/noir-lang/noir/commit/1048f815abb1f27e9c84ab5b9568a3673c12a50a)) +* Run tests in parallel in `nargo test` ([#4484](https://github.com/noir-lang/noir/issues/4484)) ([761734e](https://github.com/noir-lang/noir/commit/761734e6cb3ff5911aa85d0cee96ad26092b4905)) +* Skip redundant range checks in brillig ([#4460](https://github.com/noir-lang/noir/issues/4460)) ([cb4c1c5](https://github.com/noir-lang/noir/commit/cb4c1c5264b95d01f69d99f916ced71ad9cdc9d1)) +* Sync from aztec-packages ([#4483](https://github.com/noir-lang/noir/issues/4483)) ([fe8f277](https://github.com/noir-lang/noir/commit/fe8f2776ccfde29209a2c3fc162311c99e4f59be)) +* Track stack frames and their variables in the debugger ([#4188](https://github.com/noir-lang/noir/issues/4188)) ([ae1a9d9](https://github.com/noir-lang/noir/commit/ae1a9d923998177516919bbba6ff4b0584fa1e9f)) +* TypeVariableKind for just Integers ([#4118](https://github.com/noir-lang/noir/issues/4118)) ([c956be8](https://github.com/noir-lang/noir/commit/c956be870fb47403a6da6585fce6bea2d40ee268)) +* Update error message when trying to load workspace as dependency ([#4393](https://github.com/noir-lang/noir/issues/4393)) ([d2585e7](https://github.com/noir-lang/noir/commit/d2585e738a63208fca3c9e26242e896d7f1df1e4)) + + +### Bug Fixes + +* **acir:** Array dynamic flatten ([#4351](https://github.com/noir-lang/noir/issues/4351)) ([b2aaeab](https://github.com/noir-lang/noir/commit/b2aaeab319a0c66c431a7db6852f743eccde8e98)) +* **acir:** Use types on dynamic arrays ([#4364](https://github.com/noir-lang/noir/issues/4364)) ([ba2c541](https://github.com/noir-lang/noir/commit/ba2c541ec45de92bba98de34771b73cbb7865c93)) +* Add `follow_bindings` to follow `Type::Alias` links ([#4521](https://github.com/noir-lang/noir/issues/4521)) ([b94adb9](https://github.com/noir-lang/noir/commit/b94adb92657e2b4a51dc7216a88e080aed1cf8b0)) +* Add handling to `noir_wasm` for projects without dependencies ([#4344](https://github.com/noir-lang/noir/issues/4344)) ([4982251](https://github.com/noir-lang/noir/commit/49822511710a7f1c42b8ed343e80456f8e6db2d9)) +* Allow type aliases in main ([#4505](https://github.com/noir-lang/noir/issues/4505)) ([8a5359c](https://github.com/noir-lang/noir/commit/8a5359c012579e54c2766de1074482a36ecada32)) +* Ban Fields in for loop indices and bitwise ops ([#4376](https://github.com/noir-lang/noir/issues/4376)) ([601fd9a](https://github.com/noir-lang/noir/commit/601fd9afc502236af1db0c4492698ba2298c7501)) +* Brillig range check with consistent bit size ([#4357](https://github.com/noir-lang/noir/issues/4357)) ([ea47d4a](https://github.com/noir-lang/noir/commit/ea47d4a67c6a18e4a7d3a49079d9eb24a1026a25)) +* Build noir_codegen when publishing ([#4448](https://github.com/noir-lang/noir/issues/4448)) ([cb1ceee](https://github.com/noir-lang/noir/commit/cb1ceee58b11b0ce6f8845361af3418d13c506bd)) +* Consistent bit size for truncate ([#4370](https://github.com/noir-lang/noir/issues/4370)) 
([dcd7a1e](https://github.com/noir-lang/noir/commit/dcd7a1e561a68504b9038ffbb3c80f5c981f9f0c)) +* Correct formatting for databus visibility types ([#4423](https://github.com/noir-lang/noir/issues/4423)) ([cd796de](https://github.com/noir-lang/noir/commit/cd796dea4937dd1a261f154e5f2e599bbc649165)) +* Correct invalid brillig codegen for `EmbeddedCurvePoint.add` ([#4382](https://github.com/noir-lang/noir/issues/4382)) ([5051ec4](https://github.com/noir-lang/noir/commit/5051ec4d434a9e5cf405c68357faaf213e68de9e)) +* **docs:** Update install versions ([#4396](https://github.com/noir-lang/noir/issues/4396)) ([b283637](https://github.com/noir-lang/noir/commit/b283637e092038eb296c468168aec2d41e1c2734)) +* **docs:** Update noirjs_app for 0.23 ([#4378](https://github.com/noir-lang/noir/issues/4378)) ([f77f702](https://github.com/noir-lang/noir/commit/f77f702e0cfb81dcce4dd97e274b831e887ba5d2)) +* Enforce matching types of binary ops in SSA ([#4391](https://github.com/noir-lang/noir/issues/4391)) ([70866ae](https://github.com/noir-lang/noir/commit/70866aea976d59dbcbd4af34067fdd8f46555673)) +* Fix brillig slowdown when assigning arrays in loops ([#4472](https://github.com/noir-lang/noir/issues/4472)) ([2a53545](https://github.com/noir-lang/noir/commit/2a53545f4238c9b8535e6bc5b0720fa15f44f946)) +* **flake:** Stop flake.nix removing ignored-tests.txt ([#4455](https://github.com/noir-lang/noir/issues/4455)) ([ebaf05a](https://github.com/noir-lang/noir/commit/ebaf05ab10834dd10e04c7ea5130f96c6cdf98ed)) +* Force src impl for == on slices ([#4507](https://github.com/noir-lang/noir/issues/4507)) ([1691274](https://github.com/noir-lang/noir/commit/169127444e8b16a8aad4acfe29ba812894fd897c)) +* Handling of gh deps in noir_wasm ([#4499](https://github.com/noir-lang/noir/issues/4499)) ([1d65370](https://github.com/noir-lang/noir/commit/1d653704715bf9999eb6a40ed7500e752e2c73b7)) +* Iterative flattening pass ([#4492](https://github.com/noir-lang/noir/issues/4492)) ([33c1ef7](https://github.com/noir-lang/noir/commit/33c1ef70e7859fdee7babfb5d38191f53e73a0df)) +* Noir test incorrect reporting (https://github.com/AztecProtocol/aztec-packages/pull/4925) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* Only add `.nr` files to file manager ([#4380](https://github.com/noir-lang/noir/issues/4380)) ([8536c7c](https://github.com/noir-lang/noir/commit/8536c7c8ea8fc6b740b2ae6d1aef3bc7e1907b8c)) +* Remove panic when generic array length is not resolvable ([#4408](https://github.com/noir-lang/noir/issues/4408)) ([00ab3db](https://github.com/noir-lang/noir/commit/00ab3db86b06111d144516e862902b8604284611)) +* Remove print from monomorphization pass ([#4417](https://github.com/noir-lang/noir/issues/4417)) ([27c66b3](https://github.com/noir-lang/noir/commit/27c66b3d0741e68ed591ae8a16b47b30bc87175f)) +* **ssa:** Handle mergers of slices returned from calls ([#4496](https://github.com/noir-lang/noir/issues/4496)) ([f988d02](https://github.com/noir-lang/noir/commit/f988d020e43cdf36a38613f2052d4518de39193a)) +* Use correct type for numeric generics ([#4386](https://github.com/noir-lang/noir/issues/4386)) ([0a1d109](https://github.com/noir-lang/noir/commit/0a1d109f478c997da5c43876fd12464af638bb15)) +* Variables from trait constraints being permanently bound over when used within a trait impl ([#4450](https://github.com/noir-lang/noir/issues/4450)) ([ac60ef5](https://github.com/noir-lang/noir/commit/ac60ef5e12fcfb907fbdcff709d7cbad05f2b939)) + + +### Miscellaneous Chores + +* Bump msrv to 
1.73.0 ([#4406](https://github.com/noir-lang/noir/issues/4406)) ([b5e5c30](https://github.com/noir-lang/noir/commit/b5e5c30f4db52c79ef556e80660f39db369b1911)) +* **ci:** Bump MSRV to 1.72.1 and enforce that ACVM can be published using updated lockfile ([#4385](https://github.com/noir-lang/noir/issues/4385)) ([2fc95d2](https://github.com/noir-lang/noir/commit/2fc95d2d82b3220267ce7d5815e7073e00ef1360)) +* Move noir out of yarn-project (https://github.com/AztecProtocol/aztec-packages/pull/4479) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Remove empty value from bounded vec ([#4431](https://github.com/noir-lang/noir/issues/4431)) ([b9384fb](https://github.com/noir-lang/noir/commit/b9384fb23abf4ab15e880fb7e03c21509a9fa8a6)) +* Reserve `unchecked` keyword ([#4432](https://github.com/noir-lang/noir/issues/4432)) ([9544813](https://github.com/noir-lang/noir/commit/9544813fabbd18a87dd88456e6a5b781bd0cf008)) + ## [0.24.0](https://github.com/noir-lang/noir/compare/v0.23.0...v0.24.0) (2024-02-12) diff --git a/Cargo.lock b/Cargo.lock index b2b6f8037bb..d5ce8a10509 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,7 +4,7 @@ version = 3 [[package]] name = "acir" -version = "0.40.0" +version = "0.41.0" dependencies = [ "acir_field", "base64 0.21.2", @@ -23,7 +23,7 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.40.0" +version = "0.41.0" dependencies = [ "ark-bls12-381", "ark-bn254", @@ -37,7 +37,7 @@ dependencies = [ [[package]] name = "acvm" -version = "0.40.0" +version = "0.41.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -53,7 +53,7 @@ dependencies = [ [[package]] name = "acvm_blackbox_solver" -version = "0.40.0" +version = "0.41.0" dependencies = [ "acir", "blake2", @@ -67,8 +67,28 @@ dependencies = [ ] [[package]] -name = "acvm_js" +name = "acvm_cli" version = "0.40.0" +dependencies = [ + "acir", + "acvm", + "bn254_blackbox_solver", + "clap", + "color-eyre", + "const_format", + "nargo", + "paste", + "proptest", + "rand 0.8.5", + "thiserror", + "toml 0.7.6", + "tracing-appender", + "tracing-subscriber", +] + +[[package]] +name = "acvm_js" +version = "0.41.0" dependencies = [ "acvm", "bn254_blackbox_solver", @@ -212,7 +232,7 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "arena" -version = "0.24.0" +version = "0.25.0" [[package]] name = "ark-bls12-381" @@ -391,7 +411,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "138985dd8aefbefeaa66b01b7f5b2b6b4c333fcef1cc5f32c63a2aabe37d6de3" dependencies = [ - "futures 0.3.28", + "futures 0.3.30", "lsp-types 0.94.1", "pin-project-lite", "rustix", @@ -413,7 +433,7 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "aztec_macros" -version = "0.24.0" +version = "0.25.0" dependencies = [ "convert_case 0.6.0", "iter-extended", @@ -521,9 +541,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.3.3" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "bitmaps" @@ -602,7 +622,7 @@ dependencies = [ [[package]] name = "brillig" -version = "0.40.0" +version = "0.41.0" dependencies = [ "acir_field", "serde", @@ -610,7 +630,7 @@ dependencies = [ [[package]] name = 
"brillig_vm" -version = "0.40.0" +version = "0.41.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -947,14 +967,14 @@ checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" [[package]] name = "console" -version = "0.15.7" +version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" dependencies = [ "encode_unicode 0.3.6", "lazy_static", "libc", - "windows-sys 0.45.0", + "windows-sys 0.52.0", ] [[package]] @@ -1585,12 +1605,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.5" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1706,7 +1726,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.24.0" +version = "0.25.0" dependencies = [ "codespan-reporting", "iter-extended", @@ -1743,9 +1763,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", @@ -1758,9 +1778,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", @@ -1768,15 +1788,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -1786,15 +1806,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-macro" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", @@ -1803,21 +1823,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.28" +version 
= "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-util" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures 0.1.31", "futures-channel", @@ -2076,9 +2096,9 @@ checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" [[package]] name = "hyper" -version = "0.14.27" +version = "0.14.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" dependencies = [ "bytes", "futures-channel", @@ -2091,7 +2111,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.9", + "socket2", "tokio", "tower-service", "tracing", @@ -2100,9 +2120,9 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http", @@ -2298,7 +2318,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.24.0" +version = "0.25.0" [[package]] name = "itertools" @@ -2343,7 +2363,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2b99d4207e2a04fb4581746903c2bb7eb376f88de9c699d0f3e10feeac0cd3a" dependencies = [ "derive_more", - "futures 0.3.28", + "futures 0.3.30", "jsonrpc-core", "jsonrpc-pubsub", "log", @@ -2358,7 +2378,7 @@ version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" dependencies = [ - "futures 0.3.28", + "futures 0.3.30", "futures-executor", "futures-util", "log", @@ -2373,7 +2393,7 @@ version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b51da17abecbdab3e3d4f26b01c5ec075e88d3abe3ab3b05dc9aa69392764ec0" dependencies = [ - "futures 0.3.28", + "futures 0.3.30", "jsonrpc-client-transports", ] @@ -2395,7 +2415,7 @@ version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1dea6e07251d9ce6a552abfb5d7ad6bc290a4596c8dcc3d795fae2bbdc1f3ff" dependencies = [ - "futures 0.3.28", + "futures 0.3.30", "hyper", "jsonrpc-core", "jsonrpc-server-utils", @@ -2411,7 +2431,7 @@ version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240f87695e6c6f62fb37f05c02c04953cf68d6408b8c1c89de85c7a0125b1011" dependencies = [ - "futures 0.3.28", + "futures 0.3.30", "jsonrpc-core", "lazy_static", "log", @@ -2427,7 +2447,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"fa4fdea130485b572c39a460d50888beb00afb3e35de23ccd7fad8ff19f0e0d4" dependencies = [ "bytes", - "futures 0.3.28", + "futures 0.3.30", "globset", "jsonrpc-core", "lazy_static", @@ -2485,9 +2505,9 @@ checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" [[package]] name = "linux-raw-sys" -version = "0.4.3" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lock_api" @@ -2651,7 +2671,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "codespan-reporting", @@ -2678,7 +2698,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "assert_cmd", @@ -2730,7 +2750,7 @@ dependencies = [ [[package]] name = "nargo_fmt" -version = "0.24.0" +version = "0.25.0" dependencies = [ "bytecount", "noirc_frontend", @@ -2742,7 +2762,7 @@ dependencies = [ [[package]] name = "nargo_toml" -version = "0.24.0" +version = "0.25.0" dependencies = [ "dirs", "fm", @@ -2815,7 +2835,7 @@ dependencies = [ [[package]] name = "noir_debugger" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "assert_cmd", @@ -2850,7 +2870,7 @@ dependencies = [ [[package]] name = "noir_lsp" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "async-lsp", @@ -2876,7 +2896,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "build-data", @@ -2899,7 +2919,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "iter-extended", @@ -2916,7 +2936,7 @@ dependencies = [ [[package]] name = "noirc_abi_wasm" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "build-data", @@ -2933,7 +2953,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "aztec_macros", @@ -2946,7 +2966,6 @@ dependencies = [ "noirc_errors", "noirc_evaluator", "noirc_frontend", - "noirc_macros", "rust-embed", "serde", "thiserror", @@ -2955,7 +2974,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "base64 0.21.2", @@ -2973,7 +2992,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "fxhash", @@ -2989,7 +3008,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "arena", @@ -3012,17 +3031,9 @@ dependencies = [ "tracing", ] -[[package]] -name = "noirc_macros" -version = "0.24.0" -dependencies = [ - "iter-extended", - "noirc_frontend", -] - [[package]] name = "noirc_printable_type" -version = "0.24.0" +version = "0.25.0" dependencies = [ "acvm", "iter-extended", @@ -3460,7 +3471,7 @@ checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.3.3", + "bitflags 2.4.2", "lazy_static", "num-traits", "rand 0.8.5", @@ -3922,15 +3933,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.4" +version = "0.38.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" 
+checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.2", "errno", "libc", "linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -4301,9 +4312,9 @@ dependencies = [ [[package]] name = "shared-buffer" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cf61602ee61e2f83dd016b3e6387245291cf728ea071c378b35088125b4d995" +checksum = "f6c99835bad52957e7aa241d3975ed17c1e5f8c92026377d117a606f36b84b16" dependencies = [ "bytes", "memmap2 0.6.2", @@ -4421,16 +4432,6 @@ dependencies = [ "serde", ] -[[package]] -name = "socket2" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "socket2" version = "0.5.5" @@ -4742,7 +4743,7 @@ dependencies = [ "mio", "num_cpus", "pin-project-lite", - "socket2 0.5.5", + "socket2", "tokio-macros", "windows-sys 0.48.0", ] @@ -5258,9 +5259,9 @@ dependencies = [ [[package]] name = "wasmer" -version = "4.2.4" +version = "4.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce45cc009177ca345a6d041f9062305ad467d15e7d41494f5b81ab46d62d7a58" +checksum = "5c15724dc25d1ee57962334aea8e41ade2675e5ea2ac6b8d42da6051b0face66" dependencies = [ "bytes", "cfg-if 1.0.0", @@ -5274,23 +5275,23 @@ dependencies = [ "shared-buffer", "target-lexicon", "thiserror", + "tracing", "wasm-bindgen", "wasmer-compiler", "wasmer-compiler-cranelift", "wasmer-derive", "wasmer-types", "wasmer-vm", - "wasmparser 0.83.0", - "wasmparser 0.95.0", + "wasmparser", "wat", "winapi", ] [[package]] name = "wasmer-compiler" -version = "4.2.4" +version = "4.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e044f6140c844602b920deb4526aea3cc9c0d7cf23f00730bb9b2034669f522a" +checksum = "55a7f3b3a96f8d844c25e2c032af9572306dd63fa93dc17bcca4c5458ac569bd" dependencies = [ "backtrace", "bytes", @@ -5309,15 +5310,15 @@ dependencies = [ "thiserror", "wasmer-types", "wasmer-vm", - "wasmparser 0.95.0", + "wasmparser", "winapi", ] [[package]] name = "wasmer-compiler-cranelift" -version = "4.2.4" +version = "4.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32ce02358eb44a149d791c1d6648fb7f8b2f99cd55e3c4eef0474653ec8cc889" +checksum = "102e2c5bacac69495c4025767e2fa26797ffb27f242dccb7cf57d9cefd944386" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -5334,9 +5335,9 @@ dependencies = [ [[package]] name = "wasmer-derive" -version = "4.2.4" +version = "4.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c782d80401edb08e1eba206733f7859db6c997fc5a7f5fb44edc3ecd801468f6" +checksum = "0ea737fa08f95d6abc4459f42a70a9833e8974b814e74971d77ef473814f4d4c" dependencies = [ "proc-macro-error", "proc-macro2", @@ -5346,9 +5347,9 @@ dependencies = [ [[package]] name = "wasmer-types" -version = "4.2.4" +version = "4.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd09e80d4d74bb9fd0ce6c3c106b1ceba1a050f9948db9d9b78ae53c172d6157" +checksum = "b0689110e291b0f07fc665f2824e5ff81df120848e8a9acfbf1a9bf7990773f9" dependencies = [ "bytecheck", "enum-iterator", @@ -5362,9 +5363,9 @@ dependencies = [ [[package]] name = "wasmer-vm" -version = "4.2.4" +version = "4.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bdcd8a4fd36414a7b6a003dbfbd32393bce3e155d715dd877c05c1b7a41d224d" +checksum = "4cd41f822a1ac4242d478754e8ceba2806a00ea5072803622e1fe91e8e28b2a1" dependencies = [ "backtrace", "cc", @@ -5390,18 +5391,13 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.83.0" +version = "0.121.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "718ed7c55c2add6548cca3ddd6383d738cd73b892df400e96b9aa876f0141d7a" - -[[package]] -name = "wasmparser" -version = "0.95.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2ea896273ea99b15132414be1da01ab0d8836415083298ecaffbe308eaac87a" +checksum = "9dbe55c8f9d0dbd25d9447a5a889ff90c0cc3feaa7395310d3d826b2c703eaab" dependencies = [ - "indexmap 1.9.3", - "url 2.4.0", + "bitflags 2.4.2", + "indexmap 2.0.0", + "semver", ] [[package]] @@ -5494,15 +5490,6 @@ dependencies = [ "windows_x86_64_msvc 0.33.0", ] -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - [[package]] name = "windows-sys" version = "0.48.0" @@ -5513,18 +5500,12 @@ dependencies = [ ] [[package]] -name = "windows-targets" -version = "0.42.2" +name = "windows-sys" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows-targets 0.52.4", ] [[package]] @@ -5543,10 +5524,19 @@ dependencies = [ ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" +name = "windows-targets" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] [[package]] name = "windows_aarch64_gnullvm" @@ -5554,6 +5544,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" + [[package]] name = "windows_aarch64_msvc" version = "0.33.0" @@ -5562,15 +5558,15 @@ checksum = "cd761fd3eb9ab8cc1ed81e56e567f02dd82c4c837e48ac3b2181b9ffc5060807" [[package]] name = "windows_aarch64_msvc" -version = "0.42.2" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" [[package]] name = "windows_i686_gnu" @@ -5580,15 +5576,15 @@ checksum = "cab0cf703a96bab2dc0c02c0fa748491294bf9b7feb27e1f4f96340f208ada0e" [[package]] name = "windows_i686_gnu" -version = "0.42.2" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" [[package]] name = "windows_i686_msvc" @@ -5598,15 +5594,15 @@ checksum = "8cfdbe89cc9ad7ce618ba34abc34bbb6c36d99e96cae2245b7943cd75ee773d0" [[package]] name = "windows_i686_msvc" -version = "0.42.2" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" [[package]] name = "windows_x86_64_gnu" @@ -5616,27 +5612,27 @@ checksum = "b4dd9b0c0e9ece7bb22e84d70d01b71c6d6248b81a3c60d11869451b4cb24784" [[package]] name = "windows_x86_64_gnu" -version = "0.42.2" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.2" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" [[package]] name = "windows_x86_64_msvc" @@ -5646,15 +5642,15 @@ checksum = "ff1e4aa646495048ec7f3ffddc411e1d829c026a2ec62b39da15c1055e406eaa" [[package]] name = "windows_x86_64_msvc" -version = "0.42.2" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.52.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" [[package]] name = "winnow" diff --git a/Cargo.toml b/Cargo.toml index 7d5da7b00d0..b8f9b9ceacc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,9 +1,8 @@ [workspace] members = [ - # Macros crates for metaprogramming + # Aztec Macro crate for metaprogramming "aztec_macros", - "noirc_macros", # Compiler crates "compiler/noirc_evaluator", "compiler/noirc_frontend", @@ -26,6 +25,7 @@ members = [ "tooling/nargo_toml", "tooling/noirc_abi", "tooling/noirc_abi_wasm", + "tooling/acvm_cli", # ACVM "acvm-repo/acir_field", "acvm-repo/acir", @@ -36,12 +36,12 @@ members = [ "acvm-repo/blackbox_solver", "acvm-repo/bn254_blackbox_solver", ] -default-members = ["tooling/nargo_cli"] +default-members = ["tooling/nargo_cli", "tooling/acvm_cli"] resolver = "2" [workspace.package] # x-release-please-start-version -version = "0.24.0" +version = "0.25.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" @@ -52,12 +52,12 @@ repository = "https://github.com/noir-lang/noir/" [workspace.dependencies] # ACVM workspace dependencies -acir_field = { version = "0.40.0", path = "acvm-repo/acir_field", default-features = false } -acir = { version = "0.40.0", path = "acvm-repo/acir", default-features = false } -acvm = { version = "0.40.0", path = "acvm-repo/acvm" } -brillig = { version = "0.40.0", path = "acvm-repo/brillig", default-features = false } -brillig_vm = { version = "0.40.0", path = "acvm-repo/brillig_vm", default-features = false } -acvm_blackbox_solver = { version = "0.40.0", path = "acvm-repo/blackbox_solver", default-features = false } +acir_field = { version = "0.41.0", path = "acvm-repo/acir_field", default-features = false } +acir = { version = "0.41.0", path = "acvm-repo/acir", default-features = false } +acvm = { version = "0.41.0", path = "acvm-repo/acvm" } +brillig = { version = "0.41.0", path = "acvm-repo/brillig", default-features = false } +brillig_vm = { version = "0.41.0", path = "acvm-repo/brillig_vm", default-features = false } +acvm_blackbox_solver = { version = "0.41.0", path = "acvm-repo/blackbox_solver", default-features = false } bn254_blackbox_solver = { version = "0.39.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false } # Noir compiler workspace dependencies @@ -78,6 +78,7 @@ noir_lsp = { path = "tooling/lsp" } noir_debugger = { path = "tooling/debugger" } noirc_abi = { path = "tooling/noirc_abi" } bb_abstraction_leaks = { path = "tooling/bb_abstraction_leaks" } +acvm_cli = { path = "tooling/acvm_cli" } # LSP async-lsp = { version = "0.1.0", default-features = false } diff --git a/acvm-repo/CHANGELOG.md b/acvm-repo/CHANGELOG.md index acb465e5cc9..4f220d6eeba 100644 --- a/acvm-repo/CHANGELOG.md +++ b/acvm-repo/CHANGELOG.md @@ -5,6 +5,70 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [0.41.0](https://github.com/noir-lang/noir/compare/v0.40.0...v0.41.0) (2024-03-11) + + +### ⚠ BREAKING CHANGES + +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) +* move noir out of yarn-project (https://github.com/AztecProtocol/aztec-packages/pull/4479) +* note type ids (https://github.com/AztecProtocol/aztec-packages/pull/4500) +* rename bigint_neg into bigint_sub (https://github.com/AztecProtocol/aztec-packages/pull/4420) +* Add expression width into acir (https://github.com/AztecProtocol/aztec-packages/pull/4014) +* init storage macro (https://github.com/AztecProtocol/aztec-packages/pull/4200) +* **acir:** Move `is_recursive` flag to be part of the circuit definition (https://github.com/AztecProtocol/aztec-packages/pull/4221) +* Sync commits from `aztec-packages` ([#4144](https://github.com/noir-lang/noir/issues/4144)) +* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) +* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840)) +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) + +### Features + +* Add bit size to const opcode (https://github.com/AztecProtocol/aztec-packages/pull/4385) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Add expression width into acir (https://github.com/AztecProtocol/aztec-packages/pull/4014) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Add instrumentation for tracking variables in debugging ([#4122](https://github.com/noir-lang/noir/issues/4122)) ([c58d691](https://github.com/noir-lang/noir/commit/c58d69141b54a918cd1675400c00bfd48720f896)) +* Add poseidon2 opcode implementation for acvm/brillig, and Noir ([#4398](https://github.com/noir-lang/noir/issues/4398)) ([10e8292](https://github.com/noir-lang/noir/commit/10e82920798380f50046e52db4a20ca205191ab7)) +* Add support for overriding expression width ([#4117](https://github.com/noir-lang/noir/issues/4117)) ([c8026d5](https://github.com/noir-lang/noir/commit/c8026d557d535b10fe455165d6445076df7a03de)) +* Added cast opcode and cast calldata (https://github.com/AztecProtocol/aztec-packages/pull/4423) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Allow brillig to read arrays directly from memory (https://github.com/AztecProtocol/aztec-packages/pull/4460) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Allow nested arrays and vectors in Brillig foreign calls (https://github.com/AztecProtocol/aztec-packages/pull/4478) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Allow variables and stack trace inspection in the debugger ([#4184](https://github.com/noir-lang/noir/issues/4184)) ([bf263fc](https://github.com/noir-lang/noir/commit/bf263fc8d843940f328a90f6366edd2671fb2682)) +* **avm:** Back in avm context with macro - refactor context (https://github.com/AztecProtocol/aztec-packages/pull/4438) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* **aztec-nr:** Initial work for aztec public vm macro (https://github.com/AztecProtocol/aztec-packages/pull/4400) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* 
Aztec-packages ([#3754](https://github.com/noir-lang/noir/issues/3754)) ([c043265](https://github.com/noir-lang/noir/commit/c043265e550b59bd4296504826fe15d3ce3e9ad2)) +* Backpropagate constants in ACIR during optimization ([#3926](https://github.com/noir-lang/noir/issues/3926)) ([aad0da0](https://github.com/noir-lang/noir/commit/aad0da024c69663f42e6913e674682d5864b26ae)) +* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) ([5be049e](https://github.com/noir-lang/noir/commit/5be049eee6c342649462282ee04f6411e6ea392c)) +* Evaluation of dynamic assert messages ([#4101](https://github.com/noir-lang/noir/issues/4101)) ([c284e01](https://github.com/noir-lang/noir/commit/c284e01bfe20ceae4414dc123624b5cbb8b66d09)) +* Init storage macro (https://github.com/AztecProtocol/aztec-packages/pull/4200) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* Note type ids (https://github.com/AztecProtocol/aztec-packages/pull/4500) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Remove range constraints from witnesses which are constrained to be constants ([#3928](https://github.com/noir-lang/noir/issues/3928)) ([afe9c7a](https://github.com/noir-lang/noir/commit/afe9c7a38bb9d4245205d3aa46d4ce23d70a5671)) +* Remove replacement of boolean range opcodes with `AssertZero` opcodes ([#4107](https://github.com/noir-lang/noir/issues/4107)) ([dac0e87](https://github.com/noir-lang/noir/commit/dac0e87ee3be3446b92bbb12ef4832fd493fcee3)) +* Speed up transformation of debug messages ([#3815](https://github.com/noir-lang/noir/issues/3815)) ([2a8af1e](https://github.com/noir-lang/noir/commit/2a8af1e4141ffff61547ee1c2837a6392bd5db48)) +* Sync `aztec-packages` ([#4011](https://github.com/noir-lang/noir/issues/4011)) ([fee2452](https://github.com/noir-lang/noir/commit/fee24523c427c27f0bdaf98ea09a852a2da3e94c)) +* Sync commits from `aztec-packages` ([#4068](https://github.com/noir-lang/noir/issues/4068)) ([7a8f3a3](https://github.com/noir-lang/noir/commit/7a8f3a33b57875e681e3d81e667e3570a1cdbdcc)) +* Sync commits from `aztec-packages` ([#4144](https://github.com/noir-lang/noir/issues/4144)) ([0205d3b](https://github.com/noir-lang/noir/commit/0205d3b4ad0cf5ffd775a43eb5af273a772cf138)) +* Sync from aztec-packages ([#4483](https://github.com/noir-lang/noir/issues/4483)) ([fe8f277](https://github.com/noir-lang/noir/commit/fe8f2776ccfde29209a2c3fc162311c99e4f59be)) + + +### Bug Fixes + +* Deserialize odd length hex literals ([#3747](https://github.com/noir-lang/noir/issues/3747)) ([4000fb2](https://github.com/noir-lang/noir/commit/4000fb279221eb07187d657bfaa7f1c7b311abf2)) +* Noir test incorrect reporting (https://github.com/AztecProtocol/aztec-packages/pull/4925) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* Remove panic from `init_log_level` in `acvm_js` ([#4195](https://github.com/noir-lang/noir/issues/4195)) ([2e26530](https://github.com/noir-lang/noir/commit/2e26530bf53006c1ed4fee310bcaa905c95dd95b)) +* Return error rather instead of panicking on invalid circuit ([#3976](https://github.com/noir-lang/noir/issues/3976)) ([67201bf](https://github.com/noir-lang/noir/commit/67201bfc21a9c8858aa86be9cd47d463fb78d925)) + + +### Miscellaneous Chores + +* **acir:** Move 
`is_recursive` flag to be part of the circuit definition (https://github.com/AztecProtocol/aztec-packages/pull/4221) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Move noir out of yarn-project (https://github.com/AztecProtocol/aztec-packages/pull/4479) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) ([0383100](https://github.com/noir-lang/noir/commit/0383100853a80a5b28b797cdfeae0d271f1b7805)) +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) ([9e5d0e8](https://github.com/noir-lang/noir/commit/9e5d0e813d61a0bfb5ee68174ed287c5a20f1579)) +* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840)) ([836f171](https://github.com/noir-lang/noir/commit/836f17145c2901060706294461c2d282dd121b3e)) +* Rename bigint_neg into bigint_sub (https://github.com/AztecProtocol/aztec-packages/pull/4420) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) + ## [0.40.0](https://github.com/noir-lang/noir/compare/v0.39.0...v0.40.0) (2024-02-12) diff --git a/acvm-repo/acir/Cargo.toml b/acvm-repo/acir/Cargo.toml index 7021333486f..be859d7d054 100644 --- a/acvm-repo/acir/Cargo.toml +++ b/acvm-repo/acir/Cargo.toml @@ -2,7 +2,7 @@ name = "acir" description = "ACIR is the IR that the VM processes, it is analogous to LLVM IR" # x-release-please-start-version -version = "0.40.0" +version = "0.41.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acir_field/Cargo.toml b/acvm-repo/acir_field/Cargo.toml index 6f4971770bd..c2056b73277 100644 --- a/acvm-repo/acir_field/Cargo.toml +++ b/acvm-repo/acir_field/Cargo.toml @@ -2,7 +2,7 @@ name = "acir_field" description = "The field implementation being used by ACIR." # x-release-please-start-version -version = "0.40.0" +version = "0.41.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acvm/Cargo.toml b/acvm-repo/acvm/Cargo.toml index fce9a8e8e8b..d585850170a 100644 --- a/acvm-repo/acvm/Cargo.toml +++ b/acvm-repo/acvm/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm" description = "The virtual machine that processes ACIR given a backend/proof system." 
# x-release-please-start-version -version = "0.40.0" +version = "0.41.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acvm_js/Cargo.toml b/acvm-repo/acvm_js/Cargo.toml index 7ec814a72e5..63fca2bd32a 100644 --- a/acvm-repo/acvm_js/Cargo.toml +++ b/acvm-repo/acvm_js/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_js" description = "Typescript wrapper around the ACVM allowing execution of ACIR code" # x-release-please-start-version -version = "0.40.0" +version = "0.41.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acvm_js/package.json b/acvm-repo/acvm_js/package.json index 876db9ccb62..0a9cd7235f5 100644 --- a/acvm-repo/acvm_js/package.json +++ b/acvm-repo/acvm_js/package.json @@ -1,6 +1,6 @@ { "name": "@noir-lang/acvm_js", - "version": "0.40.0", + "version": "0.41.0", "publishConfig": { "access": "public" }, diff --git a/acvm-repo/blackbox_solver/Cargo.toml b/acvm-repo/blackbox_solver/Cargo.toml index 0794b2dbe7e..a783193edba 100644 --- a/acvm-repo/blackbox_solver/Cargo.toml +++ b/acvm-repo/blackbox_solver/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_blackbox_solver" description = "A solver for the blackbox functions found in ACIR and Brillig" # x-release-please-start-version -version = "0.40.0" +version = "0.41.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/blackbox_solver/src/ecdsa/mod.rs b/acvm-repo/blackbox_solver/src/ecdsa/mod.rs new file mode 100644 index 00000000000..cb3134bf0ab --- /dev/null +++ b/acvm-repo/blackbox_solver/src/ecdsa/mod.rs @@ -0,0 +1,22 @@ +use crate::BlackBoxResolutionError; + +mod secp256k1; +mod secp256r1; + +pub fn ecdsa_secp256k1_verify( + hashed_msg: &[u8], + public_key_x: &[u8; 32], + public_key_y: &[u8; 32], + signature: &[u8; 64], +) -> Result { + Ok(secp256k1::verify_signature(hashed_msg, public_key_x, public_key_y, signature)) +} + +pub fn ecdsa_secp256r1_verify( + hashed_msg: &[u8], + public_key_x: &[u8; 32], + public_key_y: &[u8; 32], + signature: &[u8; 64], +) -> Result { + Ok(secp256r1::verify_signature(hashed_msg, public_key_x, public_key_y, signature)) +} diff --git a/acvm-repo/blackbox_solver/src/ecdsa/secp256k1.rs b/acvm-repo/blackbox_solver/src/ecdsa/secp256k1.rs new file mode 100644 index 00000000000..17c51353f7f --- /dev/null +++ b/acvm-repo/blackbox_solver/src/ecdsa/secp256k1.rs @@ -0,0 +1,137 @@ +use k256::elliptic_curve::sec1::FromEncodedPoint; +use k256::elliptic_curve::PrimeField; + +use blake2::digest::generic_array::GenericArray; +use k256::{ecdsa::Signature, Scalar}; +use k256::{ + elliptic_curve::{ + sec1::{Coordinates, ToEncodedPoint}, + IsHigh, + }, + AffinePoint, EncodedPoint, ProjectivePoint, PublicKey, +}; + +pub(super) fn verify_signature( + hashed_msg: &[u8], + public_key_x_bytes: &[u8; 32], + public_key_y_bytes: &[u8; 32], + signature: &[u8; 64], +) -> bool { + // Convert the inputs into k256 data structures + let Ok(signature) = Signature::try_from(signature.as_slice()) else { + // Signature `r` and `s` are forbidden from being zero. + return false; + }; + + let point = EncodedPoint::from_affine_coordinates( + public_key_x_bytes.into(), + public_key_y_bytes.into(), + true, + ); + + let pubkey = PublicKey::from_encoded_point(&point); + let pubkey = if pubkey.is_some().into() { + pubkey.unwrap() + } else { + // Public key must sit on the Secp256k1 curve. + return false; + }; + + // Note: This is incorrect as it will panic if `hashed_msg >= k256::Secp256k1::ORDER`. 
+ // In this scenario we should just take the leftmost bits from `hashed_msg` up to the group order length. + let z = Scalar::from_repr(*GenericArray::from_slice(hashed_msg)).unwrap(); + + // Finished converting bytes into data structures + + let r = signature.r(); + let s = signature.s(); + + // Ensure signature is "low S" normalized ala BIP 0062 + if s.is_high().into() { + return false; + } + + let s_inv = s.invert().unwrap(); + let u1 = z * s_inv; + let u2 = *r * s_inv; + + #[allow(non_snake_case)] + let R: AffinePoint = ((ProjectivePoint::GENERATOR * u1) + + (ProjectivePoint::from(*pubkey.as_affine()) * u2)) + .to_affine(); + + match R.to_encoded_point(false).coordinates() { + Coordinates::Uncompressed { x, y: _ } => Scalar::from_repr(*x).unwrap().eq(&r), + _ => unreachable!("Point is uncompressed"), + } +} + +#[cfg(test)] +mod secp256k1_tests { + use super::verify_signature; + + // 0x3a73f4123a5cd2121f21cd7e8d358835476949d035d9c2da6806b4633ac8c1e2, + const HASHED_MESSAGE: [u8; 32] = [ + 0x3a, 0x73, 0xf4, 0x12, 0x3a, 0x5c, 0xd2, 0x12, 0x1f, 0x21, 0xcd, 0x7e, 0x8d, 0x35, 0x88, + 0x35, 0x47, 0x69, 0x49, 0xd0, 0x35, 0xd9, 0xc2, 0xda, 0x68, 0x06, 0xb4, 0x63, 0x3a, 0xc8, + 0xc1, 0xe2, + ]; + // 0xa0434d9e47f3c86235477c7b1ae6ae5d3442d49b1943c2b752a68e2a47e247c7 + const PUB_KEY_X: [u8; 32] = [ + 0xa0, 0x43, 0x4d, 0x9e, 0x47, 0xf3, 0xc8, 0x62, 0x35, 0x47, 0x7c, 0x7b, 0x1a, 0xe6, 0xae, + 0x5d, 0x34, 0x42, 0xd4, 0x9b, 0x19, 0x43, 0xc2, 0xb7, 0x52, 0xa6, 0x8e, 0x2a, 0x47, 0xe2, + 0x47, 0xc7, + ]; + // 0x893aba425419bc27a3b6c7e693a24c696f794c2ed877a1593cbee53b037368d7 + const PUB_KEY_Y: [u8; 32] = [ + 0x89, 0x3a, 0xba, 0x42, 0x54, 0x19, 0xbc, 0x27, 0xa3, 0xb6, 0xc7, 0xe6, 0x93, 0xa2, 0x4c, + 0x69, 0x6f, 0x79, 0x4c, 0x2e, 0xd8, 0x77, 0xa1, 0x59, 0x3c, 0xbe, 0xe5, 0x3b, 0x03, 0x73, + 0x68, 0xd7, + ]; + // 0xe5081c80ab427dc370346f4a0e31aa2bad8d9798c38061db9ae55a4e8df454fd28119894344e71b78770cc931d61f480ecbb0b89d6eb69690161e49a715fcd55 + const SIGNATURE: [u8; 64] = [ + 0xe5, 0x08, 0x1c, 0x80, 0xab, 0x42, 0x7d, 0xc3, 0x70, 0x34, 0x6f, 0x4a, 0x0e, 0x31, 0xaa, + 0x2b, 0xad, 0x8d, 0x97, 0x98, 0xc3, 0x80, 0x61, 0xdb, 0x9a, 0xe5, 0x5a, 0x4e, 0x8d, 0xf4, + 0x54, 0xfd, 0x28, 0x11, 0x98, 0x94, 0x34, 0x4e, 0x71, 0xb7, 0x87, 0x70, 0xcc, 0x93, 0x1d, + 0x61, 0xf4, 0x80, 0xec, 0xbb, 0x0b, 0x89, 0xd6, 0xeb, 0x69, 0x69, 0x01, 0x61, 0xe4, 0x9a, + 0x71, 0x5f, 0xcd, 0x55, + ]; + + #[test] + fn verifies_valid_signature_with_low_s_value() { + let valid = verify_signature(&HASHED_MESSAGE, &PUB_KEY_X, &PUB_KEY_Y, &SIGNATURE); + + assert!(valid); + } + + #[test] + fn rejects_invalid_signature() { + // This signature is invalid as ECDSA specifies that `r` and `s` must be non-zero. 
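Since ECDSA forbids `r = 0` and `s = 0`, an all-zero signature is the simplest rejection case, and it is equally visible through the public wrappers in `ecdsa/mod.rs`, which currently always return `Ok(bool)`. A minimal usage sketch, assuming the `acvm_blackbox_solver::ecdsa_secp256k1_verify` re-export added in `lib.rs` further down in this patch:

```rust
use acvm_blackbox_solver::ecdsa_secp256k1_verify;

fn main() {
    // r = s = 0 is forbidden by ECDSA, so signature parsing fails and the
    // wrapper reports `Ok(false)` instead of an error.
    let hashed_msg = [0u8; 32];
    let pub_key_x = [0u8; 32];
    let pub_key_y = [0u8; 32];
    let signature = [0u8; 64];

    let valid = ecdsa_secp256k1_verify(&hashed_msg, &pub_key_x, &pub_key_y, &signature)
        .expect("both wrappers currently wrap the boolean in Ok");
    assert!(!valid);
}
```

Keeping the `Result` return type leaves room for genuine failure modes later, even though neither wrapper produces an `Err` today.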
+ let invalid_signature: [u8; 64] = [0x00; 64]; + + let valid = verify_signature(&HASHED_MESSAGE, &PUB_KEY_X, &PUB_KEY_Y, &invalid_signature); + assert!(!valid); + } + + #[test] + fn rejects_invalid_public_key() { + let invalid_pub_key_x: [u8; 32] = [0xff; 32]; + let invalid_pub_key_y: [u8; 32] = [0xff; 32]; + + let valid = + verify_signature(&HASHED_MESSAGE, &invalid_pub_key_x, &invalid_pub_key_y, &SIGNATURE); + + assert!(!valid); + } + + #[test] + #[ignore = "ECDSA verification does not currently handle long hashes correctly"] + fn trims_overly_long_hashes_to_correct_length() { + let mut long_hashed_message = HASHED_MESSAGE.to_vec(); + long_hashed_message.push(0xff); + + let valid = verify_signature(&long_hashed_message, &PUB_KEY_X, &PUB_KEY_Y, &SIGNATURE); + + assert!(valid); + } +} diff --git a/acvm-repo/blackbox_solver/src/ecdsa/secp256r1.rs b/acvm-repo/blackbox_solver/src/ecdsa/secp256r1.rs new file mode 100644 index 00000000000..54559d7c774 --- /dev/null +++ b/acvm-repo/blackbox_solver/src/ecdsa/secp256r1.rs @@ -0,0 +1,133 @@ +use p256::elliptic_curve::sec1::FromEncodedPoint; +use p256::elliptic_curve::PrimeField; + +use blake2::digest::generic_array::GenericArray; +use p256::{ecdsa::Signature, Scalar}; +use p256::{ + elliptic_curve::{ + sec1::{Coordinates, ToEncodedPoint}, + IsHigh, + }, + AffinePoint, EncodedPoint, ProjectivePoint, PublicKey, +}; + +pub(super) fn verify_signature( + hashed_msg: &[u8], + public_key_x_bytes: &[u8; 32], + public_key_y_bytes: &[u8; 32], + signature: &[u8; 64], +) -> bool { + // Convert the inputs into k256 data structures + let Ok(signature) = Signature::try_from(signature.as_slice()) else { + // Signature `r` and `s` are forbidden from being zero. + return false; + }; + + let point = EncodedPoint::from_affine_coordinates( + public_key_x_bytes.into(), + public_key_y_bytes.into(), + true, + ); + + let pubkey = PublicKey::from_encoded_point(&point); + let pubkey = if pubkey.is_some().into() { + pubkey.unwrap() + } else { + // Public key must sit on the Secp256r1 curve. + return false; + }; + + // Note: This is incorrect as it will panic if `hashed_msg >= p256::NistP256::ORDER`. + // In this scenario we should just take the leftmost bits from `hashed_msg` up to the group order length. 
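This is the truncation step from FIPS 186-4 that both modules currently skip: when the prehash is longer than the group order (32 bytes for secp256k1 and secp256r1 alike), only its leftmost bytes should feed the scalar. A hedged sketch of that length handling, with an assumed helper name; a complete fix would also reduce the value modulo the order rather than relying on `Scalar::from_repr` succeeding:

```rust
/// Keep only the leftmost 32 bytes of an arbitrarily long prehash,
/// right-aligning shorter inputs so their big-endian integer value is preserved.
/// (Illustrative helper, not part of this patch.)
fn truncate_prehash_to_32_bytes(hashed_msg: &[u8]) -> [u8; 32] {
    let mut out = [0u8; 32];
    if hashed_msg.len() >= 32 {
        // The leftmost bytes are the most significant bytes of the scalar.
        out.copy_from_slice(&hashed_msg[..32]);
    } else {
        out[32 - hashed_msg.len()..].copy_from_slice(hashed_msg);
    }
    out
}
```

This is the behaviour described by the `#[ignore]`d `trims_overly_long_hashes_to_correct_length` tests in both modules.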
+ let z = Scalar::from_repr(*GenericArray::from_slice(hashed_msg)).unwrap(); + + // Finished converting bytes into data structures + + let r = signature.r(); + let s = signature.s(); + + // Ensure signature is "low S" normalized ala BIP 0062 + if s.is_high().into() { + return false; + } + + let s_inv = s.invert().unwrap(); + let u1 = z * s_inv; + let u2 = *r * s_inv; + + #[allow(non_snake_case)] + let R: AffinePoint = ((ProjectivePoint::GENERATOR * u1) + + (ProjectivePoint::from(*pubkey.as_affine()) * u2)) + .to_affine(); + + match R.to_encoded_point(false).coordinates() { + Coordinates::Uncompressed { x, y: _ } => Scalar::from_repr(*x).unwrap().eq(&r), + _ => unreachable!("Point is uncompressed"), + } +} + +#[cfg(test)] +mod secp256r1_tests { + use super::verify_signature; + + // 0x54705ba3baafdbdfba8c5f9a70f7a89bee98d906b53e31074da7baecdc0da9ad + const HASHED_MESSAGE: [u8; 32] = [ + 84, 112, 91, 163, 186, 175, 219, 223, 186, 140, 95, 154, 112, 247, 168, 155, 238, 152, 217, + 6, 181, 62, 49, 7, 77, 167, 186, 236, 220, 13, 169, 173, + ]; + // 0x550f471003f3df97c3df506ac797f6721fb1a1fb7b8f6f83d224498a65c88e24 + const PUB_KEY_X: [u8; 32] = [ + 85, 15, 71, 16, 3, 243, 223, 151, 195, 223, 80, 106, 199, 151, 246, 114, 31, 177, 161, 251, + 123, 143, 111, 131, 210, 36, 73, 138, 101, 200, 142, 36, + ]; + // 0x136093d7012e509a73715cbd0b00a3cc0ff4b5c01b3ffa196ab1fb327036b8e6 + const PUB_KEY_Y: [u8; 32] = [ + 19, 96, 147, 215, 1, 46, 80, 154, 115, 113, 92, 189, 11, 0, 163, 204, 15, 244, 181, 192, + 27, 63, 250, 25, 106, 177, 251, 50, 112, 54, 184, 230, + ]; + // 0x2c70a8d084b62bfc5ce03641caf9f72ad4da8c81bfe6ec9487bb5e1bef62a13218ad9ee29eaf351fdc50f1520c425e9b908a07278b43b0ec7b872778c14e0784 + const SIGNATURE: [u8; 64] = [ + 44, 112, 168, 208, 132, 182, 43, 252, 92, 224, 54, 65, 202, 249, 247, 42, 212, 218, 140, + 129, 191, 230, 236, 148, 135, 187, 94, 27, 239, 98, 161, 50, 24, 173, 158, 226, 158, 175, + 53, 31, 220, 80, 241, 82, 12, 66, 94, 155, 144, 138, 7, 39, 139, 67, 176, 236, 123, 135, + 39, 120, 193, 78, 7, 132, + ]; + + #[test] + fn verifies_valid_signature_with_low_s_value() { + let valid = verify_signature(&HASHED_MESSAGE, &PUB_KEY_X, &PUB_KEY_Y, &SIGNATURE); + + assert!(valid); + } + + #[test] + fn rejects_invalid_signature() { + // This signature is invalid as ECDSA specifies that `r` and `s` must be non-zero. 
+ let invalid_signature: [u8; 64] = [0x00; 64]; + + let valid = verify_signature(&HASHED_MESSAGE, &PUB_KEY_X, &PUB_KEY_Y, &invalid_signature); + assert!(!valid); + } + + #[test] + fn rejects_invalid_public_key() { + let invalid_pub_key_x: [u8; 32] = [0xff; 32]; + let invalid_pub_key_y: [u8; 32] = [0xff; 32]; + + let valid = + verify_signature(&HASHED_MESSAGE, &invalid_pub_key_x, &invalid_pub_key_y, &SIGNATURE); + + assert!(!valid); + } + + #[test] + #[ignore = "ECDSA verification does not currently handle long hashes correctly"] + fn trims_overly_long_hashes_to_correct_length() { + let mut long_hashed_message = HASHED_MESSAGE.to_vec(); + long_hashed_message.push(0xff); + + let valid = verify_signature(&long_hashed_message, &PUB_KEY_X, &PUB_KEY_Y, &SIGNATURE); + + assert!(valid); + } +} diff --git a/acvm-repo/blackbox_solver/src/hash.rs b/acvm-repo/blackbox_solver/src/hash.rs new file mode 100644 index 00000000000..ac56029b436 --- /dev/null +++ b/acvm-repo/blackbox_solver/src/hash.rs @@ -0,0 +1,126 @@ +use acir::BlackBoxFunc; +use blake2::digest::generic_array::GenericArray; +use blake2::{Blake2s256, Digest}; +use sha2::Sha256; +use sha3::Keccak256; + +use crate::BlackBoxResolutionError; + +/// Does a generic hash of the inputs returning the resulting 32 bytes separately. +fn generic_hash_256(message: &[u8]) -> Result<[u8; 32], String> { + let output_bytes: [u8; 32] = + D::digest(message).as_slice().try_into().map_err(|_| "digest should be 256 bits")?; + + Ok(output_bytes) +} + +pub fn sha256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { + generic_hash_256::(inputs) + .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::SHA256, err)) +} + +pub fn blake2s(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { + generic_hash_256::(inputs) + .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Blake2s, err)) +} + +pub fn blake3(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { + Ok(blake3::hash(inputs).into()) +} + +pub fn keccak256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { + generic_hash_256::(inputs) + .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Keccak256, err)) +} + +pub fn sha256compression(state: &mut [u32; 8], msg_blocks: &[u32; 16]) { + let mut blocks = [0_u8; 64]; + for (i, block) in msg_blocks.iter().enumerate() { + let bytes = block.to_be_bytes(); + blocks[i * 4..i * 4 + 4].copy_from_slice(&bytes); + } + let blocks: GenericArray = blocks.into(); + sha2::compress256(state, &[blocks]); +} + +const KECCAK_LANES: usize = 25; + +pub fn keccakf1600( + mut state: [u64; KECCAK_LANES], +) -> Result<[u64; KECCAK_LANES], BlackBoxResolutionError> { + keccak::f1600(&mut state); + Ok(state) +} + +#[cfg(test)] +mod keccakf1600_tests { + use super::keccakf1600; + + #[test] + fn sanity_check() { + // Test vectors are copied from XKCP (eXtended Keccak Code Package) + // https://github.com/XKCP/XKCP/blob/master/tests/TestVectors/KeccakF-1600-IntermediateValues.txt + let zero_state = [0u64; 25]; + + let expected_state_first = [ + 0xF1258F7940E1DDE7, + 0x84D5CCF933C0478A, + 0xD598261EA65AA9EE, + 0xBD1547306F80494D, + 0x8B284E056253D057, + 0xFF97A42D7F8E6FD4, + 0x90FEE5A0A44647C4, + 0x8C5BDA0CD6192E76, + 0xAD30A6F71B19059C, + 0x30935AB7D08FFC64, + 0xEB5AA93F2317D635, + 0xA9A6E6260D712103, + 0x81A57C16DBCF555F, + 0x43B831CD0347C826, + 0x01F22F1A11A5569F, + 0x05E5635A21D9AE61, + 0x64BEFEF28CC970F2, + 0x613670957BC46611, + 0xB87C5A554FD00ECB, + 0x8C3EE88A1CCF32C8, + 0x940C7922AE3A2614, + 0x1841F924A2C509E4, + 
0x16F53526E70465C2, + 0x75F644E97F30A13B, + 0xEAF1FF7B5CECA249, + ]; + let expected_state_second = [ + 0x2D5C954DF96ECB3C, + 0x6A332CD07057B56D, + 0x093D8D1270D76B6C, + 0x8A20D9B25569D094, + 0x4F9C4F99E5E7F156, + 0xF957B9A2DA65FB38, + 0x85773DAE1275AF0D, + 0xFAF4F247C3D810F7, + 0x1F1B9EE6F79A8759, + 0xE4FECC0FEE98B425, + 0x68CE61B6B9CE68A1, + 0xDEEA66C4BA8F974F, + 0x33C43D836EAFB1F5, + 0xE00654042719DBD9, + 0x7CF8A9F009831265, + 0xFD5449A6BF174743, + 0x97DDAD33D8994B40, + 0x48EAD5FC5D0BE774, + 0xE3B8C8EE55B7B03C, + 0x91A0226E649E42E9, + 0x900E3129E7BADD7B, + 0x202A9EC5FAA3CCE8, + 0x5B3402464E1C3DB6, + 0x609F4E62A44C1059, + 0x20D06CD26A8FBF5C, + ]; + + let state_first = keccakf1600(zero_state).unwrap(); + let state_second = keccakf1600(state_first).unwrap(); + + assert_eq!(state_first, expected_state_first); + assert_eq!(state_second, expected_state_second); + } +} diff --git a/acvm-repo/blackbox_solver/src/lib.rs b/acvm-repo/blackbox_solver/src/lib.rs index e033344fefa..dc798bdab32 100644 --- a/acvm-repo/blackbox_solver/src/lib.rs +++ b/acvm-repo/blackbox_solver/src/lib.rs @@ -8,456 +8,18 @@ //! For functions that have a reference implementation, such as [keccak256], this crate exports the reference implementation directly. use acir::BlackBoxFunc; -use blake2::digest::generic_array::GenericArray; -use blake2::{Blake2s256, Digest}; -use sha2::Sha256; -use sha3::Keccak256; use thiserror::Error; mod curve_specific_solver; +mod ecdsa; +mod hash; pub use curve_specific_solver::{BlackBoxFunctionSolver, StubbedBlackBoxSolver}; +pub use ecdsa::{ecdsa_secp256k1_verify, ecdsa_secp256r1_verify}; +pub use hash::{blake2s, blake3, keccak256, keccakf1600, sha256, sha256compression}; #[derive(Clone, PartialEq, Eq, Debug, Error)] pub enum BlackBoxResolutionError { #[error("failed to solve blackbox function: {0}, reason: {1}")] Failed(BlackBoxFunc, String), } - -pub fn sha256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { - generic_hash_256::(inputs) - .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::SHA256, err)) -} - -pub fn blake2s(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { - generic_hash_256::(inputs) - .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Blake2s, err)) -} - -pub fn blake3(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { - Ok(blake3::hash(inputs).into()) -} - -pub fn keccak256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { - generic_hash_256::(inputs) - .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Keccak256, err)) -} - -pub fn sha256compression(state: &mut [u32; 8], msg_blocks: &[u32; 16]) { - let mut blocks = [0_u8; 64]; - for (i, block) in msg_blocks.iter().enumerate() { - let bytes = block.to_be_bytes(); - blocks[i * 4..i * 4 + 4].copy_from_slice(&bytes); - } - let blocks: GenericArray = blocks.into(); - sha2::compress256(state, &[blocks]); -} - -const KECCAK_LANES: usize = 25; - -pub fn keccakf1600( - mut state: [u64; KECCAK_LANES], -) -> Result<[u64; KECCAK_LANES], BlackBoxResolutionError> { - keccak::f1600(&mut state); - Ok(state) -} - -pub fn ecdsa_secp256k1_verify( - hashed_msg: &[u8], - public_key_x: &[u8; 32], - public_key_y: &[u8; 32], - signature: &[u8; 64], -) -> Result { - Ok(verify_secp256k1_ecdsa_signature(hashed_msg, public_key_x, public_key_y, signature)) -} - -pub fn ecdsa_secp256r1_verify( - hashed_msg: &[u8], - public_key_x: &[u8; 32], - public_key_y: &[u8; 32], - signature: &[u8; 64], -) -> Result { - Ok(verify_secp256r1_ecdsa_signature(hashed_msg, public_key_x, 
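The hash wrappers moved into `hash.rs` keep their existing names and signatures and are re-exported from `lib.rs`, so downstream callers are unaffected. A short sketch against the well-known SHA-256 test vector for `"abc"` (an editorial illustration, not taken from this patch):

```rust
use acvm_blackbox_solver::{keccakf1600, sha256};

fn main() {
    // SHA-256("abc") from the standard NIST test vector.
    let digest = sha256(b"abc").expect("hashing raw bytes should not fail");
    let expected: [u8; 32] = [
        0xba, 0x78, 0x16, 0xbf, 0x8f, 0x01, 0xcf, 0xea, 0x41, 0x41, 0x40, 0xde, 0x5d, 0xae,
        0x22, 0x23, 0xb0, 0x03, 0x61, 0xa3, 0x96, 0x17, 0x7a, 0x9c, 0xb4, 0x10, 0xff, 0x61,
        0xf2, 0x00, 0x15, 0xad,
    ];
    assert_eq!(digest, expected);

    // keccakf1600 takes the 25-lane state by value and returns the permuted state.
    let permuted = keccakf1600([0u64; 25]).expect("the permutation itself is infallible");
    assert_ne!(permuted, [0u64; 25]);
}
```

`blake2s`, `blake3` and `keccak256` follow the same `&[u8] -> Result<[u8; 32], BlackBoxResolutionError>` shape.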
public_key_y, signature)) -} - -/// Does a generic hash of the inputs returning the resulting 32 bytes separately. -fn generic_hash_256(message: &[u8]) -> Result<[u8; 32], String> { - let output_bytes: [u8; 32] = - D::digest(message).as_slice().try_into().map_err(|_| "digest should be 256 bits")?; - - Ok(output_bytes) -} - -fn verify_secp256k1_ecdsa_signature( - hashed_msg: &[u8], - public_key_x_bytes: &[u8; 32], - public_key_y_bytes: &[u8; 32], - signature: &[u8; 64], -) -> bool { - use k256::elliptic_curve::sec1::FromEncodedPoint; - use k256::elliptic_curve::PrimeField; - - use k256::{ecdsa::Signature, Scalar}; - use k256::{ - elliptic_curve::{ - sec1::{Coordinates, ToEncodedPoint}, - IsHigh, - }, - AffinePoint, EncodedPoint, ProjectivePoint, PublicKey, - }; - // Convert the inputs into k256 data structures - - let Ok(signature) = Signature::try_from(signature.as_slice()) else { - // Signature `r` and `s` are forbidden from being zero. - return false; - }; - - let point = EncodedPoint::from_affine_coordinates( - public_key_x_bytes.into(), - public_key_y_bytes.into(), - true, - ); - - let pubkey = PublicKey::from_encoded_point(&point); - let pubkey = if pubkey.is_some().into() { - pubkey.unwrap() - } else { - // Public key must sit on the Secp256k1 curve. - return false; - }; - - // Note: This is incorrect as it will panic if `hashed_msg >= k256::Secp256k1::ORDER`. - // In this scenario we should just take the leftmost bits from `hashed_msg` up to the group order length. - let z = Scalar::from_repr(*GenericArray::from_slice(hashed_msg)).unwrap(); - - // Finished converting bytes into data structures - - let r = signature.r(); - let s = signature.s(); - - // Ensure signature is "low S" normalized ala BIP 0062 - if s.is_high().into() { - return false; - } - - let s_inv = s.invert().unwrap(); - let u1 = z * s_inv; - let u2 = *r * s_inv; - - #[allow(non_snake_case)] - let R: AffinePoint = ((ProjectivePoint::GENERATOR * u1) - + (ProjectivePoint::from(*pubkey.as_affine()) * u2)) - .to_affine(); - - match R.to_encoded_point(false).coordinates() { - Coordinates::Uncompressed { x, y: _ } => Scalar::from_repr(*x).unwrap().eq(&r), - _ => unreachable!("Point is uncompressed"), - } -} - -fn verify_secp256r1_ecdsa_signature( - hashed_msg: &[u8], - public_key_x_bytes: &[u8; 32], - public_key_y_bytes: &[u8; 32], - signature: &[u8; 64], -) -> bool { - use p256::elliptic_curve::sec1::FromEncodedPoint; - use p256::elliptic_curve::PrimeField; - - use p256::{ecdsa::Signature, Scalar}; - use p256::{ - elliptic_curve::{ - sec1::{Coordinates, ToEncodedPoint}, - IsHigh, - }, - AffinePoint, EncodedPoint, ProjectivePoint, PublicKey, - }; - - // Convert the inputs into k256 data structures - - let Ok(signature) = Signature::try_from(signature.as_slice()) else { - // Signature `r` and `s` are forbidden from being zero. - return false; - }; - - let point = EncodedPoint::from_affine_coordinates( - public_key_x_bytes.into(), - public_key_y_bytes.into(), - true, - ); - - let pubkey = PublicKey::from_encoded_point(&point); - let pubkey = if pubkey.is_some().into() { - pubkey.unwrap() - } else { - // Public key must sit on the Secp256r1 curve. - return false; - }; - - // Note: This is incorrect as it will panic if `hashed_msg >= p256::NistP256::ORDER`. - // In this scenario we should just take the leftmost bits from `hashed_msg` up to the group order length. 
- let z = Scalar::from_repr(*GenericArray::from_slice(hashed_msg)).unwrap(); - - // Finished converting bytes into data structures - - let r = signature.r(); - let s = signature.s(); - - // Ensure signature is "low S" normalized ala BIP 0062 - if s.is_high().into() { - return false; - } - - let s_inv = s.invert().unwrap(); - let u1 = z * s_inv; - let u2 = *r * s_inv; - - #[allow(non_snake_case)] - let R: AffinePoint = ((ProjectivePoint::GENERATOR * u1) - + (ProjectivePoint::from(*pubkey.as_affine()) * u2)) - .to_affine(); - - match R.to_encoded_point(false).coordinates() { - Coordinates::Uncompressed { x, y: _ } => Scalar::from_repr(*x).unwrap().eq(&r), - _ => unreachable!("Point is uncompressed"), - } -} - -#[cfg(test)] -mod keccakf1600_tests { - use crate::keccakf1600; - - #[test] - fn sanity_check() { - // Test vectors are copied from XKCP (eXtended Keccak Code Package) - // https://github.com/XKCP/XKCP/blob/master/tests/TestVectors/KeccakF-1600-IntermediateValues.txt - let zero_state = [0u64; 25]; - - let expected_state_first = [ - 0xF1258F7940E1DDE7, - 0x84D5CCF933C0478A, - 0xD598261EA65AA9EE, - 0xBD1547306F80494D, - 0x8B284E056253D057, - 0xFF97A42D7F8E6FD4, - 0x90FEE5A0A44647C4, - 0x8C5BDA0CD6192E76, - 0xAD30A6F71B19059C, - 0x30935AB7D08FFC64, - 0xEB5AA93F2317D635, - 0xA9A6E6260D712103, - 0x81A57C16DBCF555F, - 0x43B831CD0347C826, - 0x01F22F1A11A5569F, - 0x05E5635A21D9AE61, - 0x64BEFEF28CC970F2, - 0x613670957BC46611, - 0xB87C5A554FD00ECB, - 0x8C3EE88A1CCF32C8, - 0x940C7922AE3A2614, - 0x1841F924A2C509E4, - 0x16F53526E70465C2, - 0x75F644E97F30A13B, - 0xEAF1FF7B5CECA249, - ]; - let expected_state_second = [ - 0x2D5C954DF96ECB3C, - 0x6A332CD07057B56D, - 0x093D8D1270D76B6C, - 0x8A20D9B25569D094, - 0x4F9C4F99E5E7F156, - 0xF957B9A2DA65FB38, - 0x85773DAE1275AF0D, - 0xFAF4F247C3D810F7, - 0x1F1B9EE6F79A8759, - 0xE4FECC0FEE98B425, - 0x68CE61B6B9CE68A1, - 0xDEEA66C4BA8F974F, - 0x33C43D836EAFB1F5, - 0xE00654042719DBD9, - 0x7CF8A9F009831265, - 0xFD5449A6BF174743, - 0x97DDAD33D8994B40, - 0x48EAD5FC5D0BE774, - 0xE3B8C8EE55B7B03C, - 0x91A0226E649E42E9, - 0x900E3129E7BADD7B, - 0x202A9EC5FAA3CCE8, - 0x5B3402464E1C3DB6, - 0x609F4E62A44C1059, - 0x20D06CD26A8FBF5C, - ]; - - let state_first = keccakf1600(zero_state).unwrap(); - let state_second = keccakf1600(state_first).unwrap(); - - assert_eq!(state_first, expected_state_first); - assert_eq!(state_second, expected_state_second); - } -} - -#[cfg(test)] -mod secp256k1_tests { - use super::verify_secp256k1_ecdsa_signature; - - // 0x3a73f4123a5cd2121f21cd7e8d358835476949d035d9c2da6806b4633ac8c1e2, - const HASHED_MESSAGE: [u8; 32] = [ - 0x3a, 0x73, 0xf4, 0x12, 0x3a, 0x5c, 0xd2, 0x12, 0x1f, 0x21, 0xcd, 0x7e, 0x8d, 0x35, 0x88, - 0x35, 0x47, 0x69, 0x49, 0xd0, 0x35, 0xd9, 0xc2, 0xda, 0x68, 0x06, 0xb4, 0x63, 0x3a, 0xc8, - 0xc1, 0xe2, - ]; - // 0xa0434d9e47f3c86235477c7b1ae6ae5d3442d49b1943c2b752a68e2a47e247c7 - const PUB_KEY_X: [u8; 32] = [ - 0xa0, 0x43, 0x4d, 0x9e, 0x47, 0xf3, 0xc8, 0x62, 0x35, 0x47, 0x7c, 0x7b, 0x1a, 0xe6, 0xae, - 0x5d, 0x34, 0x42, 0xd4, 0x9b, 0x19, 0x43, 0xc2, 0xb7, 0x52, 0xa6, 0x8e, 0x2a, 0x47, 0xe2, - 0x47, 0xc7, - ]; - // 0x893aba425419bc27a3b6c7e693a24c696f794c2ed877a1593cbee53b037368d7 - const PUB_KEY_Y: [u8; 32] = [ - 0x89, 0x3a, 0xba, 0x42, 0x54, 0x19, 0xbc, 0x27, 0xa3, 0xb6, 0xc7, 0xe6, 0x93, 0xa2, 0x4c, - 0x69, 0x6f, 0x79, 0x4c, 0x2e, 0xd8, 0x77, 0xa1, 0x59, 0x3c, 0xbe, 0xe5, 0x3b, 0x03, 0x73, - 0x68, 0xd7, - ]; - // 
0xe5081c80ab427dc370346f4a0e31aa2bad8d9798c38061db9ae55a4e8df454fd28119894344e71b78770cc931d61f480ecbb0b89d6eb69690161e49a715fcd55 - const SIGNATURE: [u8; 64] = [ - 0xe5, 0x08, 0x1c, 0x80, 0xab, 0x42, 0x7d, 0xc3, 0x70, 0x34, 0x6f, 0x4a, 0x0e, 0x31, 0xaa, - 0x2b, 0xad, 0x8d, 0x97, 0x98, 0xc3, 0x80, 0x61, 0xdb, 0x9a, 0xe5, 0x5a, 0x4e, 0x8d, 0xf4, - 0x54, 0xfd, 0x28, 0x11, 0x98, 0x94, 0x34, 0x4e, 0x71, 0xb7, 0x87, 0x70, 0xcc, 0x93, 0x1d, - 0x61, 0xf4, 0x80, 0xec, 0xbb, 0x0b, 0x89, 0xd6, 0xeb, 0x69, 0x69, 0x01, 0x61, 0xe4, 0x9a, - 0x71, 0x5f, 0xcd, 0x55, - ]; - - #[test] - fn verifies_valid_signature_with_low_s_value() { - let valid = - verify_secp256k1_ecdsa_signature(&HASHED_MESSAGE, &PUB_KEY_X, &PUB_KEY_Y, &SIGNATURE); - - assert!(valid); - } - - #[test] - fn rejects_invalid_signature() { - // This signature is invalid as ECDSA specifies that `r` and `s` must be non-zero. - let invalid_signature: [u8; 64] = [0x00; 64]; - - let valid = verify_secp256k1_ecdsa_signature( - &HASHED_MESSAGE, - &PUB_KEY_X, - &PUB_KEY_Y, - &invalid_signature, - ); - assert!(!valid); - } - - #[test] - fn rejects_invalid_public_key() { - let invalid_pub_key_x: [u8; 32] = [0xff; 32]; - let invalid_pub_key_y: [u8; 32] = [0xff; 32]; - - let valid = verify_secp256k1_ecdsa_signature( - &HASHED_MESSAGE, - &invalid_pub_key_x, - &invalid_pub_key_y, - &SIGNATURE, - ); - - assert!(!valid); - } - - #[test] - #[ignore = "ECDSA verification does not currently handle long hashes correctly"] - fn trims_overly_long_hashes_to_correct_length() { - let mut long_hashed_message = HASHED_MESSAGE.to_vec(); - long_hashed_message.push(0xff); - - let valid = verify_secp256k1_ecdsa_signature( - &long_hashed_message, - &PUB_KEY_X, - &PUB_KEY_Y, - &SIGNATURE, - ); - - assert!(valid); - } -} - -#[cfg(test)] -mod secp256r1_tests { - use super::verify_secp256r1_ecdsa_signature; - - // 0x54705ba3baafdbdfba8c5f9a70f7a89bee98d906b53e31074da7baecdc0da9ad - const HASHED_MESSAGE: [u8; 32] = [ - 84, 112, 91, 163, 186, 175, 219, 223, 186, 140, 95, 154, 112, 247, 168, 155, 238, 152, 217, - 6, 181, 62, 49, 7, 77, 167, 186, 236, 220, 13, 169, 173, - ]; - // 0x550f471003f3df97c3df506ac797f6721fb1a1fb7b8f6f83d224498a65c88e24 - const PUB_KEY_X: [u8; 32] = [ - 85, 15, 71, 16, 3, 243, 223, 151, 195, 223, 80, 106, 199, 151, 246, 114, 31, 177, 161, 251, - 123, 143, 111, 131, 210, 36, 73, 138, 101, 200, 142, 36, - ]; - // 0x136093d7012e509a73715cbd0b00a3cc0ff4b5c01b3ffa196ab1fb327036b8e6 - const PUB_KEY_Y: [u8; 32] = [ - 19, 96, 147, 215, 1, 46, 80, 154, 115, 113, 92, 189, 11, 0, 163, 204, 15, 244, 181, 192, - 27, 63, 250, 25, 106, 177, 251, 50, 112, 54, 184, 230, - ]; - // 0x2c70a8d084b62bfc5ce03641caf9f72ad4da8c81bfe6ec9487bb5e1bef62a13218ad9ee29eaf351fdc50f1520c425e9b908a07278b43b0ec7b872778c14e0784 - const SIGNATURE: [u8; 64] = [ - 44, 112, 168, 208, 132, 182, 43, 252, 92, 224, 54, 65, 202, 249, 247, 42, 212, 218, 140, - 129, 191, 230, 236, 148, 135, 187, 94, 27, 239, 98, 161, 50, 24, 173, 158, 226, 158, 175, - 53, 31, 220, 80, 241, 82, 12, 66, 94, 155, 144, 138, 7, 39, 139, 67, 176, 236, 123, 135, - 39, 120, 193, 78, 7, 132, - ]; - - #[test] - fn verifies_valid_signature_with_low_s_value() { - let valid = - verify_secp256r1_ecdsa_signature(&HASHED_MESSAGE, &PUB_KEY_X, &PUB_KEY_Y, &SIGNATURE); - - assert!(valid); - } - - #[test] - fn rejects_invalid_signature() { - // This signature is invalid as ECDSA specifies that `r` and `s` must be non-zero. 
- let invalid_signature: [u8; 64] = [0x00; 64]; - - let valid = verify_secp256r1_ecdsa_signature( - &HASHED_MESSAGE, - &PUB_KEY_X, - &PUB_KEY_Y, - &invalid_signature, - ); - assert!(!valid); - } - - #[test] - fn rejects_invalid_public_key() { - let invalid_pub_key_x: [u8; 32] = [0xff; 32]; - let invalid_pub_key_y: [u8; 32] = [0xff; 32]; - - let valid = verify_secp256r1_ecdsa_signature( - &HASHED_MESSAGE, - &invalid_pub_key_x, - &invalid_pub_key_y, - &SIGNATURE, - ); - - assert!(!valid); - } - - #[test] - #[ignore = "ECDSA verification does not currently handle long hashes correctly"] - fn trims_overly_long_hashes_to_correct_length() { - let mut long_hashed_message = HASHED_MESSAGE.to_vec(); - long_hashed_message.push(0xff); - - let valid = verify_secp256r1_ecdsa_signature( - &long_hashed_message, - &PUB_KEY_X, - &PUB_KEY_Y, - &SIGNATURE, - ); - - assert!(valid); - } -} diff --git a/acvm-repo/bn254_blackbox_solver/Cargo.toml b/acvm-repo/bn254_blackbox_solver/Cargo.toml index ea601a6b80f..a0a15409604 100644 --- a/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -32,7 +32,7 @@ ark-ff = { version = "^0.4.0", default-features = false } num-bigint.workspace = true [target.'cfg(target_arch = "wasm32")'.dependencies] -wasmer = { version = "4.2.3", default-features = false, features = [ +wasmer = { version = "4.2.6", default-features = false, features = [ "js-default", ] } @@ -42,7 +42,7 @@ js-sys.workspace = true [target.'cfg(not(target_arch = "wasm32"))'.dependencies] getrandom.workspace = true -wasmer = "4.2.3" +wasmer = "4.2.6" [build-dependencies] pkg-config = "0.3" diff --git a/acvm-repo/brillig/Cargo.toml b/acvm-repo/brillig/Cargo.toml index 8d91d19e117..57f89e091b4 100644 --- a/acvm-repo/brillig/Cargo.toml +++ b/acvm-repo/brillig/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig" description = "Brillig is the bytecode ACIR uses for non-determinism." 
# x-release-please-start-version -version = "0.40.0" +version = "0.41.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/brillig_vm/Cargo.toml b/acvm-repo/brillig_vm/Cargo.toml index 272e8389413..1c7add5cb40 100644 --- a/acvm-repo/brillig_vm/Cargo.toml +++ b/acvm-repo/brillig_vm/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig_vm" description = "The virtual machine that processes Brillig bytecode, used to introduce non-determinism to the ACVM" # x-release-please-start-version -version = "0.40.0" +version = "0.41.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index f9df3f10129..1f3546cbb6a 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -1,27 +1,26 @@ -use std::borrow::{Borrow, BorrowMut}; -use std::vec; +mod transforms; +mod utils; + +use transforms::{ + compute_note_hash_and_nullifier::inject_compute_note_hash_and_nullifier, + events::{generate_selector_impl, transform_events}, + functions::{transform_function, transform_unconstrained, transform_vm_function}, + storage::{ + assign_storage_slots, check_for_storage_definition, check_for_storage_implementation, + generate_storage_implementation, + }, +}; -use convert_case::{Case, Casing}; -use iter_extended::vecmap; -use noirc_errors::{Location, Spanned}; use noirc_frontend::hir::def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}; -use noirc_frontend::hir::def_map::{LocalModuleId, ModuleId}; -use noirc_frontend::macros_api::parse_program; -use noirc_frontend::macros_api::FieldElement; -use noirc_frontend::macros_api::{ - BlockExpression, CallExpression, CastExpression, Distinctness, Expression, ExpressionKind, - ForLoopStatement, ForRange, FunctionDefinition, FunctionReturnType, FunctionVisibility, - HirContext, HirExpression, HirLiteral, HirStatement, Ident, IndexExpression, LetStatement, - Literal, MemberAccessExpression, MethodCallExpression, NoirFunction, NoirStruct, Param, Path, - PathKind, Pattern, PrefixExpression, SecondaryAttribute, Signedness, Span, Statement, - StatementKind, StructType, Type, TypeImpl, UnaryOp, UnresolvedType, UnresolvedTypeData, - Visibility, -}; -use noirc_frontend::macros_api::{CrateId, FileId}; -use noirc_frontend::macros_api::{MacroError, MacroProcessor}; -use noirc_frontend::macros_api::{ModuleDefId, NodeInterner, SortedModule, StructId}; -use noirc_frontend::node_interner::{FuncId, TraitId, TraitImplId, TraitImplKind}; -use noirc_frontend::{BinaryOpKind, ConstrainKind, ConstrainStatement, InfixExpression, Lambda}; + +use noirc_frontend::macros_api::SortedModule; +use noirc_frontend::macros_api::{CrateId, MacroError}; +use noirc_frontend::macros_api::{FileId, MacroProcessor}; +use noirc_frontend::macros_api::{HirContext, SecondaryAttribute, Span}; + +use utils::ast_utils::is_custom_attribute; +use utils::checks::{check_for_aztec_dependency, has_aztec_dependency}; +use utils::{constants::MAX_CONTRACT_PRIVATE_FUNCTIONS, errors::AztecMacroError}; pub struct AztecMacro; impl MacroProcessor for AztecMacro { @@ -34,23 +33,14 @@ impl MacroProcessor for AztecMacro { transform(ast, crate_id, context) } - fn process_unresolved_traits_impls( + fn process_collected_defs( &self, crate_id: &CrateId, context: &mut HirContext, - unresolved_traits_impls: &[UnresolvedTraitImpl], - collected_functions: &mut Vec, + collected_trait_impls: &[UnresolvedTraitImpl], + collected_functions: &mut [UnresolvedFunctions], ) -> Result<(), (MacroError, FileId)> { - if 
has_aztec_dependency(crate_id, context) { - inject_compute_note_hash_and_nullifier( - crate_id, - context, - unresolved_traits_impls, - collected_functions, - ) - } else { - Ok(()) - } + transform_collected_defs(crate_id, context, collected_trait_impls, collected_functions) } fn process_typed_ast( @@ -62,225 +52,6 @@ impl MacroProcessor for AztecMacro { } } -const FUNCTION_TREE_HEIGHT: u32 = 5; -const MAX_CONTRACT_PRIVATE_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); - -#[derive(Debug, Clone)] -pub enum AztecMacroError { - AztecDepNotFound, - ContractHasTooManyPrivateFunctions { span: Span }, - ContractConstructorMissing { span: Span }, - UnsupportedFunctionArgumentType { span: Span, typ: UnresolvedTypeData }, - UnsupportedStorageType { span: Option, typ: UnresolvedTypeData }, - CouldNotAssignStorageSlots { secondary_message: Option }, - EventError { span: Span, message: String }, - UnsupportedAttributes { span: Span, secondary_message: Option }, -} - -impl From for MacroError { - fn from(err: AztecMacroError) -> Self { - match err { - AztecMacroError::AztecDepNotFound {} => MacroError { - primary_message: "Aztec dependency not found. Please add aztec as a dependency in your Cargo.toml. For more information go to https://docs.aztec.network/developers/debugging/aztecnr-errors#aztec-dependency-not-found-please-add-aztec-as-a-dependency-in-your-nargotoml".to_owned(), - secondary_message: None, - span: None, - }, - AztecMacroError::ContractHasTooManyPrivateFunctions { span } => MacroError { - primary_message: format!("Contract can only have a maximum of {} private functions", MAX_CONTRACT_PRIVATE_FUNCTIONS), - secondary_message: None, - span: Some(span), - }, - AztecMacroError::ContractConstructorMissing { span } => MacroError { - primary_message: "Contract must have a constructor function".to_owned(), - secondary_message: None, - span: Some(span), - }, - AztecMacroError::UnsupportedFunctionArgumentType { span, typ } => MacroError { - primary_message: format!("Provided parameter type `{typ:?}` is not supported in Aztec contract interface"), - secondary_message: None, - span: Some(span), - }, - AztecMacroError::UnsupportedStorageType { span, typ } => MacroError { - primary_message: format!("Provided storage type `{typ:?}` is not directly supported in Aztec. 
Please provide a custom storage implementation"), - secondary_message: None, - span, - }, - AztecMacroError::CouldNotAssignStorageSlots { secondary_message } => MacroError { - primary_message: "Could not assign storage slots, please provide a custom storage implementation".to_string(), - secondary_message, - span: None, - }, - AztecMacroError::EventError { span, message } => MacroError { - primary_message: message, - secondary_message: None, - span: Some(span), - }, - AztecMacroError::UnsupportedAttributes { span, secondary_message } => MacroError { - primary_message: "Unsupported attributes in contract function".to_string(), - secondary_message, - span: Some(span), - }, - } - } -} - -// -// Helper macros for creating noir ast nodes -// -fn ident(name: &str) -> Ident { - Ident::new(name.to_string(), Span::default()) -} - -fn ident_path(name: &str) -> Path { - Path::from_ident(ident(name)) -} - -fn path(ident: Ident) -> Path { - Path::from_ident(ident) -} - -fn expression(kind: ExpressionKind) -> Expression { - Expression::new(kind, Span::default()) -} - -fn variable(name: &str) -> Expression { - expression(ExpressionKind::Variable(ident_path(name))) -} - -fn variable_ident(identifier: Ident) -> Expression { - expression(ExpressionKind::Variable(path(identifier))) -} - -fn variable_path(path: Path) -> Expression { - expression(ExpressionKind::Variable(path)) -} - -fn method_call(object: Expression, method_name: &str, arguments: Vec) -> Expression { - expression(ExpressionKind::MethodCall(Box::new(MethodCallExpression { - object, - method_name: ident(method_name), - arguments, - }))) -} - -fn call(func: Expression, arguments: Vec) -> Expression { - expression(ExpressionKind::Call(Box::new(CallExpression { func: Box::new(func), arguments }))) -} - -fn pattern(name: &str) -> Pattern { - Pattern::Identifier(ident(name)) -} - -fn mutable(name: &str) -> Pattern { - Pattern::Mutable(Box::new(pattern(name)), Span::default(), true) -} - -fn mutable_assignment(name: &str, assigned_to: Expression) -> Statement { - make_statement(StatementKind::Let(LetStatement { - pattern: mutable(name), - r#type: make_type(UnresolvedTypeData::Unspecified), - expression: assigned_to, - })) -} - -fn mutable_reference(variable_name: &str) -> Expression { - expression(ExpressionKind::Prefix(Box::new(PrefixExpression { - operator: UnaryOp::MutableReference, - rhs: variable(variable_name), - }))) -} - -fn assignment(name: &str, assigned_to: Expression) -> Statement { - make_statement(StatementKind::Let(LetStatement { - pattern: pattern(name), - r#type: make_type(UnresolvedTypeData::Unspecified), - expression: assigned_to, - })) -} - -fn member_access(lhs: &str, rhs: &str) -> Expression { - expression(ExpressionKind::MemberAccess(Box::new(MemberAccessExpression { - lhs: variable(lhs), - rhs: ident(rhs), - }))) -} - -fn return_type(path: Path) -> FunctionReturnType { - let ty = make_type(UnresolvedTypeData::Named(path, vec![], true)); - FunctionReturnType::Ty(ty) -} - -fn lambda(parameters: Vec<(Pattern, UnresolvedType)>, body: Expression) -> Expression { - expression(ExpressionKind::Lambda(Box::new(Lambda { - parameters, - return_type: UnresolvedType { - typ: UnresolvedTypeData::Unspecified, - span: Some(Span::default()), - }, - body, - }))) -} - -fn make_eq(lhs: Expression, rhs: Expression) -> Expression { - expression(ExpressionKind::Infix(Box::new(InfixExpression { - lhs, - rhs, - operator: Spanned::from(Span::default(), BinaryOpKind::Equal), - }))) -} - -macro_rules! 
chained_path { - ( $base:expr ) => { - { - ident_path($base) - } - }; - ( $base:expr $(, $tail:expr)* ) => { - { - let mut base_path = ident_path($base); - $( - base_path.segments.push(ident($tail)); - )* - base_path - } - } -} - -macro_rules! chained_dep { - ( $base:expr $(, $tail:expr)* ) => { - { - let mut base_path = ident_path($base); - base_path.kind = PathKind::Dep; - $( - base_path.segments.push(ident($tail)); - )* - base_path - } - } -} - -fn cast(lhs: Expression, ty: UnresolvedTypeData) -> Expression { - expression(ExpressionKind::Cast(Box::new(CastExpression { lhs, r#type: make_type(ty) }))) -} - -fn make_type(typ: UnresolvedTypeData) -> UnresolvedType { - UnresolvedType { typ, span: Some(Span::default()) } -} - -fn index_array(array: Ident, index: &str) -> Expression { - expression(ExpressionKind::Index(Box::new(IndexExpression { - collection: variable_path(path(array)), - index: variable(index), - }))) -} - -fn index_array_variable(array: Expression, index: &str) -> Expression { - expression(ExpressionKind::Index(Box::new(IndexExpression { - collection: array, - index: variable(index), - }))) -} - // // Create AST Nodes for Aztec // @@ -293,7 +64,6 @@ fn transform( context: &HirContext, ) -> Result { // Usage -> mut ast -> aztec_library::transform(&mut ast) - // Covers all functions in the ast for submodule in ast.submodules.iter_mut().filter(|submodule| submodule.is_contract) { if transform_module(&mut submodule.contents, crate_id, context) @@ -305,97 +75,6 @@ fn transform( Ok(ast) } -// -// Transform Hir Nodes for Aztec -// - -/// Completes the Hir with data gathered from type resolution -fn transform_hir( - crate_id: &CrateId, - context: &mut HirContext, -) -> Result<(), (AztecMacroError, FileId)> { - transform_events(crate_id, context)?; - assign_storage_slots(crate_id, context) -} - -/// Creates an error alerting the user that they have not downloaded the Aztec-noir library -fn check_for_aztec_dependency( - crate_id: &CrateId, - context: &HirContext, -) -> Result<(), (MacroError, FileId)> { - if has_aztec_dependency(crate_id, context) { - Ok(()) - } else { - Err((AztecMacroError::AztecDepNotFound.into(), context.crate_graph[crate_id].root_file_id)) - } -} - -fn has_aztec_dependency(crate_id: &CrateId, context: &HirContext) -> bool { - context.crate_graph[crate_id].dependencies.iter().any(|dep| dep.as_name() == "aztec") -} - -// Check to see if the user has defined a storage struct -fn check_for_storage_definition(module: &SortedModule) -> bool { - module.types.iter().any(|r#struct| r#struct.name.0.contents == "Storage") -} - -// Check to see if the user has defined a storage struct -fn check_for_storage_implementation(module: &SortedModule) -> bool { - module.impls.iter().any(|r#impl| match &r#impl.object_type.typ { - UnresolvedTypeData::Named(path, _, _) => { - path.segments.last().is_some_and(|segment| segment.0.contents == "Storage") - } - _ => false, - }) -} - -// Check if "compute_note_hash_and_nullifier(AztecAddress,Field,Field,Field,[Field; N]) -> [Field; 4]" is defined -fn check_for_compute_note_hash_and_nullifier_definition( - functions_data: &[(LocalModuleId, FuncId, NoirFunction)], - module_id: LocalModuleId, -) -> bool { - functions_data.iter().filter(|func_data| func_data.0 == module_id).any(|func_data| { - func_data.2.def.name.0.contents == "compute_note_hash_and_nullifier" - && func_data.2.def.parameters.len() == 5 - && match &func_data.2.def.parameters[0].typ.typ { - UnresolvedTypeData::Named(path, _, _) => path.segments.last().unwrap().0.contents == 
"AztecAddress", - _ => false, - } - && func_data.2.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement - && func_data.2.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement - && func_data.2.def.parameters[3].typ.typ == UnresolvedTypeData::FieldElement - // checks if the 5th parameter is an array and the Box in - // Array(Option, Box) contains only fields - && match &func_data.2.def.parameters[4].typ.typ { - UnresolvedTypeData::Array(_, inner_type) => { - matches!(inner_type.typ, UnresolvedTypeData::FieldElement) - }, - _ => false, - } - // We check the return type the same way as we did the 5th parameter - && match &func_data.2.def.return_type { - FunctionReturnType::Default(_) => false, - FunctionReturnType::Ty(unresolved_type) => { - match &unresolved_type.typ { - UnresolvedTypeData::Array(_, inner_type) => { - matches!(inner_type.typ, UnresolvedTypeData::FieldElement) - }, - _ => false, - } - } - } - }) -} - -/// Checks if an attribute is a custom attribute with a specific name -fn is_custom_attribute(attr: &SecondaryAttribute, attribute_name: &str) -> bool { - if let SecondaryAttribute::Custom(custom_attr) = attr { - custom_attr.as_str() == attribute_name - } else { - false - } -} - /// Determines if ast nodes are annotated with aztec attributes. /// For annotated functions it calls the `transform` function which will perform the required transformations. /// Returns true if an annotated node is found, false otherwise @@ -472,10 +151,7 @@ fn transform_module( transform_vm_function(func, storage_defined) .map_err(|err| (err, crate_graph.root_file_id))?; has_transformed_module = true; - } - - // Add the storage struct to the beginning of the function if it is unconstrained in an aztec contract - if storage_defined && func.def.is_unconstrained { + } else if storage_defined && func.def.is_unconstrained { transform_unconstrained(func); has_transformed_module = true; } @@ -517,1402 +193,33 @@ fn transform_module( Ok(has_transformed_module) } -/// Auxiliary function to generate the storage constructor for a given field, using -/// the Storage definition as a reference. Supports nesting. 
-fn generate_storage_field_constructor( - (type_ident, unresolved_type): &(Ident, UnresolvedType), - slot: Expression, -) -> Result { - let typ = &unresolved_type.typ; - match typ { - UnresolvedTypeData::Named(path, generics, _) => { - let mut new_path = path.clone().to_owned(); - new_path.segments.push(ident("new")); - match path.segments.last().unwrap().0.contents.as_str() { - "Map" => Ok(call( - variable_path(new_path), - vec![ - variable("context"), - slot, - lambda( - vec![ - ( - pattern("context"), - make_type(UnresolvedTypeData::Named( - chained_dep!("aztec", "context", "Context"), - vec![], - true, - )), - ), - ( - Pattern::Identifier(ident("slot")), - make_type(UnresolvedTypeData::FieldElement), - ), - ], - generate_storage_field_constructor( - &(type_ident.clone(), generics.iter().last().unwrap().clone()), - variable("slot"), - )?, - ), - ], - )), - _ => Ok(call(variable_path(new_path), vec![variable("context"), slot])), - } - } - _ => Err(AztecMacroError::UnsupportedStorageType { - typ: typ.clone(), - span: Some(type_ident.span()), - }), - } -} - -// Generates the Storage implementation block from the Storage struct definition if it does not exist -/// From: -/// -/// struct Storage { -/// a_map: Map>, -/// a_nested_map: Map>>, -/// a_field: SomeStoragePrimitive, -/// } -/// -/// To: -/// -/// impl Storage { -/// fn init(context: Context) -> Self { -/// Storage { -/// a_map: Map::new(context, 0, |context, slot| { -/// SomeStoragePrimitive::new(context, slot) -/// }), -/// a_nested_map: Map::new(context, 0, |context, slot| { -/// Map::new(context, slot, |context, slot| { -/// SomeStoragePrimitive::new(context, slot) -/// }) -/// }), -/// a_field: SomeStoragePrimitive::new(context, 0), -/// } -/// } -/// } -/// -/// Storage slots are generated as 0 and will be populated using the information from the HIR -/// at a later stage. 
-fn generate_storage_implementation(module: &mut SortedModule) -> Result<(), AztecMacroError> { - let definition = - module.types.iter().find(|r#struct| r#struct.name.0.contents == "Storage").unwrap(); - - let slot_zero = expression(ExpressionKind::Literal(Literal::Integer( - FieldElement::from(i128::from(0)), - false, - ))); - - let field_constructors = definition - .fields - .iter() - .flat_map(|field| { - generate_storage_field_constructor(field, slot_zero.clone()) - .map(|expression| (field.0.clone(), expression)) - }) - .collect(); - - let storage_constructor_statement = make_statement(StatementKind::Expression(expression( - ExpressionKind::constructor((chained_path!("Storage"), field_constructors)), - ))); - - let init = NoirFunction::normal(FunctionDefinition::normal( - &ident("init"), - &vec![], - &[( - ident("context"), - make_type(UnresolvedTypeData::Named( - chained_dep!("aztec", "context", "Context"), - vec![], - true, - )), - )], - &BlockExpression(vec![storage_constructor_statement]), - &[], - &return_type(chained_path!("Self")), - )); - - let storage_impl = TypeImpl { - object_type: UnresolvedType { - typ: UnresolvedTypeData::Named(chained_path!("Storage"), vec![], true), - span: Some(Span::default()), - }, - type_span: Span::default(), - generics: vec![], - methods: vec![(init, Span::default())], - }; - module.impls.push(storage_impl); - - Ok(()) -} - -/// If it does, it will insert the following things: -/// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs -/// - Hashes all of the function input variables -/// - This instantiates a helper function -fn transform_function( - ty: &str, - func: &mut NoirFunction, - storage_defined: bool, - is_initializer: bool, - insert_init_check: bool, - is_internal: bool, -) -> Result<(), AztecMacroError> { - let context_name = format!("{}Context", ty); - let inputs_name = format!("{}ContextInputs", ty); - let return_type_name = format!("{}CircuitPublicInputs", ty); - - // Add check that msg sender equals this address and flag function as internal - if is_internal { - let is_internal_check = create_internal_check(func.name()); - func.def.body.0.insert(0, is_internal_check); - func.def.is_internal = true; - } - - // Add initialization check - if insert_init_check { - let init_check = create_init_check(); - func.def.body.0.insert(0, init_check); - } - - // Add access to the storage struct - if storage_defined { - let storage_def = abstract_storage(&ty.to_lowercase(), false); - func.def.body.0.insert(0, storage_def); - } - - // Insert the context creation as the first action - let create_context = create_context(&context_name, &func.def.parameters)?; - func.def.body.0.splice(0..0, (create_context).iter().cloned()); - - // Add the inputs to the params - let input = create_inputs(&inputs_name); - func.def.parameters.insert(0, input); - - // Abstract return types such that they get added to the kernel's return_values - if let Some(return_values) = abstract_return_values(func) { - // In case we are pushing return values to the context, we remove the statement that originated it - // This avoids running duplicate code, since blocks like if/else can be value returning statements - func.def.body.0.pop(); - // Add the new return statement - func.def.body.0.push(return_values); - } - - // Before returning mark the contract as initialized - if is_initializer { - let mark_initialized = create_mark_as_initialized(ty); - func.def.body.0.push(mark_initialized); - } - - // Push the finish method call to the end of 
the function - let finish_def = create_context_finish(); - func.def.body.0.push(finish_def); - - let return_type = create_return_type(&return_type_name); - func.def.return_type = return_type; - func.def.return_visibility = Visibility::Public; - - // Distinct return types are only required for private functions - // Public functions should have open auto-inferred - match ty { - "Private" => func.def.return_distinctness = Distinctness::Distinct, - "Public" => func.def.is_open = true, - _ => (), - } - - Ok(()) -} - -/// Transform a function to work with AVM bytecode -fn transform_vm_function( - func: &mut NoirFunction, - _storage_defined: bool, -) -> Result<(), AztecMacroError> { - // Push Avm context creation to the beginning of the function - let create_context = create_avm_context()?; - func.def.body.0.insert(0, create_context); - - // We want the function to be seen as a public function - func.def.is_open = true; - - // NOTE: the line below is a temporary hack to trigger external transpilation tools - // It will be removed once the transpiler is integrated into the Noir compiler - func.def.name.0.contents = format!("avm_{}", func.def.name.0.contents); - Ok(()) -} - -/// Transform Unconstrained -/// -/// Inserts the following code at the beginning of an unconstrained function -/// ```noir -/// let storage = Storage::init(Context::none()); -/// ``` -/// -/// This will allow developers to access their contract' storage struct in unconstrained functions -fn transform_unconstrained(func: &mut NoirFunction) { - func.def.body.0.insert(0, abstract_storage("Unconstrained", true)); -} - -fn collect_crate_structs(crate_id: &CrateId, context: &HirContext) -> Vec { - context - .def_map(crate_id) - .expect("ICE: Missing crate in def_map") - .modules() - .iter() - .flat_map(|(_, module)| { - module.type_definitions().filter_map(|typ| { - if let ModuleDefId::TypeId(struct_id) = typ { - Some(struct_id) - } else { - None - } - }) - }) - .collect() -} - -fn collect_traits(context: &HirContext) -> Vec { - let crates = context.crates(); - crates - .flat_map(|crate_id| context.def_map(&crate_id).map(|def_map| def_map.modules())) - .flatten() - .flat_map(|module| { - module.type_definitions().filter_map(|typ| { - if let ModuleDefId::TraitId(struct_id) = typ { - Some(struct_id) - } else { - None - } - }) - }) - .collect() -} - -/// Substitutes the signature literal that was introduced in the selector method previously with the actual signature. 
-fn transform_event( - struct_id: StructId, - interner: &mut NodeInterner, -) -> Result<(), (AztecMacroError, FileId)> { - let struct_type = interner.get_struct(struct_id); - let selector_id = interner - .lookup_method(&Type::Struct(struct_type.clone(), vec![]), struct_id, "selector", false) - .ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Selector method not found".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?; - let selector_function = interner.function(&selector_id); - - let compute_selector_statement = interner.statement( - selector_function.block(interner).statements().first().ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Compute selector statement not found".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?, - ); - - let compute_selector_expression = match compute_selector_statement { - HirStatement::Expression(expression_id) => match interner.expression(&expression_id) { - HirExpression::Call(hir_call_expression) => Some(hir_call_expression), - _ => None, - }, - _ => None, - } - .ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Compute selector statement is not a call expression".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?; - - let first_arg_id = compute_selector_expression.arguments.first().ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Compute selector statement is not a call expression".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?; - - match interner.expression(first_arg_id) { - HirExpression::Literal(HirLiteral::Str(signature)) - if signature == SIGNATURE_PLACEHOLDER => - { - let selector_literal_id = *first_arg_id; - - let structure = interner.get_struct(struct_id); - let signature = event_signature(&structure.borrow()); - interner.update_expression(selector_literal_id, |expr| { - *expr = HirExpression::Literal(HirLiteral::Str(signature.clone())); - }); - - // Also update the type! It might have a different length now than the placeholder. - interner.push_expr_type( - selector_literal_id, - Type::String(Box::new(Type::Constant(signature.len() as u64))), - ); - Ok(()) - } - _ => Err(( - AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Signature placeholder literal does not match".to_owned(), - }, - struct_type.borrow().location.file, - )), - } -} - -fn transform_events( - crate_id: &CrateId, - context: &mut HirContext, -) -> Result<(), (AztecMacroError, FileId)> { - for struct_id in collect_crate_structs(crate_id, context) { - let attributes = context.def_interner.struct_attributes(&struct_id); - if attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { - transform_event(struct_id, &mut context.def_interner)?; - } - } - Ok(()) -} - -/// Obtains the serialized length of a type that implements the Serialize trait. 
-fn get_serialized_length( - traits: &[TraitId], - typ: &Type, - interner: &NodeInterner, -) -> Result { - let (struct_name, maybe_stored_in_state) = match typ { - Type::Struct(struct_type, generics) => { - Ok((struct_type.borrow().name.0.contents.clone(), generics.first())) - } - _ => Err(AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some("State storage variable must be a struct".to_string()), - }), - }?; - let stored_in_state = - maybe_stored_in_state.ok_or(AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some("State storage variable must be generic".to_string()), - })?; - - let is_note = traits.iter().any(|&trait_id| { - let r#trait = interner.get_trait(trait_id); - r#trait.name.0.contents == "NoteInterface" - && !interner.lookup_all_trait_implementations(stored_in_state, trait_id).is_empty() - }); - - // Maps and (private) Notes always occupy a single slot. Someone could store a Note in PublicMutable for whatever reason though. - if struct_name == "Map" || (is_note && struct_name != "PublicMutable") { - return Ok(1); - } - - let serialized_trait_impl_kind = traits - .iter() - .find_map(|&trait_id| { - let r#trait = interner.get_trait(trait_id); - if r#trait.borrow().name.0.contents == "Serialize" - && r#trait.borrow().generics.len() == 1 - { - interner - .lookup_all_trait_implementations(stored_in_state, trait_id) - .into_iter() - .next() - } else { - None - } - }) - .ok_or(AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some("Stored data must implement Serialize trait".to_string()), - })?; - - let serialized_trait_impl_id = match serialized_trait_impl_kind { - TraitImplKind::Normal(trait_impl_id) => Ok(trait_impl_id), - _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }), - }?; - - let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id); - let serialized_trait_impl = serialized_trait_impl_shared.borrow(); - - match serialized_trait_impl.trait_generics.first().unwrap() { - Type::Constant(value) => Ok(*value), - _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }), - } -} - -/// Assigns storage slots to the storage struct fields based on the serialized length of the types. 
This automatic assignment -/// will only trigger if the assigned storage slot is invalid (0 as generated by generate_storage_implementation) -fn assign_storage_slots( +fn transform_collected_defs( crate_id: &CrateId, context: &mut HirContext, -) -> Result<(), (AztecMacroError, FileId)> { - let traits: Vec<_> = collect_traits(context); - for struct_id in collect_crate_structs(crate_id, context) { - let interner: &mut NodeInterner = context.def_interner.borrow_mut(); - let r#struct = interner.get_struct(struct_id); - let file_id = r#struct.borrow().location.file; - if r#struct.borrow().name.0.contents == "Storage" && r#struct.borrow().id.krate().is_root() - { - let init_id = interner - .lookup_method( - &Type::Struct(interner.get_struct(struct_id), vec![]), - struct_id, - "init", - false, - ) - .ok_or(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage struct must have an init function".to_string(), - ), - }, - file_id, - ))?; - let init_function = interner.function(&init_id).block(interner); - let init_function_statement_id = init_function.statements().first().ok_or(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some("Init storage statement not found".to_string()), - }, - file_id, - ))?; - let storage_constructor_statement = interner.statement(init_function_statement_id); - - let storage_constructor_expression = match storage_constructor_statement { - HirStatement::Expression(expression_id) => { - match interner.expression(&expression_id) { - HirExpression::Constructor(hir_constructor_expression) => { - Ok(hir_constructor_expression) - } - _ => Err((AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage constructor statement must be a constructor expression" - .to_string(), - ), - }, file_id)) - } - } - _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage constructor statement must be an expression".to_string(), - ), - }, - file_id, - )), - }?; - - let mut storage_slot: u64 = 1; - for (index, (_, expr_id)) in storage_constructor_expression.fields.iter().enumerate() { - let fields = r#struct.borrow().get_fields(&[]); - let (_, field_type) = fields.get(index).unwrap(); - let new_call_expression = match interner.expression(expr_id) { - HirExpression::Call(hir_call_expression) => Ok(hir_call_expression), - _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage field initialization expression is not a call expression" - .to_string(), - ), - }, - file_id, - )), - }?; - - let slot_arg_expression = interner.expression(&new_call_expression.arguments[1]); - - let current_storage_slot = match slot_arg_expression { - HirExpression::Literal(HirLiteral::Integer(slot, _)) => Ok(slot.to_u128()), - _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage slot argument expression must be a literal integer" - .to_string(), - ), - }, - file_id, - )), - }?; - - if current_storage_slot != 0 { - continue; - } - - let type_serialized_len = get_serialized_length(&traits, field_type, interner) - .map_err(|err| (err, file_id))?; - interner.update_expression(new_call_expression.arguments[1], |expr| { - *expr = HirExpression::Literal(HirLiteral::Integer( - FieldElement::from(u128::from(storage_slot)), - false, - )); - }); - - storage_slot += type_serialized_len; - } - } - } - Ok(()) -} - -const SIGNATURE_PLACEHOLDER: &str = "SIGNATURE_PLACEHOLDER"; - -/// Generates the impl for an event selector -/// -/// 
Inserts the following code: -/// ```noir -/// impl SomeStruct { -/// fn selector() -> FunctionSelector { -/// aztec::protocol_types::abis::function_selector::FunctionSelector::from_signature("SIGNATURE_PLACEHOLDER") -/// } -/// } -/// ``` -/// -/// This allows developers to emit events without having to write the signature of the event every time they emit it. -/// The signature cannot be known at this point since types are not resolved yet, so we use a signature placeholder. -/// It'll get resolved after by transforming the HIR. -fn generate_selector_impl(structure: &NoirStruct) -> TypeImpl { - let struct_type = - make_type(UnresolvedTypeData::Named(path(structure.name.clone()), vec![], true)); - - let selector_path = - chained_dep!("aztec", "protocol_types", "abis", "function_selector", "FunctionSelector"); - let mut from_signature_path = selector_path.clone(); - from_signature_path.segments.push(ident("from_signature")); - - let selector_fun_body = BlockExpression(vec![make_statement(StatementKind::Expression(call( - variable_path(from_signature_path), - vec![expression(ExpressionKind::Literal(Literal::Str(SIGNATURE_PLACEHOLDER.to_string())))], - )))]); - - // Define `FunctionSelector` return type - let return_type = - FunctionReturnType::Ty(make_type(UnresolvedTypeData::Named(selector_path, vec![], true))); - - let mut selector_fn_def = FunctionDefinition::normal( - &ident("selector"), - &vec![], - &[], - &selector_fun_body, - &[], - &return_type, - ); - - selector_fn_def.visibility = FunctionVisibility::Public; - - // Seems to be necessary on contract modules - selector_fn_def.return_visibility = Visibility::Public; - - TypeImpl { - object_type: struct_type, - type_span: structure.span, - generics: vec![], - methods: vec![(NoirFunction::normal(selector_fn_def), Span::default())], - } -} - -/// Helper function that returns what the private context would look like in the ast -/// This should make it available to be consumed within aztec private annotated functions. -/// -/// The replaced code: -/// ```noir -/// /// Before -/// fn foo(inputs: PrivateContextInputs) { -/// // ... -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() { -/// // ... -/// } -fn create_inputs(ty: &str) -> Param { - let context_ident = ident("inputs"); - let context_pattern = Pattern::Identifier(context_ident); - - let path_snippet = ty.to_case(Case::Snake); // e.g. private_context_inputs - let type_path = chained_dep!("aztec", "context", "inputs", &path_snippet, ty); - - let context_type = make_type(UnresolvedTypeData::Named(type_path, vec![], true)); - let visibility = Visibility::Private; - - Param { pattern: context_pattern, typ: context_type, visibility, span: Span::default() } -} - -/// Creates an initialization check to ensure that the contract has been initialized, meant to -/// be injected as the first statement of any function after the context has been created. -/// -/// ```noir -/// assert_is_initialized(&mut context); -/// ``` -fn create_init_check() -> Statement { - make_statement(StatementKind::Expression(call( - variable_path(chained_dep!("aztec", "initializer", "assert_is_initialized")), - vec![mutable_reference("context")], - ))) -} - -/// Creates a call to mark_as_initialized which emits the initialization nullifier, meant to -/// be injected as the last statement before returning in a constructor. 
-/// -/// ```noir -/// mark_as_initialized(&mut context); -/// ``` -fn create_mark_as_initialized(ty: &str) -> Statement { - let name = if ty == "Public" { "mark_as_initialized_public" } else { "mark_as_initialized" }; - make_statement(StatementKind::Expression(call( - variable_path(chained_dep!("aztec", "initializer", name)), - vec![mutable_reference("context")], - ))) -} - -/// Creates a check for internal functions ensuring that the caller is self. -/// -/// ```noir -/// assert(context.msg_sender() == context.this_address(), "Function can only be called internally"); -/// ``` -fn create_internal_check(fname: &str) -> Statement { - make_statement(StatementKind::Constrain(ConstrainStatement( - make_eq( - method_call(variable("context"), "msg_sender", vec![]), - method_call(variable("context"), "this_address", vec![]), - ), - Some(expression(ExpressionKind::Literal(Literal::Str(format!( - "Function {} can only be called internally", - fname - ))))), - ConstrainKind::Assert, - ))) -} - -/// Creates the private context object to be accessed within the function, the parameters need to be extracted to be -/// appended into the args hash object. -/// -/// The replaced code: -/// ```noir -/// #[aztec(private)] -/// fn foo(structInput: SomeStruct, arrayInput: [u8; 10], fieldInput: Field) -> Field { -/// // Create the hasher object -/// let mut hasher = Hasher::new(); -/// -/// // struct inputs call serialize on them to add an array of fields -/// hasher.add_multiple(structInput.serialize()); -/// -/// // Array inputs are iterated over and each element is added to the hasher (as a field) -/// for i in 0..arrayInput.len() { -/// hasher.add(arrayInput[i] as Field); -/// } -/// // Field inputs are added to the hasher -/// hasher.add({ident}); -/// -/// // Create the context -/// // The inputs (injected by this `create_inputs`) and completed hash object are passed to the context -/// let mut context = PrivateContext::new(inputs, hasher.hash()); -/// } -/// ``` -fn create_context(ty: &str, params: &[Param]) -> Result, AztecMacroError> { - let mut injected_expressions: Vec = vec![]; - - // `let mut hasher = Hasher::new();` - let let_hasher = mutable_assignment( - "hasher", // Assigned to - call( - variable_path(chained_dep!("aztec", "hasher", "Hasher", "new")), // Path - vec![], // args - ), - ); - - // Completes: `let mut hasher = Hasher::new();` - injected_expressions.push(let_hasher); - - // Iterate over each of the function parameters, adding to them to the hasher - for Param { pattern, typ, span, .. } in params { - match pattern { - Pattern::Identifier(identifier) => { - // Match the type to determine the padding to do - let unresolved_type = &typ.typ; - let expression = match unresolved_type { - // `hasher.add_multiple({ident}.serialize())` - UnresolvedTypeData::Named(..) => add_struct_to_hasher(identifier), - UnresolvedTypeData::Array(_, arr_type) => { - add_array_to_hasher(identifier, arr_type) - } - // `hasher.add({ident})` - UnresolvedTypeData::FieldElement => add_field_to_hasher(identifier), - // Add the integer to the hasher, casted to a field - // `hasher.add({ident} as Field)` - UnresolvedTypeData::Integer(..) | UnresolvedTypeData::Bool => { - add_cast_to_hasher(identifier) - } - UnresolvedTypeData::String(..) 
=> { - let (var_bytes, id) = str_to_bytes(identifier); - injected_expressions.push(var_bytes); - add_array_to_hasher( - &id, - &UnresolvedType { - typ: UnresolvedTypeData::Integer( - Signedness::Unsigned, - noirc_frontend::IntegerBitSize::ThirtyTwo, - ), - span: None, - }, - ) - } - _ => { - return Err(AztecMacroError::UnsupportedFunctionArgumentType { - typ: unresolved_type.clone(), - span: *span, - }) - } - }; - injected_expressions.push(expression); - } - _ => todo!(), // Maybe unreachable? - } - } - - // Create the inputs to the context - let inputs_expression = variable("inputs"); - // `hasher.hash()` - let hash_call = method_call( - variable("hasher"), // variable - "hash", // method name - vec![], // args - ); - - let path_snippet = ty.to_case(Case::Snake); // e.g. private_context - - // let mut context = {ty}::new(inputs, hash); - let let_context = mutable_assignment( - "context", // Assigned to - call( - variable_path(chained_dep!("aztec", "context", &path_snippet, ty, "new")), // Path - vec![inputs_expression, hash_call], // args - ), - ); - injected_expressions.push(let_context); - - // Return all expressions that will be injected by the hasher - Ok(injected_expressions) -} - -/// Creates an mutable avm context -/// -/// ```noir -/// /// Before -/// #[aztec(public-vm)] -/// fn foo() -> Field { -/// let mut context = aztec::context::AVMContext::new(); -/// let timestamp = context.timestamp(); -/// // ... -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() -> Field { -/// let mut timestamp = context.timestamp(); -/// // ... -/// } -fn create_avm_context() -> Result { - let let_context = mutable_assignment( - "context", // Assigned to - call( - variable_path(chained_dep!("aztec", "context", "AVMContext", "new")), // Path - vec![], // args - ), - ); - - Ok(let_context) -} - -/// Abstract Return Type -/// -/// This function intercepts the function's current return type and replaces it with pushes -/// To the kernel -/// -/// The replaced code: -/// ```noir -/// /// Before -/// #[aztec(private)] -/// fn foo() -> protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs { -/// // ... -/// let my_return_value: Field = 10; -/// context.return_values.push(my_return_value); -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() -> Field { -/// // ... -/// let my_return_value: Field = 10; -/// my_return_value -/// } -/// ``` -/// Similarly; Structs will be pushed to the context, after serialize() is called on them. -/// Arrays will be iterated over and each element will be pushed to the context. -/// Any primitive type that can be cast will be casted to a field and pushed to the context. -fn abstract_return_values(func: &NoirFunction) -> Option { - let current_return_type = func.return_type().typ; - let last_statement = func.def.body.0.last()?; - - // TODO: (length, type) => We can limit the size of the array returned to be limited by kernel size - // Doesn't need done until we have settled on a kernel size - // TODO: support tuples here and in inputs -> convert into an issue - // Check if the return type is an expression, if it is, we can handle it - match last_statement { - Statement { kind: StatementKind::Expression(expression), .. } => { - match current_return_type { - // Call serialize on structs, push the whole array, calling push_array - UnresolvedTypeData::Named(..) => Some(make_struct_return_type(expression.clone())), - UnresolvedTypeData::Array(..) 
=> Some(make_array_return_type(expression.clone())), - // Cast these types to a field before pushing - UnresolvedTypeData::Bool | UnresolvedTypeData::Integer(..) => { - Some(make_castable_return_type(expression.clone())) - } - UnresolvedTypeData::FieldElement => Some(make_return_push(expression.clone())), - _ => None, - } - } - _ => None, - } -} - -/// Abstract storage -/// -/// For private functions: -/// ```noir -/// #[aztec(private)] -/// fn lol() { -/// let storage = Storage::init(Context::private(context)); -/// } -/// ``` -/// -/// For public functions: -/// ```noir -/// #[aztec(public)] -/// fn lol() { -/// let storage = Storage::init(Context::public(context)); -/// } -/// ``` -/// -/// For unconstrained functions: -/// ```noir -/// unconstrained fn lol() { -/// let storage = Storage::init(Context::none()); -/// } -fn abstract_storage(typ: &str, unconstrained: bool) -> Statement { - let init_context_call = if unconstrained { - call( - variable_path(chained_dep!("aztec", "context", "Context", "none")), // Path - vec![], // args + collected_trait_impls: &[UnresolvedTraitImpl], + collected_functions: &mut [UnresolvedFunctions], +) -> Result<(), (MacroError, FileId)> { + if has_aztec_dependency(crate_id, context) { + inject_compute_note_hash_and_nullifier( + crate_id, + context, + collected_trait_impls, + collected_functions, ) } else { - call( - variable_path(chained_dep!("aztec", "context", "Context", typ)), // Path - vec![mutable_reference("context")], // args - ) - }; - - assignment( - "storage", // Assigned to - call( - variable_path(chained_path!("Storage", "init")), // Path - vec![init_context_call], // args - ), - ) -} - -/// Context Return Values -/// -/// Creates an instance to the context return values -/// ```noir -/// `context.return_values` -/// ``` -fn context_return_values() -> Expression { - member_access("context", "return_values") -} - -fn make_statement(kind: StatementKind) -> Statement { - Statement { span: Span::default(), kind } -} - -/// Make return Push -/// -/// Translates to: -/// `context.return_values.push({push_value})` -fn make_return_push(push_value: Expression) -> Statement { - make_statement(StatementKind::Semi(method_call( - context_return_values(), - "push", - vec![push_value], - ))) -} - -/// Make Return push array -/// -/// Translates to: -/// `context.return_values.extend_from_array({push_value})` -fn make_return_extend_from_array(push_value: Expression) -> Statement { - make_statement(StatementKind::Semi(method_call( - context_return_values(), - "extend_from_array", - vec![push_value], - ))) -} - -/// Make struct return type -/// -/// Translates to: -/// ```noir -/// `context.return_values.extend_from_array({push_value}.serialize())` -fn make_struct_return_type(expression: Expression) -> Statement { - let serialized_call = method_call( - expression, // variable - "serialize", // method name - vec![], // args - ); - make_return_extend_from_array(serialized_call) -} - -/// Make array return type -/// -/// Translates to: -/// ```noir -/// for i in 0..{ident}.len() { -/// context.return_values.push({ident}[i] as Field) -/// } -/// ``` -fn make_array_return_type(expression: Expression) -> Statement { - let inner_cast_expression = - cast(index_array_variable(expression.clone(), "i"), UnresolvedTypeData::FieldElement); - let assignment = make_statement(StatementKind::Semi(method_call( - context_return_values(), // variable - "push", // method name - vec![inner_cast_expression], - ))); - - create_loop_over(expression, vec![assignment]) -} - -/// Castable 
return type -/// -/// Translates to: -/// ```noir -/// context.return_values.push({ident} as Field) -/// ``` -fn make_castable_return_type(expression: Expression) -> Statement { - // Cast these types to a field before pushing - let cast_expression = cast(expression, UnresolvedTypeData::FieldElement); - make_return_push(cast_expression) -} - -/// Create Return Type -/// -/// Public functions return protocol_types::abis::public_circuit_public_inputs::PublicCircuitPublicInputs while -/// private functions return protocol_types::abis::private_circuit_public_inputs::::PrivateCircuitPublicInputs -/// -/// This call constructs an ast token referencing the above types -/// The name is set in the function above `transform`, hence the -/// whole token name is passed in -/// -/// The replaced code: -/// ```noir -/// -/// /// Before -/// fn foo() -> protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs { -/// // ... -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() { -/// // ... -/// } -fn create_return_type(ty: &str) -> FunctionReturnType { - let path_snippet = ty.to_case(Case::Snake); // e.g. private_circuit_public_inputs or public_circuit_public_inputs - let return_path = chained_dep!("aztec", "protocol_types", "abis", &path_snippet, ty); - return_type(return_path) -} - -/// Create Context Finish -/// -/// Each aztec function calls `context.finish()` at the end of a function -/// to return values required by the kernel. -/// -/// The replaced code: -/// ```noir -/// /// Before -/// fn foo() -> protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs { -/// // ... -/// context.finish() -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() { -/// // ... -/// } -fn create_context_finish() -> Statement { - let method_call = method_call( - variable("context"), // variable - "finish", // method name - vec![], // args - ); - make_statement(StatementKind::Expression(method_call)) + Ok(()) + } } // -// Methods to create hasher inputs +// Transform Hir Nodes for Aztec // -fn add_struct_to_hasher(identifier: &Ident) -> Statement { - // If this is a struct, we call serialize and add the array to the hasher - let serialized_call = method_call( - variable_path(path(identifier.clone())), // variable - "serialize", // method name - vec![], // args - ); - - make_statement(StatementKind::Semi(method_call( - variable("hasher"), // variable - "add_multiple", // method name - vec![serialized_call], // args - ))) -} - -fn str_to_bytes(identifier: &Ident) -> (Statement, Ident) { - // let identifier_as_bytes = identifier.as_bytes(); - let var = variable_ident(identifier.clone()); - let contents = if let ExpressionKind::Variable(p) = &var.kind { - p.segments.first().cloned().unwrap_or_else(|| panic!("No segments")).0.contents - } else { - panic!("Unexpected identifier type") - }; - let bytes_name = format!("{}_bytes", contents); - let var_bytes = assignment(&bytes_name, method_call(var, "as_bytes", vec![])); - let id = Ident::new(bytes_name, Span::default()); - - (var_bytes, id) -} - -fn create_loop_over(var: Expression, loop_body: Vec) -> Statement { - // If this is an array of primitive types (integers / fields) we can add them each to the hasher - // casted to a field - let span = var.span; - - // `array.len()` - let end_range_expression = method_call( - var, // variable - "len", // method name - vec![], // args - ); - - // What will be looped over - // - `hasher.add({ident}[i] as Field)` - let for_loop_block = 
expression(ExpressionKind::Block(BlockExpression(loop_body))); - - // `for i in 0..{ident}.len()` - make_statement(StatementKind::For(ForLoopStatement { - range: ForRange::Range( - expression(ExpressionKind::Literal(Literal::Integer( - FieldElement::from(i128::from(0)), - false, - ))), - end_range_expression, - ), - identifier: ident("i"), - block: for_loop_block, - span, - })) -} - -fn add_array_to_hasher(identifier: &Ident, arr_type: &UnresolvedType) -> Statement { - // If this is an array of primitive types (integers / fields) we can add them each to the hasher - // casted to a field - - // Wrap in the semi thing - does that mean ended with semi colon? - // `hasher.add({ident}[i] as Field)` - - let arr_index = index_array(identifier.clone(), "i"); - let (add_expression, hasher_method_name) = match arr_type.typ { - UnresolvedTypeData::Named(..) => { - let hasher_method_name = "add_multiple".to_owned(); - let call = method_call( - // All serialize on each element - arr_index, // variable - "serialize", // method name - vec![], // args - ); - (call, hasher_method_name) - } - _ => { - let hasher_method_name = "add".to_owned(); - let call = cast( - arr_index, // lhs - `ident[i]` - UnresolvedTypeData::FieldElement, // cast to - `as Field` - ); - (call, hasher_method_name) - } - }; - - let block_statement = make_statement(StatementKind::Semi(method_call( - variable("hasher"), // variable - &hasher_method_name, // method name - vec![add_expression], - ))); - - create_loop_over(variable_ident(identifier.clone()), vec![block_statement]) -} - -fn add_field_to_hasher(identifier: &Ident) -> Statement { - // `hasher.add({ident})` - let ident = variable_path(path(identifier.clone())); - make_statement(StatementKind::Semi(method_call( - variable("hasher"), // variable - "add", // method name - vec![ident], // args - ))) -} - -fn add_cast_to_hasher(identifier: &Ident) -> Statement { - // `hasher.add({ident} as Field)` - // `{ident} as Field` - let cast_operation = cast( - variable_path(path(identifier.clone())), // lhs - UnresolvedTypeData::FieldElement, // rhs - ); - - // `hasher.add({ident} as Field)` - make_statement(StatementKind::Semi(method_call( - variable("hasher"), // variable - "add", // method name - vec![cast_operation], // args - ))) -} - -/// Computes the aztec signature for a resolved type. -fn signature_of_type(typ: &Type) -> String { - match typ { - Type::Integer(Signedness::Signed, bit_size) => format!("i{}", bit_size), - Type::Integer(Signedness::Unsigned, bit_size) => format!("u{}", bit_size), - Type::FieldElement => "Field".to_owned(), - Type::Bool => "bool".to_owned(), - Type::Array(len, typ) => { - if let Type::Constant(len) = **len { - format!("[{};{len}]", signature_of_type(typ)) - } else { - unimplemented!("Cannot generate signature for array with length type {:?}", typ) - } - } - Type::Struct(def, args) => { - let fields = def.borrow().get_fields(args); - let fields = vecmap(fields, |(_, typ)| signature_of_type(&typ)); - format!("({})", fields.join(",")) - } - Type::Tuple(types) => { - let fields = vecmap(types, signature_of_type); - format!("({})", fields.join(",")) - } - _ => unimplemented!("Cannot generate signature for type {:?}", typ), - } -} - -/// Computes the signature for a resolved event type. 
-/// It has the form 'EventName(Field,(Field),[u8;2])' -fn event_signature(event: &StructType) -> String { - let fields = vecmap(event.get_fields(&[]), |(_, typ)| signature_of_type(&typ)); - format!("{}({})", event.name.0.contents, fields.join(",")) -} - -fn inject_compute_note_hash_and_nullifier( +/// Completes the Hir with data gathered from type resolution +fn transform_hir( crate_id: &CrateId, context: &mut HirContext, - unresolved_traits_impls: &[UnresolvedTraitImpl], - collected_functions: &mut [UnresolvedFunctions], -) -> Result<(), (MacroError, FileId)> { - // We first fetch modules in this crate which correspond to contracts, along with their file id. - let contract_module_file_ids: Vec<(LocalModuleId, FileId)> = context - .def_map(crate_id) - .expect("ICE: Missing crate in def_map") - .modules() - .iter() - .filter(|(_, module)| module.is_contract) - .map(|(idx, module)| (LocalModuleId(idx), module.location.file)) - .collect(); - - // If the current crate does not contain a contract module we simply skip it. - if contract_module_file_ids.is_empty() { - return Ok(()); - } else if contract_module_file_ids.len() != 1 { - panic!("Found multiple contracts in the same crate"); - } - - let (module_id, file_id) = contract_module_file_ids[0]; - - // If compute_note_hash_and_nullifier is already defined by the user, we skip auto-generation in order to provide an - // escape hatch for this mechanism. - // TODO(#4647): improve this diagnosis and error messaging. - if collected_functions.iter().any(|coll_funcs_data| { - check_for_compute_note_hash_and_nullifier_definition(&coll_funcs_data.functions, module_id) - }) { - return Ok(()); - } - - // In order to implement compute_note_hash_and_nullifier, we need to know all of the different note types the - // contract might use. These are the types that implement the NoteInterface trait, which provides the - // get_note_type_id function. - let note_types = fetch_struct_trait_impls(context, unresolved_traits_impls, "NoteInterface"); - - // We can now generate a version of compute_note_hash_and_nullifier tailored for the contract in this crate. - let func = generate_compute_note_hash_and_nullifier(¬e_types); - - // And inject the newly created function into the contract. - - // TODO(#4373): We don't have a reasonable location for the source code of this autogenerated function, so we simply - // pass an empty span. This function should not produce errors anyway so this should not matter. - let location = Location::new(Span::empty(0), file_id); - - // These are the same things the ModCollector does when collecting functions: we push the function to the - // NodeInterner, declare it in the module (which checks for duplicate definitions), and finally add it to the list - // on collected but unresolved functions. - - let func_id = context.def_interner.push_empty_fn(); - context.def_interner.push_function( - func_id, - &func.def, - ModuleId { krate: *crate_id, local_id: module_id }, - location, - ); - - context.def_map_mut(crate_id).unwrap() - .modules_mut()[module_id.0] - .declare_function( - func.name_ident().clone(), func_id - ).expect( - "Failed to declare the autogenerated compute_note_hash_and_nullifier function, likely due to a duplicate definition. See https://github.com/AztecProtocol/aztec-packages/issues/4647." 
- ); - - collected_functions - .iter_mut() - .find(|fns| fns.file_id == file_id) - .expect("ICE: no functions found in contract file") - .push_fn(module_id, func_id, func.clone()); - - Ok(()) -} - -// Fetches the name of all structs that implement trait_name, both in the current crate and all of its dependencies. -fn fetch_struct_trait_impls( - context: &mut HirContext, - unresolved_traits_impls: &[UnresolvedTraitImpl], - trait_name: &str, -) -> Vec { - let mut struct_typenames: Vec = Vec::new(); - - // These structs can be declared in either external crates or the current one. External crates that contain - // dependencies have already been processed and resolved, but are available here via the NodeInterner. Note that - // crates on which the current crate does not depend on may not have been processed, and will be ignored. - for trait_impl_id in 0..context.def_interner.next_trait_impl_id().0 { - let trait_impl = &context.def_interner.get_trait_implementation(TraitImplId(trait_impl_id)); - - if trait_impl.borrow().ident.0.contents == *trait_name { - if let Type::Struct(s, _) = &trait_impl.borrow().typ { - struct_typenames.push(s.borrow().name.0.contents.clone()); - } else { - panic!("Found impl for {} on non-Struct", trait_name); - } - } - } - - // This crate's traits and impls have not yet been resolved, so we look for impls in unresolved_trait_impls. - struct_typenames.extend( - unresolved_traits_impls - .iter() - .filter(|trait_impl| { - trait_impl - .trait_path - .segments - .last() - .expect("ICE: empty trait_impl path") - .0 - .contents - == *trait_name - }) - .filter_map(|trait_impl| match &trait_impl.object_type.typ { - UnresolvedTypeData::Named(path, _, _) => { - Some(path.segments.last().unwrap().0.contents.clone()) - } - _ => None, - }), - ); - - struct_typenames -} - -fn generate_compute_note_hash_and_nullifier(note_types: &Vec) -> NoirFunction { - let function_source = generate_compute_note_hash_and_nullifier_source(note_types); - - let (function_ast, errors) = parse_program(&function_source); - if !errors.is_empty() { - dbg!(errors.clone()); - } - assert_eq!(errors.len(), 0, "Failed to parse Noir macro code. This is either a bug in the compiler or the Noir macro code"); - - let mut function_ast = function_ast.into_sorted(); - function_ast.functions.remove(0) -} - -fn generate_compute_note_hash_and_nullifier_source(note_types: &Vec) -> String { - // TODO(#4649): The serialized_note parameter is a fixed-size array, but we don't know what length it should have. - // For now we hardcode it to 20, which is the same as MAX_NOTE_FIELDS_LENGTH. - - if note_types.is_empty() { - // TODO(#4520): Even if the contract does not include any notes, other parts of the stack expect for this - // function to exist, so we include a dummy version. We likely should error out here instead. - " - unconstrained fn compute_note_hash_and_nullifier( - contract_address: AztecAddress, - nonce: Field, - storage_slot: Field, - note_type_id: Field, - serialized_note: [Field; 20] - ) -> pub [Field; 4] { - [0, 0, 0, 0] - }" - .to_string() - } else { - // For contracts that include notes we do a simple if-else chain comparing note_type_id with the different - // get_note_type_id of each of the note types. 
- - let if_statements: Vec = note_types.iter().map(|note_type| format!( - "if (note_type_id == {0}::get_note_type_id()) {{ - dep::aztec::note::utils::compute_note_hash_and_nullifier({0}::deserialize_content, note_header, serialized_note) - }}" - , note_type)).collect(); - - // TODO(#4520): error out on the else instead of returning a zero array - let full_if_statement = if_statements.join(" else ") - + " - else { - [0, 0, 0, 0] - }"; - - format!( - " - unconstrained fn compute_note_hash_and_nullifier( - contract_address: AztecAddress, - nonce: Field, - storage_slot: Field, - note_type_id: Field, - serialized_note: [Field; 20] - ) -> pub [Field; 4] {{ - let note_header = dep::aztec::prelude::NoteHeader::new(contract_address, nonce, storage_slot); - - {} - }}", - full_if_statement - ) - } +) -> Result<(), (AztecMacroError, FileId)> { + transform_events(crate_id, context)?; + assign_storage_slots(crate_id, context) } diff --git a/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs b/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs new file mode 100644 index 00000000000..4f8f3f19ab8 --- /dev/null +++ b/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs @@ -0,0 +1,195 @@ +use noirc_errors::{Location, Span}; +use noirc_frontend::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}, + def_map::{LocalModuleId, ModuleId}, + }, + macros_api::{FileId, HirContext, MacroError}, + node_interner::FuncId, + parse_program, FunctionReturnType, NoirFunction, UnresolvedTypeData, +}; + +use crate::utils::hir_utils::fetch_struct_trait_impls; + +// Check if "compute_note_hash_and_nullifier(AztecAddress,Field,Field,Field,[Field; N]) -> [Field; 4]" is defined +fn check_for_compute_note_hash_and_nullifier_definition( + functions_data: &[(LocalModuleId, FuncId, NoirFunction)], + module_id: LocalModuleId, +) -> bool { + functions_data.iter().filter(|func_data| func_data.0 == module_id).any(|func_data| { + func_data.2.def.name.0.contents == "compute_note_hash_and_nullifier" + && func_data.2.def.parameters.len() == 5 + && match &func_data.2.def.parameters[0].typ.typ { + UnresolvedTypeData::Named(path, _, _) => path.segments.last().unwrap().0.contents == "AztecAddress", + _ => false, + } + && func_data.2.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement + && func_data.2.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement + && func_data.2.def.parameters[3].typ.typ == UnresolvedTypeData::FieldElement + // checks if the 5th parameter is an array and the Box in + // Array(Option, Box) contains only fields + && match &func_data.2.def.parameters[4].typ.typ { + UnresolvedTypeData::Array(_, inner_type) => { + matches!(inner_type.typ, UnresolvedTypeData::FieldElement) + }, + _ => false, + } + // We check the return type the same way as we did the 5th parameter + && match &func_data.2.def.return_type { + FunctionReturnType::Default(_) => false, + FunctionReturnType::Ty(unresolved_type) => { + match &unresolved_type.typ { + UnresolvedTypeData::Array(_, inner_type) => { + matches!(inner_type.typ, UnresolvedTypeData::FieldElement) + }, + _ => false, + } + } + } + }) +} + +pub fn inject_compute_note_hash_and_nullifier( + crate_id: &CrateId, + context: &mut HirContext, + unresolved_traits_impls: &[UnresolvedTraitImpl], + collected_functions: &mut [UnresolvedFunctions], +) -> Result<(), (MacroError, FileId)> { + // We first fetch modules in this crate which correspond to contracts, along with their file id. 
+ let contract_module_file_ids: Vec<(LocalModuleId, FileId)> = context + .def_map(crate_id) + .expect("ICE: Missing crate in def_map") + .modules() + .iter() + .filter(|(_, module)| module.is_contract) + .map(|(idx, module)| (LocalModuleId(idx), module.location.file)) + .collect(); + + // If the current crate does not contain a contract module we simply skip it. + if contract_module_file_ids.is_empty() { + return Ok(()); + } else if contract_module_file_ids.len() != 1 { + panic!("Found multiple contracts in the same crate"); + } + + let (module_id, file_id) = contract_module_file_ids[0]; + + // If compute_note_hash_and_nullifier is already defined by the user, we skip auto-generation in order to provide an + // escape hatch for this mechanism. + // TODO(#4647): improve this diagnosis and error messaging. + if collected_functions.iter().any(|coll_funcs_data| { + check_for_compute_note_hash_and_nullifier_definition(&coll_funcs_data.functions, module_id) + }) { + return Ok(()); + } + + // In order to implement compute_note_hash_and_nullifier, we need to know all of the different note types the + // contract might use. These are the types that implement the NoteInterface trait, which provides the + // get_note_type_id function. + let note_types = fetch_struct_trait_impls(context, unresolved_traits_impls, "NoteInterface"); + + // We can now generate a version of compute_note_hash_and_nullifier tailored for the contract in this crate. + let func = generate_compute_note_hash_and_nullifier(¬e_types); + + // And inject the newly created function into the contract. + + // TODO(#4373): We don't have a reasonable location for the source code of this autogenerated function, so we simply + // pass an empty span. This function should not produce errors anyway so this should not matter. + let location = Location::new(Span::empty(0), file_id); + + // These are the same things the ModCollector does when collecting functions: we push the function to the + // NodeInterner, declare it in the module (which checks for duplicate definitions), and finally add it to the list + // on collected but unresolved functions. + + let func_id = context.def_interner.push_empty_fn(); + context.def_interner.push_function( + func_id, + &func.def, + ModuleId { krate: *crate_id, local_id: module_id }, + location, + ); + + context.def_map_mut(crate_id).unwrap() + .modules_mut()[module_id.0] + .declare_function( + func.name_ident().clone(), func_id + ).expect( + "Failed to declare the autogenerated compute_note_hash_and_nullifier function, likely due to a duplicate definition. See https://github.com/AztecProtocol/aztec-packages/issues/4647." + ); + + collected_functions + .iter_mut() + .find(|fns| fns.file_id == file_id) + .expect("ICE: no functions found in contract file") + .push_fn(module_id, func_id, func.clone()); + + Ok(()) +} + +fn generate_compute_note_hash_and_nullifier(note_types: &Vec) -> NoirFunction { + let function_source = generate_compute_note_hash_and_nullifier_source(note_types); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors.clone()); + } + assert_eq!(errors.len(), 0, "Failed to parse Noir macro code. This is either a bug in the compiler or the Noir macro code"); + + let mut function_ast = function_ast.into_sorted(); + function_ast.functions.remove(0) +} + +fn generate_compute_note_hash_and_nullifier_source(note_types: &Vec) -> String { + // TODO(#4649): The serialized_note parameter is a fixed-size array, but we don't know what length it should have. 
+ // For now we hardcode it to 20, which is the same as MAX_NOTE_FIELDS_LENGTH. + + if note_types.is_empty() { + // Even if the contract does not include any notes, other parts of the stack expect for this function to exist, + // so we include a dummy version. + " + unconstrained fn compute_note_hash_and_nullifier( + contract_address: AztecAddress, + nonce: Field, + storage_slot: Field, + note_type_id: Field, + serialized_note: [Field; 20] + ) -> pub [Field; 4] { + assert(false, \"This contract does not use private notes\"); + [0, 0, 0, 0] + }" + .to_string() + } else { + // For contracts that include notes we do a simple if-else chain comparing note_type_id with the different + // get_note_type_id of each of the note types. + + let if_statements: Vec = note_types.iter().map(|note_type| format!( + "if (note_type_id == {0}::get_note_type_id()) {{ + dep::aztec::note::utils::compute_note_hash_and_nullifier({0}::deserialize_content, note_header, serialized_note) + }}" + , note_type)).collect(); + + let full_if_statement = if_statements.join(" else ") + + " + else { + assert(false, \"Unknown note type ID\"); + [0, 0, 0, 0] + }"; + + format!( + " + unconstrained fn compute_note_hash_and_nullifier( + contract_address: AztecAddress, + nonce: Field, + storage_slot: Field, + note_type_id: Field, + serialized_note: [Field; 20] + ) -> pub [Field; 4] {{ + let note_header = dep::aztec::prelude::NoteHeader::new(contract_address, nonce, storage_slot); + + {} + }}", + full_if_statement + ) + } +} diff --git a/aztec_macros/src/transforms/events.rs b/aztec_macros/src/transforms/events.rs new file mode 100644 index 00000000000..b02709efacb --- /dev/null +++ b/aztec_macros/src/transforms/events.rs @@ -0,0 +1,178 @@ +use iter_extended::vecmap; +use noirc_errors::Span; +use noirc_frontend::{ + graph::CrateId, + macros_api::{ + BlockExpression, FileId, HirContext, HirExpression, HirLiteral, HirStatement, NodeInterner, + NoirStruct, PathKind, StatementKind, StructId, StructType, Type, TypeImpl, + UnresolvedTypeData, + }, + token::SecondaryAttribute, + ExpressionKind, FunctionDefinition, FunctionReturnType, ItemVisibility, Literal, NoirFunction, + Visibility, +}; + +use crate::{ + chained_dep, + utils::{ + ast_utils::{ + call, expression, ident, ident_path, make_statement, make_type, path, variable_path, + }, + constants::SIGNATURE_PLACEHOLDER, + errors::AztecMacroError, + hir_utils::{collect_crate_structs, signature_of_type}, + }, +}; + +/// Generates the impl for an event selector +/// +/// Inserts the following code: +/// ```noir +/// impl SomeStruct { +/// fn selector() -> FunctionSelector { +/// aztec::protocol_types::abis::function_selector::FunctionSelector::from_signature("SIGNATURE_PLACEHOLDER") +/// } +/// } +/// ``` +/// +/// This allows developers to emit events without having to write the signature of the event every time they emit it. +/// The signature cannot be known at this point since types are not resolved yet, so we use a signature placeholder. +/// It'll get resolved after by transforming the HIR. 
+pub fn generate_selector_impl(structure: &NoirStruct) -> TypeImpl { + let struct_type = + make_type(UnresolvedTypeData::Named(path(structure.name.clone()), vec![], true)); + + let selector_path = + chained_dep!("aztec", "protocol_types", "abis", "function_selector", "FunctionSelector"); + let mut from_signature_path = selector_path.clone(); + from_signature_path.segments.push(ident("from_signature")); + + let selector_fun_body = BlockExpression(vec![make_statement(StatementKind::Expression(call( + variable_path(from_signature_path), + vec![expression(ExpressionKind::Literal(Literal::Str(SIGNATURE_PLACEHOLDER.to_string())))], + )))]); + + // Define `FunctionSelector` return type + let return_type = + FunctionReturnType::Ty(make_type(UnresolvedTypeData::Named(selector_path, vec![], true))); + + let mut selector_fn_def = FunctionDefinition::normal( + &ident("selector"), + &vec![], + &[], + &selector_fun_body, + &[], + &return_type, + ); + + selector_fn_def.visibility = ItemVisibility::Public; + + // Seems to be necessary on contract modules + selector_fn_def.return_visibility = Visibility::Public; + + TypeImpl { + object_type: struct_type, + type_span: structure.span, + generics: vec![], + methods: vec![(NoirFunction::normal(selector_fn_def), Span::default())], + } +} + +/// Computes the signature for a resolved event type. +/// It has the form 'EventName(Field,(Field),[u8;2])' +fn event_signature(event: &StructType) -> String { + let fields = vecmap(event.get_fields(&[]), |(_, typ)| signature_of_type(&typ)); + format!("{}({})", event.name.0.contents, fields.join(",")) +} + +/// Substitutes the signature literal that was introduced in the selector method previously with the actual signature. +fn transform_event( + struct_id: StructId, + interner: &mut NodeInterner, +) -> Result<(), (AztecMacroError, FileId)> { + let struct_type = interner.get_struct(struct_id); + let selector_id = interner + .lookup_method(&Type::Struct(struct_type.clone(), vec![]), struct_id, "selector", false) + .ok_or_else(|| { + let error = AztecMacroError::EventError { + span: struct_type.borrow().location.span, + message: "Selector method not found".to_owned(), + }; + (error, struct_type.borrow().location.file) + })?; + let selector_function = interner.function(&selector_id); + + let compute_selector_statement = interner.statement( + selector_function.block(interner).statements().first().ok_or_else(|| { + let error = AztecMacroError::EventError { + span: struct_type.borrow().location.span, + message: "Compute selector statement not found".to_owned(), + }; + (error, struct_type.borrow().location.file) + })?, + ); + + let compute_selector_expression = match compute_selector_statement { + HirStatement::Expression(expression_id) => match interner.expression(&expression_id) { + HirExpression::Call(hir_call_expression) => Some(hir_call_expression), + _ => None, + }, + _ => None, + } + .ok_or_else(|| { + let error = AztecMacroError::EventError { + span: struct_type.borrow().location.span, + message: "Compute selector statement is not a call expression".to_owned(), + }; + (error, struct_type.borrow().location.file) + })?; + + let first_arg_id = compute_selector_expression.arguments.first().ok_or_else(|| { + let error = AztecMacroError::EventError { + span: struct_type.borrow().location.span, + message: "Compute selector statement is not a call expression".to_owned(), + }; + (error, struct_type.borrow().location.file) + })?; + + match interner.expression(first_arg_id) { + HirExpression::Literal(HirLiteral::Str(signature)) + if 
signature == SIGNATURE_PLACEHOLDER => + { + let selector_literal_id = *first_arg_id; + + let structure = interner.get_struct(struct_id); + let signature = event_signature(&structure.borrow()); + interner.update_expression(selector_literal_id, |expr| { + *expr = HirExpression::Literal(HirLiteral::Str(signature.clone())); + }); + + // Also update the type! It might have a different length now than the placeholder. + interner.push_expr_type( + selector_literal_id, + Type::String(Box::new(Type::Constant(signature.len() as u64))), + ); + Ok(()) + } + _ => Err(( + AztecMacroError::EventError { + span: struct_type.borrow().location.span, + message: "Signature placeholder literal does not match".to_owned(), + }, + struct_type.borrow().location.file, + )), + } +} + +pub fn transform_events( + crate_id: &CrateId, + context: &mut HirContext, +) -> Result<(), (AztecMacroError, FileId)> { + for struct_id in collect_crate_structs(crate_id, context) { + let attributes = context.def_interner.struct_attributes(&struct_id); + if attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { + transform_event(struct_id, &mut context.def_interner)?; + } + } + Ok(()) +} diff --git a/aztec_macros/src/transforms/functions.rs b/aztec_macros/src/transforms/functions.rs new file mode 100644 index 00000000000..8b3c7d2f53b --- /dev/null +++ b/aztec_macros/src/transforms/functions.rs @@ -0,0 +1,707 @@ +use convert_case::{Case, Casing}; +use noirc_errors::Span; +use noirc_frontend::{ + macros_api::FieldElement, BlockExpression, ConstrainKind, ConstrainStatement, Distinctness, + Expression, ExpressionKind, ForLoopStatement, ForRange, FunctionReturnType, Ident, Literal, + NoirFunction, Param, PathKind, Pattern, Signedness, Statement, StatementKind, UnresolvedType, + UnresolvedTypeData, Visibility, +}; + +use crate::{ + chained_dep, chained_path, + utils::{ + ast_utils::{ + assignment, call, cast, expression, ident, ident_path, index_array, + index_array_variable, make_eq, make_statement, make_type, member_access, method_call, + mutable_assignment, mutable_reference, path, return_type, variable, variable_ident, + variable_path, + }, + errors::AztecMacroError, + }, +}; + +// If it does, it will insert the following things: +/// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs +/// - Hashes all of the function input variables +/// - This instantiates a helper function +pub fn transform_function( + ty: &str, + func: &mut NoirFunction, + storage_defined: bool, + is_initializer: bool, + insert_init_check: bool, + is_internal: bool, +) -> Result<(), AztecMacroError> { + let context_name = format!("{}Context", ty); + let inputs_name = format!("{}ContextInputs", ty); + let return_type_name = format!("{}CircuitPublicInputs", ty); + + // Add check that msg sender equals this address and flag function as internal + if is_internal { + let is_internal_check = create_internal_check(func.name()); + func.def.body.0.insert(0, is_internal_check); + } + + // Add initialization check + if insert_init_check { + let init_check = create_init_check(); + func.def.body.0.insert(0, init_check); + } + + // Add access to the storage struct + if storage_defined { + let storage_def = abstract_storage(&ty.to_lowercase(), false); + func.def.body.0.insert(0, storage_def); + } + + // Insert the context creation as the first action + let create_context = create_context(&context_name, &func.def.parameters)?; + func.def.body.0.splice(0..0, (create_context).iter().cloned()); + + // Add the inputs to the 
params + let input = create_inputs(&inputs_name); + func.def.parameters.insert(0, input); + + // Abstract return types such that they get added to the kernel's return_values + if let Some(return_values) = abstract_return_values(func) { + // In case we are pushing return values to the context, we remove the statement that originated it + // This avoids running duplicate code, since blocks like if/else can be value returning statements + func.def.body.0.pop(); + // Add the new return statement + func.def.body.0.push(return_values); + } + + // Before returning mark the contract as initialized + if is_initializer { + let mark_initialized = create_mark_as_initialized(); + func.def.body.0.push(mark_initialized); + } + + // Push the finish method call to the end of the function + let finish_def = create_context_finish(); + func.def.body.0.push(finish_def); + + let return_type = create_return_type(&return_type_name); + func.def.return_type = return_type; + func.def.return_visibility = Visibility::Public; + + // Distinct return types are only required for private functions + // Public functions should have unconstrained auto-inferred + match ty { + "Private" => func.def.return_distinctness = Distinctness::Distinct, + "Public" => func.def.is_unconstrained = true, + _ => (), + } + + Ok(()) +} + +/// Transform a function to work with AVM bytecode +pub fn transform_vm_function( + func: &mut NoirFunction, + storage_defined: bool, +) -> Result<(), AztecMacroError> { + // Create access to storage + if storage_defined { + let storage = abstract_storage("public_vm", true); + func.def.body.0.insert(0, storage); + } + + // Push Avm context creation to the beginning of the function + let create_context = create_avm_context()?; + func.def.body.0.insert(0, create_context); + + // We want the function to be seen as a public function + func.def.is_unconstrained = true; + + // NOTE: the line below is a temporary hack to trigger external transpilation tools + // It will be removed once the transpiler is integrated into the Noir compiler + func.def.name.0.contents = format!("avm_{}", func.def.name.0.contents); + Ok(()) +} + +/// Transform Unconstrained +/// +/// Inserts the following code at the beginning of an unconstrained function +/// ```noir +/// let storage = Storage::init(Context::none()); +/// ``` +/// +/// This will allow developers to access their contract' storage struct in unconstrained functions +pub fn transform_unconstrained(func: &mut NoirFunction) { + func.def.body.0.insert(0, abstract_storage("Unconstrained", true)); +} + +/// Helper function that returns what the private context would look like in the ast +/// This should make it available to be consumed within aztec private annotated functions. +/// +/// The replaced code: +/// ```noir +/// /// Before +/// fn foo(inputs: PrivateContextInputs) { +/// // ... +/// } +/// +/// /// After +/// #[aztec(private)] +/// fn foo() { +/// // ... +/// } +fn create_inputs(ty: &str) -> Param { + let context_ident = ident("inputs"); + let context_pattern = Pattern::Identifier(context_ident); + + let path_snippet = ty.to_case(Case::Snake); // e.g. 
private_context_inputs + let type_path = chained_dep!("aztec", "context", "inputs", &path_snippet, ty); + + let context_type = make_type(UnresolvedTypeData::Named(type_path, vec![], true)); + let visibility = Visibility::Private; + + Param { pattern: context_pattern, typ: context_type, visibility, span: Span::default() } +} + +/// Creates an initialization check to ensure that the contract has been initialized, meant to +/// be injected as the first statement of any function after the context has been created. +/// +/// ```noir +/// assert_is_initialized(&mut context); +/// ``` +fn create_init_check() -> Statement { + make_statement(StatementKind::Expression(call( + variable_path(chained_dep!("aztec", "initializer", "assert_is_initialized")), + vec![mutable_reference("context")], + ))) +} + +/// Creates a call to mark_as_initialized which emits the initialization nullifier, meant to +/// be injected as the last statement before returning in a constructor. +/// +/// ```noir +/// mark_as_initialized(&mut context); +/// ``` +fn create_mark_as_initialized() -> Statement { + make_statement(StatementKind::Expression(call( + variable_path(chained_dep!("aztec", "initializer", "mark_as_initialized")), + vec![mutable_reference("context")], + ))) +} + +/// Creates a check for internal functions ensuring that the caller is self. +/// +/// ```noir +/// assert(context.msg_sender() == context.this_address(), "Function can only be called internally"); +/// ``` +fn create_internal_check(fname: &str) -> Statement { + make_statement(StatementKind::Constrain(ConstrainStatement( + make_eq( + method_call(variable("context"), "msg_sender", vec![]), + method_call(variable("context"), "this_address", vec![]), + ), + Some(expression(ExpressionKind::Literal(Literal::Str(format!( + "Function {} can only be called internally", + fname + ))))), + ConstrainKind::Assert, + ))) +} + +/// Creates the private context object to be accessed within the function, the parameters need to be extracted to be +/// appended into the args hash object. +/// +/// The replaced code: +/// ```noir +/// #[aztec(private)] +/// fn foo(structInput: SomeStruct, arrayInput: [u8; 10], fieldInput: Field) -> Field { +/// // Create the hasher object +/// let mut hasher = Hasher::new(); +/// +/// // struct inputs call serialize on them to add an array of fields +/// hasher.add_multiple(structInput.serialize()); +/// +/// // Array inputs are iterated over and each element is added to the hasher (as a field) +/// for i in 0..arrayInput.len() { +/// hasher.add(arrayInput[i] as Field); +/// } +/// // Field inputs are added to the hasher +/// hasher.add({ident}); +/// +/// // Create the context +/// // The inputs (injected by this `create_inputs`) and completed hash object are passed to the context +/// let mut context = PrivateContext::new(inputs, hasher.hash()); +/// } +/// ``` +fn create_context(ty: &str, params: &[Param]) -> Result, AztecMacroError> { + let mut injected_expressions: Vec = vec![]; + + // `let mut hasher = Hasher::new();` + let let_hasher = mutable_assignment( + "hasher", // Assigned to + call( + variable_path(chained_dep!("aztec", "hasher", "Hasher", "new")), // Path + vec![], // args + ), + ); + + // Completes: `let mut hasher = Hasher::new();` + injected_expressions.push(let_hasher); + + // Iterate over each of the function parameters, adding to them to the hasher + for Param { pattern, typ, span, .. 
} in params { + match pattern { + Pattern::Identifier(identifier) => { + // Match the type to determine the padding to do + let unresolved_type = &typ.typ; + let expression = match unresolved_type { + // `hasher.add_multiple({ident}.serialize())` + UnresolvedTypeData::Named(..) => add_struct_to_hasher(identifier), + UnresolvedTypeData::Array(_, arr_type) => { + add_array_to_hasher(identifier, arr_type) + } + // `hasher.add({ident})` + UnresolvedTypeData::FieldElement => add_field_to_hasher(identifier), + // Add the integer to the hasher, casted to a field + // `hasher.add({ident} as Field)` + UnresolvedTypeData::Integer(..) | UnresolvedTypeData::Bool => { + add_cast_to_hasher(identifier) + } + UnresolvedTypeData::String(..) => { + let (var_bytes, id) = str_to_bytes(identifier); + injected_expressions.push(var_bytes); + add_array_to_hasher( + &id, + &UnresolvedType { + typ: UnresolvedTypeData::Integer( + Signedness::Unsigned, + noirc_frontend::IntegerBitSize::ThirtyTwo, + ), + span: None, + }, + ) + } + _ => { + return Err(AztecMacroError::UnsupportedFunctionArgumentType { + typ: unresolved_type.clone(), + span: *span, + }) + } + }; + injected_expressions.push(expression); + } + _ => todo!(), // Maybe unreachable? + } + } + + // Create the inputs to the context + let inputs_expression = variable("inputs"); + // `hasher.hash()` + let hash_call = method_call( + variable("hasher"), // variable + "hash", // method name + vec![], // args + ); + + let path_snippet = ty.to_case(Case::Snake); // e.g. private_context + + // let mut context = {ty}::new(inputs, hash); + let let_context = mutable_assignment( + "context", // Assigned to + call( + variable_path(chained_dep!("aztec", "context", &path_snippet, ty, "new")), // Path + vec![inputs_expression, hash_call], // args + ), + ); + injected_expressions.push(let_context); + + // Return all expressions that will be injected by the hasher + Ok(injected_expressions) +} + +/// Creates an mutable avm context +/// +/// ```noir +/// /// Before +/// #[aztec(public-vm)] +/// fn foo() -> Field { +/// let mut context = aztec::context::AVMContext::new(); +/// let timestamp = context.timestamp(); +/// // ... +/// } +/// +/// /// After +/// #[aztec(private)] +/// fn foo() -> Field { +/// let mut timestamp = context.timestamp(); +/// // ... +/// } +fn create_avm_context() -> Result { + let let_context = mutable_assignment( + "context", // Assigned to + call( + variable_path(chained_dep!("aztec", "context", "AVMContext", "new")), // Path + vec![], // args + ), + ); + + Ok(let_context) +} + +/// Abstract Return Type +/// +/// This function intercepts the function's current return type and replaces it with pushes +/// To the kernel +/// +/// The replaced code: +/// ```noir +/// /// Before +/// #[aztec(private)] +/// fn foo() -> protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs { +/// // ... +/// let my_return_value: Field = 10; +/// context.return_values.push(my_return_value); +/// } +/// +/// /// After +/// #[aztec(private)] +/// fn foo() -> Field { +/// // ... +/// let my_return_value: Field = 10; +/// my_return_value +/// } +/// ``` +/// Similarly; Structs will be pushed to the context, after serialize() is called on them. +/// Arrays will be iterated over and each element will be pushed to the context. +/// Any primitive type that can be cast will be casted to a field and pushed to the context. 
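As a hedged illustration of the struct and array cases described above (a sketch only: `my_struct` and `vals` are hypothetical bindings, and the generated statements are the ones produced by the `make_struct_return_type` and `make_array_return_type` helpers defined further down):

```noir
// Struct return values are serialized and appended in one call
context.return_values.extend_from_array(my_struct.serialize());

// Array return values are pushed element by element, cast to Field
for i in 0..vals.len() {
    context.return_values.push(vals[i] as Field);
}
```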
+fn abstract_return_values(func: &NoirFunction) -> Option { + let current_return_type = func.return_type().typ; + let last_statement = func.def.body.0.last()?; + + // TODO: (length, type) => We can limit the size of the array returned to be limited by kernel size + // Doesn't need done until we have settled on a kernel size + // TODO: support tuples here and in inputs -> convert into an issue + // Check if the return type is an expression, if it is, we can handle it + match last_statement { + Statement { kind: StatementKind::Expression(expression), .. } => { + match current_return_type { + // Call serialize on structs, push the whole array, calling push_array + UnresolvedTypeData::Named(..) => Some(make_struct_return_type(expression.clone())), + UnresolvedTypeData::Array(..) => Some(make_array_return_type(expression.clone())), + // Cast these types to a field before pushing + UnresolvedTypeData::Bool | UnresolvedTypeData::Integer(..) => { + Some(make_castable_return_type(expression.clone())) + } + UnresolvedTypeData::FieldElement => Some(make_return_push(expression.clone())), + _ => None, + } + } + _ => None, + } +} + +/// Abstract storage +/// +/// For private functions: +/// ```noir +/// #[aztec(private)] +/// fn lol() { +/// let storage = Storage::init(Context::private(context)); +/// } +/// ``` +/// +/// For public functions: +/// ```noir +/// #[aztec(public)] +/// fn lol() { +/// let storage = Storage::init(Context::public(context)); +/// } +/// ``` +/// +/// For unconstrained functions: +/// ```noir +/// unconstrained fn lol() { +/// let storage = Storage::init(Context::none()); +/// } +fn abstract_storage(typ: &str, unconstrained: bool) -> Statement { + let init_context_call = if unconstrained { + call( + variable_path(chained_dep!("aztec", "context", "Context", "none")), // Path + vec![], // args + ) + } else { + call( + variable_path(chained_dep!("aztec", "context", "Context", typ)), // Path + vec![mutable_reference("context")], // args + ) + }; + + assignment( + "storage", // Assigned to + call( + variable_path(chained_path!("Storage", "init")), // Path + vec![init_context_call], // args + ), + ) +} + +/// Context Return Values +/// +/// Creates an instance to the context return values +/// ```noir +/// `context.return_values` +/// ``` +fn context_return_values() -> Expression { + member_access("context", "return_values") +} + +/// Make return Push +/// +/// Translates to: +/// `context.return_values.push({push_value})` +fn make_return_push(push_value: Expression) -> Statement { + make_statement(StatementKind::Semi(method_call( + context_return_values(), + "push", + vec![push_value], + ))) +} + +/// Make Return push array +/// +/// Translates to: +/// `context.return_values.extend_from_array({push_value})` +fn make_return_extend_from_array(push_value: Expression) -> Statement { + make_statement(StatementKind::Semi(method_call( + context_return_values(), + "extend_from_array", + vec![push_value], + ))) +} + +/// Make struct return type +/// +/// Translates to: +/// ```noir +/// `context.return_values.extend_from_array({push_value}.serialize())` +fn make_struct_return_type(expression: Expression) -> Statement { + let serialized_call = method_call( + expression, // variable + "serialize", // method name + vec![], // args + ); + make_return_extend_from_array(serialized_call) +} + +/// Make array return type +/// +/// Translates to: +/// ```noir +/// for i in 0..{ident}.len() { +/// context.return_values.push({ident}[i] as Field) +/// } +/// ``` +fn make_array_return_type(expression: 
Expression) -> Statement { + let inner_cast_expression = + cast(index_array_variable(expression.clone(), "i"), UnresolvedTypeData::FieldElement); + let assignment = make_statement(StatementKind::Semi(method_call( + context_return_values(), // variable + "push", // method name + vec![inner_cast_expression], + ))); + + create_loop_over(expression, vec![assignment]) +} + +/// Castable return type +/// +/// Translates to: +/// ```noir +/// context.return_values.push({ident} as Field) +/// ``` +fn make_castable_return_type(expression: Expression) -> Statement { + // Cast these types to a field before pushing + let cast_expression = cast(expression, UnresolvedTypeData::FieldElement); + make_return_push(cast_expression) +} + +/// Create Return Type +/// +/// Public functions return protocol_types::abis::public_circuit_public_inputs::PublicCircuitPublicInputs while +/// private functions return protocol_types::abis::private_circuit_public_inputs::::PrivateCircuitPublicInputs +/// +/// This call constructs an ast token referencing the above types +/// The name is set in the function above `transform`, hence the +/// whole token name is passed in +/// +/// The replaced code: +/// ```noir +/// +/// /// Before +/// fn foo() -> protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs { +/// // ... +/// } +/// +/// /// After +/// #[aztec(private)] +/// fn foo() { +/// // ... +/// } +fn create_return_type(ty: &str) -> FunctionReturnType { + let path_snippet = ty.to_case(Case::Snake); // e.g. private_circuit_public_inputs or public_circuit_public_inputs + let return_path = chained_dep!("aztec", "protocol_types", "abis", &path_snippet, ty); + return_type(return_path) +} + +/// Create Context Finish +/// +/// Each aztec function calls `context.finish()` at the end of a function +/// to return values required by the kernel. +/// +/// The replaced code: +/// ```noir +/// /// Before +/// fn foo() -> protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs { +/// // ... +/// context.finish() +/// } +/// +/// /// After +/// #[aztec(private)] +/// fn foo() { +/// // ... 
+/// } +fn create_context_finish() -> Statement { + let method_call = method_call( + variable("context"), // variable + "finish", // method name + vec![], // args + ); + make_statement(StatementKind::Expression(method_call)) +} + +// +// Methods to create hasher inputs +// + +fn add_struct_to_hasher(identifier: &Ident) -> Statement { + // If this is a struct, we call serialize and add the array to the hasher + let serialized_call = method_call( + variable_path(path(identifier.clone())), // variable + "serialize", // method name + vec![], // args + ); + + make_statement(StatementKind::Semi(method_call( + variable("hasher"), // variable + "add_multiple", // method name + vec![serialized_call], // args + ))) +} + +fn str_to_bytes(identifier: &Ident) -> (Statement, Ident) { + // let identifier_as_bytes = identifier.as_bytes(); + let var = variable_ident(identifier.clone()); + let contents = if let ExpressionKind::Variable(p) = &var.kind { + p.segments.first().cloned().unwrap_or_else(|| panic!("No segments")).0.contents + } else { + panic!("Unexpected identifier type") + }; + let bytes_name = format!("{}_bytes", contents); + let var_bytes = assignment(&bytes_name, method_call(var, "as_bytes", vec![])); + let id = Ident::new(bytes_name, Span::default()); + + (var_bytes, id) +} + +fn create_loop_over(var: Expression, loop_body: Vec) -> Statement { + // If this is an array of primitive types (integers / fields) we can add them each to the hasher + // casted to a field + let span = var.span; + + // `array.len()` + let end_range_expression = method_call( + var, // variable + "len", // method name + vec![], // args + ); + + // What will be looped over + // - `hasher.add({ident}[i] as Field)` + let for_loop_block = expression(ExpressionKind::Block(BlockExpression(loop_body))); + + // `for i in 0..{ident}.len()` + make_statement(StatementKind::For(ForLoopStatement { + range: ForRange::Range( + expression(ExpressionKind::Literal(Literal::Integer( + FieldElement::from(i128::from(0)), + false, + ))), + end_range_expression, + ), + identifier: ident("i"), + block: for_loop_block, + span, + })) +} + +fn add_array_to_hasher(identifier: &Ident, arr_type: &UnresolvedType) -> Statement { + // If this is an array of primitive types (integers / fields) we can add them each to the hasher + // casted to a field + + // Wrap in the semi thing - does that mean ended with semi colon? + // `hasher.add({ident}[i] as Field)` + + let arr_index = index_array(identifier.clone(), "i"); + let (add_expression, hasher_method_name) = match arr_type.typ { + UnresolvedTypeData::Named(..) 
=> { + let hasher_method_name = "add_multiple".to_owned(); + let call = method_call( + // All serialize on each element + arr_index, // variable + "serialize", // method name + vec![], // args + ); + (call, hasher_method_name) + } + _ => { + let hasher_method_name = "add".to_owned(); + let call = cast( + arr_index, // lhs - `ident[i]` + UnresolvedTypeData::FieldElement, // cast to - `as Field` + ); + (call, hasher_method_name) + } + }; + + let block_statement = make_statement(StatementKind::Semi(method_call( + variable("hasher"), // variable + &hasher_method_name, // method name + vec![add_expression], + ))); + + create_loop_over(variable_ident(identifier.clone()), vec![block_statement]) +} + +fn add_field_to_hasher(identifier: &Ident) -> Statement { + // `hasher.add({ident})` + let ident = variable_path(path(identifier.clone())); + make_statement(StatementKind::Semi(method_call( + variable("hasher"), // variable + "add", // method name + vec![ident], // args + ))) +} + +fn add_cast_to_hasher(identifier: &Ident) -> Statement { + // `hasher.add({ident} as Field)` + // `{ident} as Field` + let cast_operation = cast( + variable_path(path(identifier.clone())), // lhs + UnresolvedTypeData::FieldElement, // rhs + ); + + // `hasher.add({ident} as Field)` + make_statement(StatementKind::Semi(method_call( + variable("hasher"), // variable + "add", // method name + vec![cast_operation], // args + ))) +} diff --git a/aztec_macros/src/transforms/mod.rs b/aztec_macros/src/transforms/mod.rs new file mode 100644 index 00000000000..144ffc3efc3 --- /dev/null +++ b/aztec_macros/src/transforms/mod.rs @@ -0,0 +1,4 @@ +pub mod compute_note_hash_and_nullifier; +pub mod events; +pub mod functions; +pub mod storage; diff --git a/aztec_macros/src/transforms/storage.rs b/aztec_macros/src/transforms/storage.rs new file mode 100644 index 00000000000..40a094f78e3 --- /dev/null +++ b/aztec_macros/src/transforms/storage.rs @@ -0,0 +1,346 @@ +use std::borrow::{Borrow, BorrowMut}; + +use noirc_errors::Span; +use noirc_frontend::{ + graph::CrateId, + macros_api::{ + FieldElement, FileId, HirContext, HirExpression, HirLiteral, HirStatement, NodeInterner, + }, + node_interner::{TraitId, TraitImplKind}, + parser::SortedModule, + BlockExpression, Expression, ExpressionKind, FunctionDefinition, Ident, Literal, NoirFunction, + PathKind, Pattern, StatementKind, Type, TypeImpl, UnresolvedType, UnresolvedTypeData, +}; + +use crate::{ + chained_dep, chained_path, + utils::{ + ast_utils::{ + call, expression, ident, ident_path, lambda, make_statement, make_type, pattern, + return_type, variable, variable_path, + }, + errors::AztecMacroError, + hir_utils::{collect_crate_structs, collect_traits}, + }, +}; + +// Check to see if the user has defined a storage struct +pub fn check_for_storage_definition(module: &SortedModule) -> bool { + module.types.iter().any(|r#struct| r#struct.name.0.contents == "Storage") +} + +// Check to see if the user has defined a storage struct +pub fn check_for_storage_implementation(module: &SortedModule) -> bool { + module.impls.iter().any(|r#impl| match &r#impl.object_type.typ { + UnresolvedTypeData::Named(path, _, _) => { + path.segments.last().is_some_and(|segment| segment.0.contents == "Storage") + } + _ => false, + }) +} + +/// Auxiliary function to generate the storage constructor for a given field, using +/// the Storage definition as a reference. Supports nesting. 
+pub fn generate_storage_field_constructor( + (type_ident, unresolved_type): &(Ident, UnresolvedType), + slot: Expression, +) -> Result { + let typ = &unresolved_type.typ; + match typ { + UnresolvedTypeData::Named(path, generics, _) => { + let mut new_path = path.clone().to_owned(); + new_path.segments.push(ident("new")); + match path.segments.last().unwrap().0.contents.as_str() { + "Map" => Ok(call( + variable_path(new_path), + vec![ + variable("context"), + slot, + lambda( + vec![ + ( + pattern("context"), + make_type(UnresolvedTypeData::Named( + chained_dep!("aztec", "context", "Context"), + vec![], + true, + )), + ), + ( + Pattern::Identifier(ident("slot")), + make_type(UnresolvedTypeData::FieldElement), + ), + ], + generate_storage_field_constructor( + &(type_ident.clone(), generics.iter().last().unwrap().clone()), + variable("slot"), + )?, + ), + ], + )), + _ => Ok(call(variable_path(new_path), vec![variable("context"), slot])), + } + } + _ => Err(AztecMacroError::UnsupportedStorageType { + typ: typ.clone(), + span: Some(type_ident.span()), + }), + } +} + +// Generates the Storage implementation block from the Storage struct definition if it does not exist +/// From: +/// +/// struct Storage { +/// a_map: Map>, +/// a_nested_map: Map>>, +/// a_field: SomeStoragePrimitive, +/// } +/// +/// To: +/// +/// impl Storage { +/// fn init(context: Context) -> Self { +/// Storage { +/// a_map: Map::new(context, 0, |context, slot| { +/// SomeStoragePrimitive::new(context, slot) +/// }), +/// a_nested_map: Map::new(context, 0, |context, slot| { +/// Map::new(context, slot, |context, slot| { +/// SomeStoragePrimitive::new(context, slot) +/// }) +/// }), +/// a_field: SomeStoragePrimitive::new(context, 0), +/// } +/// } +/// } +/// +/// Storage slots are generated as 0 and will be populated using the information from the HIR +/// at a later stage. +pub fn generate_storage_implementation(module: &mut SortedModule) -> Result<(), AztecMacroError> { + let definition = + module.types.iter().find(|r#struct| r#struct.name.0.contents == "Storage").unwrap(); + + let slot_zero = expression(ExpressionKind::Literal(Literal::Integer( + FieldElement::from(i128::from(0)), + false, + ))); + + let field_constructors = definition + .fields + .iter() + .flat_map(|field| { + generate_storage_field_constructor(field, slot_zero.clone()) + .map(|expression| (field.0.clone(), expression)) + }) + .collect(); + + let storage_constructor_statement = make_statement(StatementKind::Expression(expression( + ExpressionKind::constructor((chained_path!("Storage"), field_constructors)), + ))); + + let init = NoirFunction::normal(FunctionDefinition::normal( + &ident("init"), + &vec![], + &[( + ident("context"), + make_type(UnresolvedTypeData::Named( + chained_dep!("aztec", "context", "Context"), + vec![], + true, + )), + )], + &BlockExpression(vec![storage_constructor_statement]), + &[], + &return_type(chained_path!("Self")), + )); + + let storage_impl = TypeImpl { + object_type: UnresolvedType { + typ: UnresolvedTypeData::Named(chained_path!("Storage"), vec![], true), + span: Some(Span::default()), + }, + type_span: Span::default(), + generics: vec![], + methods: vec![(init, Span::default())], + }; + module.impls.push(storage_impl); + + Ok(()) +} + +/// Obtains the serialized length of a type that implements the Serialize trait. 
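To make the slot arithmetic performed by `get_serialized_length` and `assign_storage_slots` below concrete, here is a hedged sketch reusing the `Storage` example from the comment above (the generic parameters and the `Serialize<3>` length are assumptions for illustration, not taken from a real contract): Maps and notes occupy a single slot, other state variables occupy as many slots as the serialized length of their stored type, and slots are handed out starting from 1 in declaration order.

```noir
struct Storage {
    a_map: Map<Field, SomeStoragePrimitive<GenericType>>,        // slot 1: Maps always take one slot
    a_nested_map: Map<Field, Map<Field, SomeStoragePrimitive<GenericType>>>, // slot 2: still one slot
    a_field: SomeStoragePrimitive<GenericType>,                  // slot 3: if GenericType implements
                                                                 // Serialize<3>, it spans slots 3..5,
                                                                 // so a following field would start at 6
}
```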
+fn get_serialized_length( + traits: &[TraitId], + typ: &Type, + interner: &NodeInterner, +) -> Result { + let (struct_name, maybe_stored_in_state) = match typ { + Type::Struct(struct_type, generics) => { + Ok((struct_type.borrow().name.0.contents.clone(), generics.first())) + } + _ => Err(AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some("State storage variable must be a struct".to_string()), + }), + }?; + let stored_in_state = + maybe_stored_in_state.ok_or(AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some("State storage variable must be generic".to_string()), + })?; + + let is_note = traits.iter().any(|&trait_id| { + let r#trait = interner.get_trait(trait_id); + r#trait.name.0.contents == "NoteInterface" + && !interner.lookup_all_trait_implementations(stored_in_state, trait_id).is_empty() + }); + + // Maps and (private) Notes always occupy a single slot. Someone could store a Note in PublicMutable for whatever reason though. + if struct_name == "Map" || (is_note && struct_name != "PublicMutable") { + return Ok(1); + } + + let serialized_trait_impl_kind = traits + .iter() + .find_map(|&trait_id| { + let r#trait = interner.get_trait(trait_id); + if r#trait.borrow().name.0.contents == "Serialize" + && r#trait.borrow().generics.len() == 1 + { + interner + .lookup_all_trait_implementations(stored_in_state, trait_id) + .into_iter() + .next() + } else { + None + } + }) + .ok_or(AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some("Stored data must implement Serialize trait".to_string()), + })?; + + let serialized_trait_impl_id = match serialized_trait_impl_kind { + TraitImplKind::Normal(trait_impl_id) => Ok(trait_impl_id), + _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }), + }?; + + let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id); + let serialized_trait_impl = serialized_trait_impl_shared.borrow(); + + match serialized_trait_impl.trait_generics.first().unwrap() { + Type::Constant(value) => Ok(*value), + _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }), + } +} + +/// Assigns storage slots to the storage struct fields based on the serialized length of the types. 
This automatic assignment +/// will only trigger if the assigned storage slot is invalid (0 as generated by generate_storage_implementation) +pub fn assign_storage_slots( + crate_id: &CrateId, + context: &mut HirContext, +) -> Result<(), (AztecMacroError, FileId)> { + let traits: Vec<_> = collect_traits(context); + for struct_id in collect_crate_structs(crate_id, context) { + let interner: &mut NodeInterner = context.def_interner.borrow_mut(); + let r#struct = interner.get_struct(struct_id); + let file_id = r#struct.borrow().location.file; + if r#struct.borrow().name.0.contents == "Storage" && r#struct.borrow().id.krate().is_root() + { + let init_id = interner + .lookup_method( + &Type::Struct(interner.get_struct(struct_id), vec![]), + struct_id, + "init", + false, + ) + .ok_or(( + AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some( + "Storage struct must have an init function".to_string(), + ), + }, + file_id, + ))?; + let init_function = interner.function(&init_id).block(interner); + let init_function_statement_id = init_function.statements().first().ok_or(( + AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some("Init storage statement not found".to_string()), + }, + file_id, + ))?; + let storage_constructor_statement = interner.statement(init_function_statement_id); + + let storage_constructor_expression = match storage_constructor_statement { + HirStatement::Expression(expression_id) => { + match interner.expression(&expression_id) { + HirExpression::Constructor(hir_constructor_expression) => { + Ok(hir_constructor_expression) + } + _ => Err((AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some( + "Storage constructor statement must be a constructor expression" + .to_string(), + ), + }, file_id)) + } + } + _ => Err(( + AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some( + "Storage constructor statement must be an expression".to_string(), + ), + }, + file_id, + )), + }?; + + let mut storage_slot: u64 = 1; + for (index, (_, expr_id)) in storage_constructor_expression.fields.iter().enumerate() { + let fields = r#struct.borrow().get_fields(&[]); + let (_, field_type) = fields.get(index).unwrap(); + let new_call_expression = match interner.expression(expr_id) { + HirExpression::Call(hir_call_expression) => Ok(hir_call_expression), + _ => Err(( + AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some( + "Storage field initialization expression is not a call expression" + .to_string(), + ), + }, + file_id, + )), + }?; + + let slot_arg_expression = interner.expression(&new_call_expression.arguments[1]); + + let current_storage_slot = match slot_arg_expression { + HirExpression::Literal(HirLiteral::Integer(slot, _)) => Ok(slot.to_u128()), + _ => Err(( + AztecMacroError::CouldNotAssignStorageSlots { + secondary_message: Some( + "Storage slot argument expression must be a literal integer" + .to_string(), + ), + }, + file_id, + )), + }?; + + if current_storage_slot != 0 { + continue; + } + + let type_serialized_len = get_serialized_length(&traits, field_type, interner) + .map_err(|err| (err, file_id))?; + interner.update_expression(new_call_expression.arguments[1], |expr| { + *expr = HirExpression::Literal(HirLiteral::Integer( + FieldElement::from(u128::from(storage_slot)), + false, + )); + }); + + storage_slot += type_serialized_len; + } + } + } + Ok(()) +} diff --git a/aztec_macros/src/utils/ast_utils.rs b/aztec_macros/src/utils/ast_utils.rs new file mode 100644 index 00000000000..71c6a93f388 --- 
/dev/null +++ b/aztec_macros/src/utils/ast_utils.rs @@ -0,0 +1,183 @@ +use noirc_errors::{Span, Spanned}; +use noirc_frontend::{ + token::SecondaryAttribute, BinaryOpKind, CallExpression, CastExpression, Expression, + ExpressionKind, FunctionReturnType, Ident, IndexExpression, InfixExpression, Lambda, + LetStatement, MemberAccessExpression, MethodCallExpression, Path, Pattern, PrefixExpression, + Statement, StatementKind, UnaryOp, UnresolvedType, UnresolvedTypeData, +}; + +// +// Helper macros for creating noir ast nodes +// +pub fn ident(name: &str) -> Ident { + Ident::new(name.to_string(), Span::default()) +} + +pub fn ident_path(name: &str) -> Path { + Path::from_ident(ident(name)) +} + +pub fn path(ident: Ident) -> Path { + Path::from_ident(ident) +} + +pub fn expression(kind: ExpressionKind) -> Expression { + Expression::new(kind, Span::default()) +} + +pub fn variable(name: &str) -> Expression { + expression(ExpressionKind::Variable(ident_path(name))) +} + +pub fn variable_ident(identifier: Ident) -> Expression { + expression(ExpressionKind::Variable(path(identifier))) +} + +pub fn variable_path(path: Path) -> Expression { + expression(ExpressionKind::Variable(path)) +} + +pub fn method_call( + object: Expression, + method_name: &str, + arguments: Vec, +) -> Expression { + expression(ExpressionKind::MethodCall(Box::new(MethodCallExpression { + object, + method_name: ident(method_name), + arguments, + }))) +} + +pub fn call(func: Expression, arguments: Vec) -> Expression { + expression(ExpressionKind::Call(Box::new(CallExpression { func: Box::new(func), arguments }))) +} + +pub fn pattern(name: &str) -> Pattern { + Pattern::Identifier(ident(name)) +} + +pub fn mutable(name: &str) -> Pattern { + Pattern::Mutable(Box::new(pattern(name)), Span::default(), true) +} + +pub fn mutable_assignment(name: &str, assigned_to: Expression) -> Statement { + make_statement(StatementKind::Let(LetStatement { + pattern: mutable(name), + r#type: make_type(UnresolvedTypeData::Unspecified), + expression: assigned_to, + })) +} + +pub fn mutable_reference(variable_name: &str) -> Expression { + expression(ExpressionKind::Prefix(Box::new(PrefixExpression { + operator: UnaryOp::MutableReference, + rhs: variable(variable_name), + }))) +} + +pub fn assignment(name: &str, assigned_to: Expression) -> Statement { + make_statement(StatementKind::Let(LetStatement { + pattern: pattern(name), + r#type: make_type(UnresolvedTypeData::Unspecified), + expression: assigned_to, + })) +} + +pub fn member_access(lhs: &str, rhs: &str) -> Expression { + expression(ExpressionKind::MemberAccess(Box::new(MemberAccessExpression { + lhs: variable(lhs), + rhs: ident(rhs), + }))) +} + +pub fn return_type(path: Path) -> FunctionReturnType { + let ty = make_type(UnresolvedTypeData::Named(path, vec![], true)); + FunctionReturnType::Ty(ty) +} + +pub fn lambda(parameters: Vec<(Pattern, UnresolvedType)>, body: Expression) -> Expression { + expression(ExpressionKind::Lambda(Box::new(Lambda { + parameters, + return_type: UnresolvedType { + typ: UnresolvedTypeData::Unspecified, + span: Some(Span::default()), + }, + body, + }))) +} + +pub fn make_eq(lhs: Expression, rhs: Expression) -> Expression { + expression(ExpressionKind::Infix(Box::new(InfixExpression { + lhs, + rhs, + operator: Spanned::from(Span::default(), BinaryOpKind::Equal), + }))) +} + +pub fn make_statement(kind: StatementKind) -> Statement { + Statement { span: Span::default(), kind } +} + +#[macro_export] +macro_rules! 
chained_path { + ( $base:expr ) => { + { + ident_path($base) + } + }; + ( $base:expr $(, $tail:expr)* ) => { + { + let mut base_path = ident_path($base); + $( + base_path.segments.push(ident($tail)); + )* + base_path + } + } +} + +#[macro_export] +macro_rules! chained_dep { + ( $base:expr $(, $tail:expr)* ) => { + { + let mut base_path = ident_path($base); + base_path.kind = PathKind::Dep; + $( + base_path.segments.push(ident($tail)); + )* + base_path + } + } +} + +pub fn cast(lhs: Expression, ty: UnresolvedTypeData) -> Expression { + expression(ExpressionKind::Cast(Box::new(CastExpression { lhs, r#type: make_type(ty) }))) +} + +pub fn make_type(typ: UnresolvedTypeData) -> UnresolvedType { + UnresolvedType { typ, span: Some(Span::default()) } +} + +pub fn index_array(array: Ident, index: &str) -> Expression { + expression(ExpressionKind::Index(Box::new(IndexExpression { + collection: variable_path(path(array)), + index: variable(index), + }))) +} + +pub fn index_array_variable(array: Expression, index: &str) -> Expression { + expression(ExpressionKind::Index(Box::new(IndexExpression { + collection: array, + index: variable(index), + }))) +} + +/// Checks if an attribute is a custom attribute with a specific name +pub fn is_custom_attribute(attr: &SecondaryAttribute, attribute_name: &str) -> bool { + if let SecondaryAttribute::Custom(custom_attr) = attr { + custom_attr.as_str() == attribute_name + } else { + false + } +} diff --git a/aztec_macros/src/utils/checks.rs b/aztec_macros/src/utils/checks.rs new file mode 100644 index 00000000000..5232f67ae87 --- /dev/null +++ b/aztec_macros/src/utils/checks.rs @@ -0,0 +1,22 @@ +use noirc_frontend::{ + graph::CrateId, + macros_api::{FileId, HirContext, MacroError}, +}; + +use super::errors::AztecMacroError; + +/// Creates an error alerting the user that they have not downloaded the Aztec-noir library +pub fn check_for_aztec_dependency( + crate_id: &CrateId, + context: &HirContext, +) -> Result<(), (MacroError, FileId)> { + if has_aztec_dependency(crate_id, context) { + Ok(()) + } else { + Err((AztecMacroError::AztecDepNotFound.into(), context.crate_graph[crate_id].root_file_id)) + } +} + +pub fn has_aztec_dependency(crate_id: &CrateId, context: &HirContext) -> bool { + context.crate_graph[crate_id].dependencies.iter().any(|dep| dep.as_name() == "aztec") +} diff --git a/aztec_macros/src/utils/constants.rs b/aztec_macros/src/utils/constants.rs new file mode 100644 index 00000000000..464cd10e2c7 --- /dev/null +++ b/aztec_macros/src/utils/constants.rs @@ -0,0 +1,3 @@ +pub const FUNCTION_TREE_HEIGHT: u32 = 5; +pub const MAX_CONTRACT_PRIVATE_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); +pub const SIGNATURE_PLACEHOLDER: &str = "SIGNATURE_PLACEHOLDER"; diff --git a/aztec_macros/src/utils/errors.rs b/aztec_macros/src/utils/errors.rs new file mode 100644 index 00000000000..63892b58af9 --- /dev/null +++ b/aztec_macros/src/utils/errors.rs @@ -0,0 +1,69 @@ +use noirc_errors::Span; +use noirc_frontend::{macros_api::MacroError, UnresolvedTypeData}; + +use super::constants::MAX_CONTRACT_PRIVATE_FUNCTIONS; + +#[derive(Debug, Clone)] +pub enum AztecMacroError { + AztecDepNotFound, + ContractHasTooManyPrivateFunctions { span: Span }, + ContractConstructorMissing { span: Span }, + UnsupportedFunctionArgumentType { span: Span, typ: UnresolvedTypeData }, + UnsupportedStorageType { span: Option, typ: UnresolvedTypeData }, + CouldNotAssignStorageSlots { secondary_message: Option }, + CouldNotImplementNoteSerialization { span: Option, typ: UnresolvedTypeData }, + 
EventError { span: Span, message: String }, + UnsupportedAttributes { span: Span, secondary_message: Option }, +} + +impl From for MacroError { + fn from(err: AztecMacroError) -> Self { + match err { + AztecMacroError::AztecDepNotFound {} => MacroError { + primary_message: "Aztec dependency not found. Please add aztec as a dependency in your Cargo.toml. For more information go to https://docs.aztec.network/developers/debugging/aztecnr-errors#aztec-dependency-not-found-please-add-aztec-as-a-dependency-in-your-nargotoml".to_owned(), + secondary_message: None, + span: None, + }, + AztecMacroError::ContractHasTooManyPrivateFunctions { span } => MacroError { + primary_message: format!("Contract can only have a maximum of {} private functions", MAX_CONTRACT_PRIVATE_FUNCTIONS), + secondary_message: None, + span: Some(span), + }, + AztecMacroError::ContractConstructorMissing { span } => MacroError { + primary_message: "Contract must have a constructor function".to_owned(), + secondary_message: None, + span: Some(span), + }, + AztecMacroError::UnsupportedFunctionArgumentType { span, typ } => MacroError { + primary_message: format!("Provided parameter type `{typ:?}` is not supported in Aztec contract interface"), + secondary_message: None, + span: Some(span), + }, + AztecMacroError::UnsupportedStorageType { span, typ } => MacroError { + primary_message: format!("Provided storage type `{typ:?}` is not directly supported in Aztec. Please provide a custom storage implementation"), + secondary_message: None, + span, + }, + AztecMacroError::CouldNotAssignStorageSlots { secondary_message } => MacroError { + primary_message: "Could not assign storage slots, please provide a custom storage implementation".to_string(), + secondary_message, + span: None, + }, + AztecMacroError::CouldNotImplementNoteSerialization { span, typ } => MacroError { + primary_message: format!("Could not implement serialization methods for note `{typ:?}`, please provide a serialize_content and deserialize_content methods"), + secondary_message: None, + span, + }, + AztecMacroError::EventError { span, message } => MacroError { + primary_message: message, + secondary_message: None, + span: Some(span), + }, +AztecMacroError::UnsupportedAttributes { span, secondary_message } => MacroError { + primary_message: "Unsupported attributes in contract function".to_string(), + secondary_message, + span: Some(span), + }, + } + } +} diff --git a/aztec_macros/src/utils/hir_utils.rs b/aztec_macros/src/utils/hir_utils.rs new file mode 100644 index 00000000000..f31a0584261 --- /dev/null +++ b/aztec_macros/src/utils/hir_utils.rs @@ -0,0 +1,118 @@ +use iter_extended::vecmap; +use noirc_frontend::{ + graph::CrateId, + hir::def_collector::dc_crate::UnresolvedTraitImpl, + macros_api::{HirContext, ModuleDefId, StructId}, + node_interner::{TraitId, TraitImplId}, + Signedness, Type, UnresolvedTypeData, +}; + +pub fn collect_crate_structs(crate_id: &CrateId, context: &HirContext) -> Vec { + context + .def_map(crate_id) + .expect("ICE: Missing crate in def_map") + .modules() + .iter() + .flat_map(|(_, module)| { + module.type_definitions().filter_map(|typ| { + if let ModuleDefId::TypeId(struct_id) = typ { + Some(struct_id) + } else { + None + } + }) + }) + .collect() +} + +pub fn collect_traits(context: &HirContext) -> Vec { + let crates = context.crates(); + crates + .flat_map(|crate_id| context.def_map(&crate_id).map(|def_map| def_map.modules())) + .flatten() + .flat_map(|module| { + module.type_definitions().filter_map(|typ| { + if let 
ModuleDefId::TraitId(struct_id) = typ { + Some(struct_id) + } else { + None + } + }) + }) + .collect() +} + +/// Computes the aztec signature for a resolved type. +pub fn signature_of_type(typ: &Type) -> String { + match typ { + Type::Integer(Signedness::Signed, bit_size) => format!("i{}", bit_size), + Type::Integer(Signedness::Unsigned, bit_size) => format!("u{}", bit_size), + Type::FieldElement => "Field".to_owned(), + Type::Bool => "bool".to_owned(), + Type::Array(len, typ) => { + if let Type::Constant(len) = **len { + format!("[{};{len}]", signature_of_type(typ)) + } else { + unimplemented!("Cannot generate signature for array with length type {:?}", typ) + } + } + Type::Struct(def, args) => { + let fields = def.borrow().get_fields(args); + let fields = vecmap(fields, |(_, typ)| signature_of_type(&typ)); + format!("({})", fields.join(",")) + } + Type::Tuple(types) => { + let fields = vecmap(types, signature_of_type); + format!("({})", fields.join(",")) + } + _ => unimplemented!("Cannot generate signature for type {:?}", typ), + } +} + +// Fetches the name of all structs that implement trait_name, both in the current crate and all of its dependencies. +pub fn fetch_struct_trait_impls( + context: &mut HirContext, + unresolved_traits_impls: &[UnresolvedTraitImpl], + trait_name: &str, +) -> Vec { + let mut struct_typenames: Vec = Vec::new(); + + // These structs can be declared in either external crates or the current one. External crates that contain + // dependencies have already been processed and resolved, but are available here via the NodeInterner. Note that + // crates on which the current crate does not depend on may not have been processed, and will be ignored. + for trait_impl_id in 0..context.def_interner.next_trait_impl_id().0 { + let trait_impl = &context.def_interner.get_trait_implementation(TraitImplId(trait_impl_id)); + + if trait_impl.borrow().ident.0.contents == *trait_name { + if let Type::Struct(s, _) = &trait_impl.borrow().typ { + struct_typenames.push(s.borrow().name.0.contents.clone()); + } else { + panic!("Found impl for {} on non-Struct", trait_name); + } + } + } + + // This crate's traits and impls have not yet been resolved, so we look for impls in unresolved_trait_impls. 
+ struct_typenames.extend( + unresolved_traits_impls + .iter() + .filter(|trait_impl| { + trait_impl + .trait_path + .segments + .last() + .expect("ICE: empty trait_impl path") + .0 + .contents + == *trait_name + }) + .filter_map(|trait_impl| match &trait_impl.object_type.typ { + UnresolvedTypeData::Named(path, _, _) => { + Some(path.segments.last().unwrap().0.contents.clone()) + } + _ => None, + }), + ); + + struct_typenames +} diff --git a/aztec_macros/src/utils/mod.rs b/aztec_macros/src/utils/mod.rs new file mode 100644 index 00000000000..c8914f83025 --- /dev/null +++ b/aztec_macros/src/utils/mod.rs @@ -0,0 +1,5 @@ +pub mod ast_utils; +pub mod checks; +pub mod constants; +pub mod errors; +pub mod hir_utils; diff --git a/compiler/noirc_driver/Cargo.toml b/compiler/noirc_driver/Cargo.toml index 681976735f3..a7fe0b4b610 100644 --- a/compiler/noirc_driver/Cargo.toml +++ b/compiler/noirc_driver/Cargo.toml @@ -26,4 +26,3 @@ tracing.workspace = true thiserror.workspace = true aztec_macros = { path = "../../aztec_macros" } -noirc_macros = { path = "../../noirc_macros" } diff --git a/compiler/noirc_driver/src/contract.rs b/compiler/noirc_driver/src/contract.rs index 5f4b66e7dd2..66e8dc0e730 100644 --- a/compiler/noirc_driver/src/contract.rs +++ b/compiler/noirc_driver/src/contract.rs @@ -9,23 +9,6 @@ use noirc_evaluator::errors::SsaReport; use super::debug::DebugFile; -/// Describes the types of smart contract functions that are allowed. -/// Unlike the similar enum in noirc_frontend, 'open' and 'unconstrained' -/// are mutually exclusive here. In the case a function is both, 'unconstrained' -/// takes precedence. -#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)] -pub enum ContractFunctionType { - /// This function will be executed in a private - /// context. - Secret, - /// This function will be executed in a public - /// context. - Open, - /// This function cannot constrain any values and can use nondeterministic features - /// like arrays of a dynamic size. 
- Unconstrained, -} - #[derive(Clone, Debug, Serialize, Deserialize)] pub struct CompiledContract { pub noir_version: String, @@ -55,9 +38,9 @@ pub struct CompiledContract { pub struct ContractFunction { pub name: String, - pub function_type: ContractFunctionType, + pub is_unconstrained: bool, - pub is_internal: bool, + pub custom_attributes: Vec, pub abi: Abi, @@ -69,13 +52,3 @@ pub struct ContractFunction { pub debug: DebugInfo, } - -impl ContractFunctionType { - pub(super) fn new(kind: noirc_frontend::ContractFunctionType, is_unconstrained: bool) -> Self { - match (kind, is_unconstrained) { - (_, true) => Self::Unconstrained, - (noirc_frontend::ContractFunctionType::Secret, false) => Self::Secret, - (noirc_frontend::ContractFunctionType::Open, false) => Self::Open, - } - } -} diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 11f53cdb749..c9494a64b41 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -18,6 +18,7 @@ use noirc_frontend::hir::Context; use noirc_frontend::macros_api::MacroProcessor; use noirc_frontend::monomorphization::{monomorphize, monomorphize_debug, MonomorphizationError}; use noirc_frontend::node_interner::FuncId; +use noirc_frontend::token::SecondaryAttribute; use std::path::Path; use thiserror::Error; use tracing::info; @@ -30,7 +31,7 @@ mod stdlib; use debug::filter_relevant_files; -pub use contract::{CompiledContract, ContractFunction, ContractFunctionType}; +pub use contract::{CompiledContract, ContractFunction}; pub use debug::DebugFile; pub use program::CompiledProgram; @@ -237,14 +238,8 @@ pub fn check_crate( deny_warnings: bool, disable_macros: bool, ) -> CompilationResult<()> { - let macros: Vec<&dyn MacroProcessor> = if disable_macros { - vec![&noirc_macros::AssertMessageMacro as &dyn MacroProcessor] - } else { - vec![ - &aztec_macros::AztecMacro as &dyn MacroProcessor, - &noirc_macros::AssertMessageMacro as &dyn MacroProcessor, - ] - }; + let macros: &[&dyn MacroProcessor] = + if disable_macros { &[] } else { &[&aztec_macros::AztecMacro as &dyn MacroProcessor] }; let mut errors = vec![]; let diagnostics = CrateDefMap::collect_defs(crate_id, context, macros); @@ -404,19 +399,24 @@ fn compile_contract_inner( }; warnings.extend(function.warnings); let modifiers = context.def_interner.function_modifiers(&function_id); - let func_type = modifiers - .contract_function_type - .expect("Expected contract function to have a contract visibility"); - let function_type = ContractFunctionType::new(func_type, modifiers.is_unconstrained); + let custom_attributes = modifiers + .attributes + .secondary + .iter() + .filter_map( + |attr| if let SecondaryAttribute::Custom(tag) = attr { Some(tag) } else { None }, + ) + .cloned() + .collect(); functions.push(ContractFunction { name, - function_type, - is_internal: modifiers.is_internal.unwrap_or(false), + custom_attributes, abi: function.abi, bytecode: function.circuit, debug: function.debug, + is_unconstrained: modifiers.is_unconstrained, }); } diff --git a/compiler/noirc_errors/src/debug_info.rs b/compiler/noirc_errors/src/debug_info.rs index 67ec851d46d..09117bdc3b7 100644 --- a/compiler/noirc_errors/src/debug_info.rs +++ b/compiler/noirc_errors/src/debug_info.rs @@ -24,6 +24,9 @@ use serde::{ #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, PartialOrd, Ord, Deserialize, Serialize)] pub struct DebugVarId(pub u32); +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, PartialOrd, Ord, Deserialize, Serialize)] +pub struct DebugFnId(pub u32); + 
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, PartialOrd, Ord, Deserialize, Serialize)] pub struct DebugTypeId(pub u32); @@ -33,7 +36,14 @@ pub struct DebugVariable { pub debug_type_id: DebugTypeId, } +#[derive(Debug, Clone, Hash, Deserialize, Serialize)] +pub struct DebugFunction { + pub name: String, + pub arg_names: Vec, +} + pub type DebugVariables = BTreeMap; +pub type DebugFunctions = BTreeMap; pub type DebugTypes = BTreeMap; #[serde_as] @@ -45,6 +55,7 @@ pub struct DebugInfo { #[serde_as(as = "BTreeMap")] pub locations: BTreeMap>, pub variables: DebugVariables, + pub functions: DebugFunctions, pub types: DebugTypes, } @@ -60,9 +71,10 @@ impl DebugInfo { pub fn new( locations: BTreeMap>, variables: DebugVariables, + functions: DebugFunctions, types: DebugTypes, ) -> Self { - Self { locations, variables, types } + Self { locations, variables, functions, types } } /// Updates the locations map when the [`Circuit`][acvm::acir::circuit::Circuit] is modified. diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index c04d8475f08..5b227372396 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -248,17 +248,6 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_binary(binary, dfg, result_var); } Instruction::Constrain(lhs, rhs, assert_message) => { - let condition = SingleAddrVariable { - address: self.brillig_context.allocate_register(), - bit_size: 1, - }; - - self.convert_ssa_binary( - &Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq }, - dfg, - condition, - ); - let assert_message = if let Some(error) = assert_message { match error.as_ref() { ConstrainError::Static(string) => Some(string.clone()), @@ -282,6 +271,17 @@ impl<'block> BrilligBlock<'block> { None }; + let condition = SingleAddrVariable { + address: self.brillig_context.allocate_register(), + bit_size: 1, + }; + + self.convert_ssa_binary( + &Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq }, + dfg, + condition, + ); + self.brillig_context.constrain_instruction(condition.address, assert_message); self.brillig_context.deallocate_register(condition.address); } @@ -831,16 +831,10 @@ impl<'block> BrilligBlock<'block> { _ => unreachable!("ICE: array set on non-array"), }; + // Here we want to compare the reference count against 1. let one = self.brillig_context.make_usize_constant(1_usize.into()); let condition = self.brillig_context.allocate_register(); - - self.brillig_context.binary_instruction( - reference_count, - one, - condition, - BrilligBinaryOp::Field { op: BinaryFieldOp::Equals }, - ); - + self.brillig_context.memory_op(reference_count, one, condition, BinaryIntOp::Equals); self.brillig_context.branch_instruction(condition, |ctx, cond| { if cond { // Reference count is 1, we can mutate the array directly diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index f1a8f24ed03..b66efa4da27 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -27,12 +27,11 @@ use acvm::{ FieldElement, }; use debug_show::DebugShow; -use num_bigint::BigUint; /// The Brillig VM does not apply a limit to the memory address space, /// As a convention, we take use 64 bits. This means that we assume that /// memory has 2^64 memory slots. 
-pub(crate) const BRILLIG_MEMORY_ADDRESSING_BIT_SIZE: u32 = 32; +pub(crate) const BRILLIG_MEMORY_ADDRESSING_BIT_SIZE: u32 = 64; // Registers reserved in runtime for special purposes. pub(crate) enum ReservedRegisters { @@ -213,13 +212,7 @@ impl BrilligContext { self.debug_show.array_get(array_ptr, index, result); // Computes array_ptr + index, ie array[index] let index_of_element_in_memory = self.allocate_register(); - self.binary_instruction( - array_ptr, - index, - index_of_element_in_memory, - BrilligBinaryOp::Field { op: BinaryFieldOp::Add }, - ); - + self.memory_op(array_ptr, index, index_of_element_in_memory, BinaryIntOp::Add); self.load_instruction(result, index_of_element_in_memory); // Free up temporary register self.deallocate_register(index_of_element_in_memory); @@ -239,7 +232,10 @@ impl BrilligContext { array_ptr, index, index_of_element_in_memory, - BrilligBinaryOp::Field { op: BinaryFieldOp::Add }, + BrilligBinaryOp::Integer { + op: BinaryIntOp::Add, + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }, ); self.store_instruction(index_of_element_in_memory, value); @@ -562,6 +558,7 @@ impl BrilligContext { bit_size: u32, ) { self.debug_show.const_instruction(result, constant); + self.push_opcode(BrilligOpcode::Const { destination: result, value: constant, bit_size }); } @@ -725,6 +722,8 @@ impl BrilligContext { /// Instead truncation instructions are emitted as to when a /// truncation should be done. /// For Brillig, all integer operations will overflow as its cheap. + /// We currently use cast to truncate: we cast to the required bit size + /// and back to the original bit size. pub(crate) fn truncate_instruction( &mut self, destination_of_truncated_value: SingleAddrVariable, @@ -743,20 +742,12 @@ impl BrilligContext { value_to_truncate.bit_size ); - let mask = BigUint::from(2_u32).pow(bit_size) - BigUint::from(1_u32); - let mask_constant = self.make_constant( - FieldElement::from_be_bytes_reduce(&mask.to_bytes_be()).into(), - value_to_truncate.bit_size, - ); - - self.binary_instruction( - value_to_truncate.address, - mask_constant, - destination_of_truncated_value.address, - BrilligBinaryOp::Integer { op: BinaryIntOp::And, bit_size: value_to_truncate.bit_size }, - ); - - self.deallocate_register(mask_constant); + // We cast back and forth to ensure that the value is truncated. 
+ let intermediate_register = + SingleAddrVariable { address: self.allocate_register(), bit_size }; + self.cast_instruction(intermediate_register, value_to_truncate); + self.cast_instruction(destination_of_truncated_value, intermediate_register); + self.deallocate_register(intermediate_register.address); } /// Emits a stop instruction diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index 0bb81efe977..56cb76adbe4 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -88,6 +88,7 @@ pub fn create_circuit( ) -> Result<(Circuit, DebugInfo, Vec, Vec, Vec), RuntimeError> { let debug_variables = program.debug_variables.clone(); let debug_types = program.debug_types.clone(); + let debug_functions = program.debug_functions.clone(); let func_sig = program.main_function_signature.clone(); let recursive = program.recursive; let mut generated_acir = optimize_into_acir( @@ -130,7 +131,7 @@ pub fn create_circuit( .map(|(index, locations)| (index, locations.into_iter().collect())) .collect(); - let mut debug_info = DebugInfo::new(locations, debug_variables, debug_types); + let mut debug_info = DebugInfo::new(locations, debug_variables, debug_functions, debug_types); // Perform any ACIR-level optimizations let (optimized_circuit, transformation_map) = acvm::compiler::optimize(circuit); diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 8d4d0668534..140ed0b53ff 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -2268,11 +2268,9 @@ impl Context { // We can omit the element size array for arrays which don't contain arrays or slices. fn can_omit_element_sizes_array(array_typ: &Type) -> bool { - if array_typ.contains_slice_element() { - return false; - } - let Type::Array(types, _) = array_typ else { - panic!("ICE: expected array type"); + let types = match array_typ { + Type::Array(types, _) | Type::Slice(types) => types, + _ => panic!("ICE: expected array or slice type"), }; !types.iter().any(|typ| typ.contains_an_array()) diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 9d27554dcaa..bf34a47485b 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -393,10 +393,25 @@ impl FunctionBuilder { self.increment_array_reference_count(value); } } - Type::Array(..) | Type::Slice(..) => { - self.insert_instruction(Instruction::IncrementRc { value }, None); + typ @ Type::Array(..) | typ @ Type::Slice(..) => { // If there are nested arrays or slices, we wait until ArrayGet // is issued to increment the count of that array. + self.insert_instruction(Instruction::IncrementRc { value }, None); + + // This is a bit odd, but in brillig the inc_rc instruction operates on + // a copy of the array's metadata, so we need to re-store a loaded array + // even if there have been no other changes to it. + if let Value::Instruction { instruction, .. } = &self.current_function.dfg[value] { + let instruction = &self.current_function.dfg[*instruction]; + if let Instruction::Load { address } = instruction { + // We can't re-use `value` in case the original address was stored + // to again in the meantime. So introduce another load. 
+ let address = *address; + let value = self.insert_load(address, typ); + self.insert_instruction(Instruction::IncrementRc { value }, None); + self.insert_store(address, value); + } + } } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 943a57c1bc0..46f1e7a2765 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -199,22 +199,47 @@ struct Context<'f> { /// found, the top of this conditions stack is popped since we are no longer under that /// condition. If we are under multiple conditions (a nested if), the topmost condition is /// the most recent condition combined with all previous conditions via `And` instructions. - conditions: Vec<(BasicBlockId, ValueId)>, + condition_stack: Vec, /// Maps SSA array values with a slice type to their size. /// This is maintained by appropriate calls to the `SliceCapacityTracker` and is used by the `ValueMerger`. slice_sizes: HashMap, + + /// Stack of block arguments + /// When processing a block, we pop this stack to get its arguments + /// and at the end we push the arguments for his successor + arguments_stack: Vec>, } +#[derive(Clone)] pub(crate) struct Store { old_value: ValueId, new_value: ValueId, } -struct Branch { - condition: ValueId, +#[derive(Clone)] +struct ConditionalBranch { + // Contains the last processed block during the processing of the branch. last_block: BasicBlockId, + // The unresolved condition of the branch + old_condition: ValueId, + // The condition of the branch + condition: ValueId, + // The store values accumulated when processing the branch store_values: HashMap, + // The allocations accumulated when processing the branch + local_allocations: HashSet, +} + +struct ConditionalContext { + // Condition from the conditional statement + condition: ValueId, + // Block containing the conditional statement + entry_block: BasicBlockId, + // First block of the then branch + then_branch: ConditionalBranch, + // First block of the else branch + else_branch: Option, } fn flatten_function_cfg(function: &mut Function) { @@ -233,90 +258,117 @@ fn flatten_function_cfg(function: &mut Function) { store_values: HashMap::default(), local_allocations: HashSet::new(), branch_ends, - conditions: Vec::new(), slice_sizes: HashMap::default(), + condition_stack: Vec::new(), + arguments_stack: Vec::new(), }; context.flatten(); } impl<'f> Context<'f> { fn flatten(&mut self) { - // Start with following the terminator of the entry block since we don't - // need to flatten the entry block into itself. - self.handle_terminator(self.inserter.function.entry_block()); + // Flatten the CFG by inlining all instructions from the queued blocks + // until all blocks have been flattened. + // We follow the terminator of each block to determine which blocks to + // process next + let mut queue = vec![self.inserter.function.entry_block()]; + while let Some(block) = queue.pop() { + self.inline_block(block); + let to_process = self.handle_terminator(block, &queue); + for incoming_block in to_process { + if !queue.contains(&incoming_block) { + queue.push(incoming_block); + } + } + } } - /// Check the terminator of the given block and recursively inline any blocks reachable from - /// it. Since each block from a jmpif terminator is inlined successively, we must handle - /// instructions with side effects like constrain and store specially to preserve correctness. 
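[editor's note] The recursive descent through the CFG is replaced above by an explicit work list: pop a block, inline it, and queue whatever `handle_terminator` says should be processed next. A toy sketch of that traversal pattern, using integer block ids and a plain successor map instead of the real `BasicBlockId`/`DataFlowGraph` types:

use std::collections::HashMap;

// Toy work-list traversal in the shape of the new `flatten`. In the real pass
// the returned successors encode the then/else/join ordering; here they are
// just a plain successor map.
fn flatten(entry: u32, successors: &HashMap<u32, Vec<u32>>) -> Vec<u32> {
    let mut inlined = Vec::new();
    let mut queue = vec![entry];
    while let Some(block) = queue.pop() {
        inlined.push(block); // stands in for `inline_block(block)`
        // stands in for the blocks returned by `handle_terminator(block, &queue)`
        for &next in successors.get(&block).into_iter().flatten() {
            if !queue.contains(&next) && !inlined.contains(&next) {
                queue.push(next);
            }
        }
    }
    inlined
}

fn main() {
    let successors = HashMap::from([(0, vec![1]), (1, vec![2]), (2, vec![])]);
    assert_eq!(flatten(0, &successors), vec![0, 1, 2]);
}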
- /// For these instructions we must keep track of what the current condition is and modify - /// the instructions according to the module-level comment at the top of this file. Note that - /// the current condition is all the jmpif conditions required to reach the current block, - /// combined via `And` instructions. - /// - /// Returns the last block to be inlined. This is either the return block of the function or, - /// if self.conditions is not empty, the end block of the most recent condition. - fn handle_terminator(&mut self, block: BasicBlockId) -> BasicBlockId { - // As we recursively flatten inner blocks, we need to track the slice information - // for the outer block before we start recursively inlining - let outer_block_instructions = self.inserter.function.dfg[block].instructions(); - let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); - for instruction in outer_block_instructions { - let results = self.inserter.function.dfg.instruction_results(*instruction); - let instruction = &self.inserter.function.dfg[*instruction]; + /// Returns the updated condition so that + /// it is 'AND-ed' with the previous condition (if any) + fn link_condition(&mut self, condition: ValueId) -> ValueId { + // Retrieve the previous condition + if let Some(context) = self.condition_stack.last() { + let previous_branch = context.else_branch.as_ref().unwrap_or(&context.then_branch); + let and = Instruction::binary(BinaryOp::And, previous_branch.condition, condition); + self.insert_instruction(and, CallStack::new()) + } else { + condition + } + } + + /// Returns the current condition + fn get_last_condition(&self) -> Option { + self.condition_stack.last().map(|context| match &context.else_branch { + Some(else_branch) => else_branch.condition, + None => context.then_branch.condition, + }) + } + + // Inline all instructions from the given block into the entry block, and track slice capacities + fn inline_block(&mut self, block: BasicBlockId) { + if self.inserter.function.entry_block() == block { + // we do not inline the entry block into itself + // for the outer block before we start inlining + let outer_block_instructions = self.inserter.function.dfg[block].instructions(); + let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + for instruction in outer_block_instructions { + let results = self.inserter.function.dfg.instruction_results(*instruction); + let instruction = &self.inserter.function.dfg[*instruction]; + capacity_tracker.collect_slice_information( + instruction, + &mut self.slice_sizes, + results.to_vec(), + ); + } + + return; + } + + let arguments = self.arguments_stack.pop().unwrap(); + self.inserter.remember_block_params(block, &arguments); + + // If this is not a separate variable, clippy gets confused and says the to_vec is + // unnecessary, when removing it actually causes an aliasing/mutability error. 
+ let instructions = self.inserter.function.dfg[block].instructions().to_vec(); + for instruction in instructions.iter() { + let results = self.push_instruction(*instruction); + let (instruction, _) = self.inserter.map_instruction(*instruction); + let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); capacity_tracker.collect_slice_information( - instruction, + &instruction, &mut self.slice_sizes, - results.to_vec(), + results, ); } + } - match self.inserter.function.dfg[block].unwrap_terminator() { + /// Returns the list of blocks that need to be processed after the given block + /// For a normal block, it would be its successor + /// For blocks related to a conditional statement, we ensure to process + /// the 'then-branch', then the 'else-branch' (if it exists), and finally the end block + fn handle_terminator( + &mut self, + block: BasicBlockId, + work_list: &[BasicBlockId], + ) -> Vec { + let terminator = self.inserter.function.dfg[block].unwrap_terminator().clone(); + match &terminator { TerminatorInstruction::JmpIf { condition, then_destination, else_destination } => { - let old_condition = *condition; - let then_block = *then_destination; - let else_block = *else_destination; - let then_condition = self.inserter.resolve(old_condition); - - let one = FieldElement::one(); - let then_branch = - self.inline_branch(block, then_block, old_condition, then_condition, one); - - let else_condition = - self.insert_instruction(Instruction::Not(then_condition), CallStack::new()); - let zero = FieldElement::zero(); - - // Make sure the else branch sees the previous values of each store - // rather than any values created in the 'then' branch. - self.undo_stores_in_then_branch(&then_branch); - - let else_branch = - self.inline_branch(block, else_block, old_condition, else_condition, zero); - - // We must remember to reset whether side effects are enabled when both branches - // end, in addition to resetting the value of old_condition since it is set to - // known to be true/false within the then/else branch respectively. - self.insert_current_side_effects_enabled(); - - // We must map back to `then_condition` here. Mapping `old_condition` to itself would - // lose any previous mappings. - self.inserter.map_value(old_condition, then_condition); - - // While there is a condition on the stack we don't compile outside the condition - // until it is popped. This ensures we inline the full then and else branches - // before continuing from the end of the conditional here where they can be merged properly. 
- let end = self.branch_ends[&block]; - self.inline_branch_end(end, then_branch, else_branch) + self.arguments_stack.push(vec![]); + self.if_start(condition, then_destination, else_destination, &block) } TerminatorInstruction::Jmp { destination, arguments, call_stack: _ } => { - if let Some((end_block, _)) = self.conditions.last() { - if destination == end_block { - return block; + let arguments = vecmap(arguments.clone(), |value| self.inserter.resolve(value)); + self.arguments_stack.push(arguments); + if work_list.contains(destination) { + if work_list.last() == Some(destination) { + self.else_stop(&block) + } else { + self.then_stop(&block) } + } else { + vec![*destination] } - let destination = *destination; - let arguments = vecmap(arguments.clone(), |value| self.inserter.resolve(value)); - self.inline_block(destination, &arguments) } TerminatorInstruction::Return { return_values, call_stack } => { let call_stack = call_stack.clone(); @@ -326,133 +378,133 @@ impl<'f> Context<'f> { let entry = self.inserter.function.entry_block(); self.inserter.function.dfg.set_block_terminator(entry, new_return); - block + vec![] } } } - /// Push a condition to the stack of conditions. - /// - /// This condition should be present while we're inlining each block reachable from the 'then' - /// branch of a jmpif instruction, until the branches eventually join back together. Likewise, - /// !condition should be present while we're inlining each block reachable from the 'else' - /// branch of a jmpif instruction until the join block. - fn push_condition(&mut self, start_block: BasicBlockId, condition: ValueId) { - let end_block = self.branch_ends[&start_block]; - - if let Some((_, previous_condition)) = self.conditions.last() { - let and = Instruction::binary(BinaryOp::And, *previous_condition, condition); - let new_condition = self.insert_instruction(and, CallStack::new()); - self.conditions.push((end_block, new_condition)); - } else { - self.conditions.push((end_block, condition)); + /// Process a conditional statement + fn if_start( + &mut self, + condition: &ValueId, + then_destination: &BasicBlockId, + else_destination: &BasicBlockId, + if_entry: &BasicBlockId, + ) -> Vec { + // manage conditions + let old_condition = *condition; + let then_condition = self.inserter.resolve(old_condition); + + let one = FieldElement::one(); + let old_stores = std::mem::take(&mut self.store_values); + let old_allocations = std::mem::take(&mut self.local_allocations); + let branch = ConditionalBranch { + old_condition, + condition: self.link_condition(then_condition), + store_values: old_stores, + local_allocations: old_allocations, + last_block: *then_destination, + }; + let cond_context = ConditionalContext { + condition: then_condition, + entry_block: *if_entry, + then_branch: branch, + else_branch: None, + }; + self.condition_stack.push(cond_context); + self.insert_current_side_effects_enabled(); + // Optimization: within the then branch we know the condition to be true, so replace + // any references of it within this branch with true. Likewise, do the same with false + // with the else branch. We must be careful not to replace the condition if it is a + // known constant, otherwise we can end up setting 1 = 0 or vice-versa. 
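[editor's note] For nested ifs the active condition is the AND of every enclosing branch condition, which is what `link_condition` builds one level at a time and `insert_current_side_effects_enabled` consumes. A small boolean sketch of the same idea, with plain `bool`s standing in for SSA `ValueId`s:

// Toy condition stack: the condition in effect at any point is the AND of
// every branch condition pushed so far, mirroring `link_condition`.
struct ConditionStack {
    stack: Vec<bool>,
}

impl ConditionStack {
    fn new() -> Self {
        Self { stack: Vec::new() }
    }

    // Entering a branch: combine with the innermost enclosing condition, if any.
    fn push(&mut self, condition: bool) {
        let linked = match self.stack.last() {
            Some(previous) => *previous && condition,
            None => condition,
        };
        self.stack.push(linked);
    }

    fn pop(&mut self) {
        self.stack.pop();
    }

    // The condition used for `EnableSideEffects`; `true` at branch depth 0.
    fn current(&self) -> bool {
        *self.stack.last().unwrap_or(&true)
    }
}

fn main() {
    let mut conditions = ConditionStack::new();
    conditions.push(true); // outer `if a` with a known to be true here
    conditions.push(false); // inner `if b` with b false
    assert!(!conditions.current()); // side effects disabled inside the inner branch
    conditions.pop();
    assert!(conditions.current());
    conditions.pop();
}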
+ if self.inserter.function.dfg.get_numeric_constant(old_condition).is_none() { + let known_value = self.inserter.function.dfg.make_constant(one, Type::bool()); + + self.inserter.map_value(old_condition, known_value); } + vec![self.branch_ends[if_entry], *else_destination, *then_destination] } - /// Insert a new instruction into the function's entry block. - /// Unlike push_instruction, this function will not map any ValueIds. - /// within the given instruction, nor will it modify self.values in any way. - fn insert_instruction(&mut self, instruction: Instruction, call_stack: CallStack) -> ValueId { - let block = self.inserter.function.entry_block(); - self.inserter - .function - .dfg - .insert_instruction_and_results(instruction, block, None, call_stack) - .first() + /// Switch context to the 'else-branch' + fn then_stop(&mut self, block: &BasicBlockId) -> Vec { + let mut cond_context = self.condition_stack.pop().unwrap(); + cond_context.then_branch.last_block = *block; + + let else_condition = + self.insert_instruction(Instruction::Not(cond_context.condition), CallStack::new()); + let else_condition = self.link_condition(else_condition); + + let zero = FieldElement::zero(); + // Make sure the else branch sees the previous values of each store + // rather than any values created in the 'then' branch. + let old_stores = std::mem::take(&mut cond_context.then_branch.store_values); + cond_context.then_branch.store_values = std::mem::take(&mut self.store_values); + self.undo_stores_in_then_branch(&cond_context.then_branch.store_values); + + let old_allocations = std::mem::take(&mut self.local_allocations); + let else_branch = ConditionalBranch { + old_condition: cond_context.then_branch.old_condition, + condition: else_condition, + store_values: old_stores, + local_allocations: old_allocations, + last_block: *block, + }; + let old_condition = else_branch.old_condition; + cond_context.then_branch.local_allocations.clear(); + cond_context.else_branch = Some(else_branch); + self.condition_stack.push(cond_context); + + self.insert_current_side_effects_enabled(); + // Optimization: within the then branch we know the condition to be true, so replace + // any references of it within this branch with true. Likewise, do the same with false + // with the else branch. We must be careful not to replace the condition if it is a + // known constant, otherwise we can end up setting 1 = 0 or vice-versa. + if self.inserter.function.dfg.get_numeric_constant(old_condition).is_none() { + let known_value = self.inserter.function.dfg.make_constant(zero, Type::bool()); + + self.inserter.map_value(old_condition, known_value); + } + assert_eq!(self.cfg.successors(*block).len(), 1); + vec![self.cfg.successors(*block).next().unwrap()] } - /// Inserts a new instruction into the function's entry block, using the given - /// control type variables to specify result types if needed. - /// Unlike push_instruction, this function will not map any ValueIds. - /// within the given instruction, nor will it modify self.values in any way. 
- fn insert_instruction_with_typevars( - &mut self, - instruction: Instruction, - ctrl_typevars: Option>, - ) -> InsertInstructionResult { - let block = self.inserter.function.entry_block(); - self.inserter.function.dfg.insert_instruction_and_results( - instruction, - block, - ctrl_typevars, - CallStack::new(), - ) - } + /// Process the 'exit' block of a conditional statement + fn else_stop(&mut self, block: &BasicBlockId) -> Vec { + let mut cond_context = self.condition_stack.pop().unwrap(); + if cond_context.else_branch.is_none() { + // then_stop() has not been called, this means that the conditional statement has no else branch + // so we simply do the then_stop() now + self.condition_stack.push(cond_context); + self.then_stop(block); + cond_context = self.condition_stack.pop().unwrap(); + } - /// Checks the branch condition on the top of the stack and uses it to build and insert an - /// `EnableSideEffects` instruction into the entry block. - /// - /// If the stack is empty, a "true" u1 constant is taken to be the active condition. This is - /// necessary for re-enabling side-effects when re-emerging to a branch depth of 0. - fn insert_current_side_effects_enabled(&mut self) { - let condition = match self.conditions.last() { - Some((_, cond)) => *cond, - None => { - self.inserter.function.dfg.make_constant(FieldElement::one(), Type::unsigned(1)) - } - }; - let enable_side_effects = Instruction::EnableSideEffects { condition }; - self.insert_instruction_with_typevars(enable_side_effects, None); - } + let mut else_branch = cond_context.else_branch.unwrap(); + let stores_in_branch = std::mem::replace(&mut self.store_values, else_branch.store_values); + self.local_allocations = std::mem::take(&mut else_branch.local_allocations); + else_branch.last_block = *block; + else_branch.store_values = stores_in_branch; + cond_context.else_branch = Some(else_branch); - /// Inline one branch of a jmpif instruction. - /// - /// This will continue inlining recursively until the next end block is reached where each branch - /// of the jmpif instruction is joined back into a single block. - /// - /// Within a branch of a jmpif instruction, we can assume the condition of the jmpif to be - /// always true or false, depending on which branch we're in. - /// - /// Returns the ending block / join block of this branch. - fn inline_branch( - &mut self, - jmpif_block: BasicBlockId, - destination: BasicBlockId, - old_condition: ValueId, - new_condition: ValueId, - condition_value: FieldElement, - ) -> Branch { - if destination == self.branch_ends[&jmpif_block] { - // If the branch destination is the same as the end of the branch, this must be the - // 'else' case of an if with no else - so there is no else branch. - Branch { - condition: new_condition, - // The last block here is somewhat arbitrary. It only matters that it has no Jmp - // args that will be merged by inline_branch_end. Since jmpifs don't have - // block arguments, it is safe to use the jmpif block here. - last_block: jmpif_block, - store_values: HashMap::default(), - } - } else { - self.push_condition(jmpif_block, new_condition); - self.insert_current_side_effects_enabled(); - let old_stores = std::mem::take(&mut self.store_values); - let old_allocations = std::mem::take(&mut self.local_allocations); - - // Optimization: within the then branch we know the condition to be true, so replace - // any references of it within this branch with true. Likewise, do the same with false - // with the else branch. 
We must be careful not to replace the condition if it is a - // known constant, otherwise we can end up setting 1 = 0 or vice-versa. - if self.inserter.function.dfg.get_numeric_constant(old_condition).is_none() { - let known_value = - self.inserter.function.dfg.make_constant(condition_value, Type::bool()); - - self.inserter.map_value(old_condition, known_value); - } + // We must remember to reset whether side effects are enabled when both branches + // end, in addition to resetting the value of old_condition since it is set to + // known to be true/false within the then/else branch respectively. + self.insert_current_side_effects_enabled(); - let final_block = self.inline_block(destination, &[]); + // We must map back to `then_condition` here. Mapping `old_condition` to itself would + // lose any previous mappings. + self.inserter + .map_value(cond_context.then_branch.old_condition, cond_context.then_branch.condition); - self.conditions.pop(); + // While there is a condition on the stack we don't compile outside the condition + // until it is popped. This ensures we inline the full then and else branches + // before continuing from the end of the conditional here where they can be merged properly. + let end = self.branch_ends[&cond_context.entry_block]; - let stores_in_branch = std::mem::replace(&mut self.store_values, old_stores); - self.local_allocations = old_allocations; + // Merge arguments and stores from the else/end branches + self.inline_branch_end(end, cond_context); - Branch { - condition: new_condition, - last_block: final_block, - store_values: stores_in_branch, - } - } + vec![self.cfg.successors(*block).next().unwrap()] } /// Inline the ending block of a branch, the point where all blocks from a jmpif instruction @@ -467,15 +519,17 @@ impl<'f> Context<'f> { fn inline_branch_end( &mut self, destination: BasicBlockId, - then_branch: Branch, - else_branch: Branch, + cond_context: ConditionalContext, ) -> BasicBlockId { assert_eq!(self.cfg.predecessors(destination).len(), 2); + let last_then = cond_context.then_branch.last_block; + let mut else_args = Vec::new(); + if cond_context.else_branch.is_some() { + let last_else = cond_context.else_branch.clone().unwrap().last_block; + else_args = self.inserter.function.dfg[last_else].terminator_arguments().to_vec(); + } - let then_args = - self.inserter.function.dfg[then_branch.last_block].terminator_arguments().to_vec(); - let else_args = - self.inserter.function.dfg[else_branch.last_block].terminator_arguments().to_vec(); + let then_args = self.inserter.function.dfg[last_then].terminator_arguments().to_vec(); let params = self.inserter.function.dfg.block_parameters(destination); assert_eq!(params.len(), then_args.len()); @@ -500,17 +554,64 @@ impl<'f> Context<'f> { // Cannot include this in the previous vecmap since it requires exclusive access to self let args = vecmap(args, |(then_arg, else_arg)| { value_merger.merge_values( - then_branch.condition, - else_branch.condition, + cond_context.then_branch.condition, + cond_context.else_branch.clone().unwrap().condition, then_arg, else_arg, ) }); - self.merge_stores(then_branch, else_branch); + self.merge_stores(cond_context.then_branch, cond_context.else_branch); + self.arguments_stack.pop(); + self.arguments_stack.pop(); + self.arguments_stack.push(args); + destination + } - // insert merge instruction - self.inline_block(destination, &args) + /// Insert a new instruction into the function's entry block. + /// Unlike push_instruction, this function will not map any ValueIds. 
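[editor's note] Once both branches have been inlined, each block parameter at the join point is collapsed into a single merged value selected by the branch conditions. A rough arithmetic sketch of that selection for numeric values; the real `ValueMerger` operates on SSA values and field elements, this is just the `condition * then + (1 - condition) * else` idea with `u64`s:

// Conceptual merge of a then/else pair under a boolean condition.
fn merge_values(condition: bool, then_value: u64, else_value: u64) -> u64 {
    let c = condition as u64;
    c * then_value + (1 - c) * else_value
}

fn main() {
    assert_eq!(merge_values(true, 7, 3), 7);
    assert_eq!(merge_values(false, 7, 3), 3);
}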
+ /// within the given instruction, nor will it modify self.values in any way. + fn insert_instruction(&mut self, instruction: Instruction, call_stack: CallStack) -> ValueId { + let block = self.inserter.function.entry_block(); + self.inserter + .function + .dfg + .insert_instruction_and_results(instruction, block, None, call_stack) + .first() + } + + /// Inserts a new instruction into the function's entry block, using the given + /// control type variables to specify result types if needed. + /// Unlike push_instruction, this function will not map any ValueIds. + /// within the given instruction, nor will it modify self.values in any way. + fn insert_instruction_with_typevars( + &mut self, + instruction: Instruction, + ctrl_typevars: Option>, + ) -> InsertInstructionResult { + let block = self.inserter.function.entry_block(); + self.inserter.function.dfg.insert_instruction_and_results( + instruction, + block, + ctrl_typevars, + CallStack::new(), + ) + } + + /// Checks the branch condition on the top of the stack and uses it to build and insert an + /// `EnableSideEffects` instruction into the entry block. + /// + /// If the stack is empty, a "true" u1 constant is taken to be the active condition. This is + /// necessary for re-enabling side-effects when re-emerging to a branch depth of 0. + fn insert_current_side_effects_enabled(&mut self) { + let condition = match self.get_last_condition() { + Some(cond) => cond, + None => { + self.inserter.function.dfg.make_constant(FieldElement::one(), Type::unsigned(1)) + } + }; + let enable_side_effects = Instruction::EnableSideEffects { condition }; + self.insert_instruction_with_typevars(enable_side_effects, None); } /// Merge any store instructions found in each branch. @@ -518,7 +619,11 @@ impl<'f> Context<'f> { /// This function relies on the 'then' branch being merged before the 'else' branch of a jmpif /// instruction. If this ordering is changed, the ordering that store values are merged within /// this function also needs to be changed to reflect that. - fn merge_stores(&mut self, then_branch: Branch, else_branch: Branch) { + fn merge_stores( + &mut self, + then_branch: ConditionalBranch, + else_branch: Option, + ) { // Address -> (then_value, else_value, value_before_the_if) let mut new_map = BTreeMap::new(); @@ -526,11 +631,13 @@ impl<'f> Context<'f> { new_map.insert(address, (store.new_value, store.old_value, store.old_value)); } - for (address, store) in else_branch.store_values { - if let Some(entry) = new_map.get_mut(&address) { - entry.1 = store.new_value; - } else { - new_map.insert(address, (store.old_value, store.new_value, store.old_value)); + if else_branch.is_some() { + for (address, store) in else_branch.clone().unwrap().store_values { + if let Some(entry) = new_map.get_mut(&address) { + entry.1 = store.new_value; + } else { + new_map.insert(address, (store.old_value, store.new_value, store.old_value)); + } } } @@ -544,8 +651,11 @@ impl<'f> Context<'f> { } let then_condition = then_branch.condition; - let else_condition = else_branch.condition; - + let else_condition = if let Some(branch) = else_branch { + branch.condition + } else { + self.inserter.function.dfg.make_constant(FieldElement::zero(), Type::bool()) + }; let block = self.inserter.function.entry_block(); let mut value_merger = @@ -607,35 +717,6 @@ impl<'f> Context<'f> { } } - /// Inline all instructions from the given destination block into the entry block. - /// Afterwards, check the block's terminator and continue inlining recursively. 
- /// - /// Returns the final block that was inlined. - /// - /// Expects that the `arguments` given are already translated via self.inserter.resolve. - /// If they are not, it is possible some values which no longer exist, such as block - /// parameters, will be kept in the program. - fn inline_block(&mut self, destination: BasicBlockId, arguments: &[ValueId]) -> BasicBlockId { - self.inserter.remember_block_params(destination, arguments); - - // If this is not a separate variable, clippy gets confused and says the to_vec is - // unnecessary, when removing it actually causes an aliasing/mutability error. - let instructions = self.inserter.function.dfg[destination].instructions().to_vec(); - - for instruction in instructions.iter() { - let results = self.push_instruction(*instruction); - let (instruction, _) = self.inserter.map_instruction(*instruction); - let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); - capacity_tracker.collect_slice_information( - &instruction, - &mut self.slice_sizes, - results, - ); - } - - self.handle_terminator(destination) - } - /// Push the given instruction to the end of the entry block of the current function. /// /// Note that each ValueId of the instruction will be mapped via self.inserter.resolve. @@ -666,7 +747,7 @@ impl<'f> Context<'f> { instruction: Instruction, call_stack: CallStack, ) -> Instruction { - if let Some((_, condition)) = self.conditions.last().copied() { + if let Some(condition) = self.get_last_condition() { match instruction { Instruction::Constrain(lhs, rhs, message) => { // Replace constraint `lhs == rhs` with `condition * lhs == condition * rhs`. @@ -741,8 +822,8 @@ impl<'f> Context<'f> { } } - fn undo_stores_in_then_branch(&mut self, then_branch: &Branch) { - for (address, store) in &then_branch.store_values { + fn undo_stores_in_then_branch(&mut self, store_values: &HashMap) { + for (address, store) in store_values { let address = *address; let value = store.old_value; self.insert_instruction_with_typevars(Instruction::Store { address, value }, None); diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs index 7cd0fe3084e..bdfc04f0bbe 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs @@ -5,6 +5,7 @@ use crate::ssa::ir::{ value::{Value, ValueId}, }; +use acvm::FieldElement; use fxhash::FxHashMap as HashMap; pub(crate) struct SliceCapacityTracker<'a> { @@ -62,21 +63,27 @@ impl<'a> SliceCapacityTracker<'a> { | Intrinsic::SlicePushFront | Intrinsic::SlicePopBack | Intrinsic::SliceInsert - | Intrinsic::SliceRemove => (1, 1), + | Intrinsic::SliceRemove => (Some(1), 1), // `pop_front` returns the popped element, and then the respective slice. // This means in the case of a slice with structs, the result index of the popped slice // will change depending on the number of elements in the struct. // For example, a slice with four elements will look as such in SSA: // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) // where v7 is the slice length and v8 is the popped slice itself. - Intrinsic::SlicePopFront => (1, results.len() - 1), + Intrinsic::SlicePopFront => (Some(1), results.len() - 1), + // The slice capacity of these intrinsics is not determined by the arguments of the function. 
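[editor's note] The capacity tracker keeps an upper bound on each slice's possible length in a map: push-style intrinsics bump the bound of the result slice, while the decomposition intrinsics handled below get a fixed bound independent of their inputs. A toy version of that bookkeeping, with integer ids instead of `ValueId`s and 254 standing in for `FieldElement::max_num_bits()`:

use std::collections::HashMap;

// Toy capacity tracker: maps a slice id to the maximum length it could reach.
fn main() {
    let mut slice_sizes: HashMap<u32, usize> = HashMap::new();

    // A literal slice of length 2.
    slice_sizes.insert(0, 2);

    // slice_push_back(slice 0) -> slice 1: the bound grows by one.
    if let Some(capacity) = slice_sizes.get(&0).copied() {
        slice_sizes.insert(1, capacity + 1);
    }

    // to_le_bits(...) -> slice 2: the bound is fixed by the field size,
    // not by any argument (254 stands in for FieldElement::max_num_bits()).
    slice_sizes.insert(2, 254);

    assert_eq!(slice_sizes[&1], 3);
    assert_eq!(slice_sizes[&2], 254);
}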
+ Intrinsic::ToBits(_) | Intrinsic::ToRadix(_) => (None, 1), _ => return, }; - let slice_contents = arguments[argument_index]; + let result_slice = results[result_index]; match intrinsic { Intrinsic::SlicePushBack | Intrinsic::SlicePushFront | Intrinsic::SliceInsert => { + let argument_index = argument_index + .expect("ICE: Should have an argument index for slice intrinsics"); + let slice_contents = arguments[argument_index]; + for arg in &arguments[(argument_index + 1)..] { let element_typ = self.dfg.type_of_value(*arg); if element_typ.contains_slice_element() { @@ -85,20 +92,35 @@ impl<'a> SliceCapacityTracker<'a> { } if let Some(contents_capacity) = slice_sizes.get(&slice_contents) { let new_capacity = *contents_capacity + 1; - slice_sizes.insert(results[result_index], new_capacity); + slice_sizes.insert(result_slice, new_capacity); } } Intrinsic::SlicePopBack | Intrinsic::SliceRemove | Intrinsic::SlicePopFront => { + let argument_index = argument_index + .expect("ICE: Should have an argument index for slice intrinsics"); + let slice_contents = arguments[argument_index]; + // We do not decrement the size on intrinsics that could remove values from a slice. // This is because we could potentially go back to the smaller slice and not fill in dummies. // This pass should be tracking the potential max that a slice ***could be*** if let Some(contents_capacity) = slice_sizes.get(&slice_contents) { let new_capacity = *contents_capacity - 1; - slice_sizes.insert(results[result_index], new_capacity); + slice_sizes.insert(result_slice, new_capacity); } } + Intrinsic::ToBits(_) => { + // Compiler sanity check + assert!(matches!(self.dfg.type_of_value(result_slice), Type::Slice(_))); + slice_sizes.insert(result_slice, FieldElement::max_num_bits() as usize); + } + Intrinsic::ToRadix(_) => { + // Compiler sanity check + assert!(matches!(self.dfg.type_of_value(result_slice), Type::Slice(_))); + slice_sizes + .insert(result_slice, FieldElement::max_num_bytes() as usize); + } _ => {} } } diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index d95295ae3c9..f3fa5d1d2f8 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -722,6 +722,11 @@ impl<'a> FunctionContext<'a> { let lhs = self.extract_current_value(&assign.lvalue)?; let rhs = self.codegen_expression(&assign.expression)?; + rhs.clone().for_each(|value| { + let value = value.eval(self); + self.builder.increment_array_reference_count(value); + }); + self.assign_new_value(lhs, rhs); Ok(Self::unit_value()) } diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs index 2a252633a29..a9ecc1a53e5 100644 --- a/compiler/noirc_frontend/src/ast/expression.rs +++ b/compiler/noirc_frontend/src/ast/expression.rs @@ -3,7 +3,7 @@ use std::fmt::Display; use crate::token::{Attributes, Token}; use crate::{ - Distinctness, FunctionVisibility, Ident, Path, Pattern, Recoverable, Statement, StatementKind, + Distinctness, Ident, ItemVisibility, Path, Pattern, Recoverable, Statement, StatementKind, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, Visibility, }; use acvm::FieldElement; @@ -369,16 +369,11 @@ pub struct FunctionDefinition { // and `secondary` attributes (ones that do not change the function kind) pub attributes: Attributes, - /// True if this function was defined with the 'open' keyword - pub is_open: bool, - - pub is_internal: bool, - /// True if this function was defined 
with the 'unconstrained' keyword pub is_unconstrained: bool, /// Indicate if this function was defined with the 'pub' keyword - pub visibility: FunctionVisibility, + pub visibility: ItemVisibility, pub generics: UnresolvedGenerics, pub parameters: Vec, @@ -406,18 +401,6 @@ pub enum FunctionReturnType { Ty(UnresolvedType), } -/// Describes the types of smart contract functions that are allowed. -/// - All Noir programs in the non-contract context can be seen as `Secret`. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum ContractFunctionType { - /// This function will be executed in a private - /// context. - Secret, - /// This function will be executed in a public - /// context. - Open, -} - #[derive(Debug, PartialEq, Eq, Clone)] pub enum ArrayLiteral { Standard(Vec), @@ -674,10 +657,8 @@ impl FunctionDefinition { FunctionDefinition { name: name.clone(), attributes: Attributes::empty(), - is_open: false, - is_internal: false, is_unconstrained: false, - visibility: FunctionVisibility::Private, + visibility: ItemVisibility::Private, generics: generics.clone(), parameters: p, body: body.clone(), diff --git a/compiler/noirc_frontend/src/ast/mod.rs b/compiler/noirc_frontend/src/ast/mod.rs index 29edbaca594..8a420c32fb8 100644 --- a/compiler/noirc_frontend/src/ast/mod.rs +++ b/compiler/noirc_frontend/src/ast/mod.rs @@ -354,8 +354,8 @@ impl UnresolvedTypeExpression { } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -/// Represents whether the function can be called outside its module/crate -pub enum FunctionVisibility { +/// Represents whether the definition can be referenced outside its module/crate +pub enum ItemVisibility { Public, Private, PublicCrate, diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index f39b71405d3..387840b57c4 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -240,6 +240,17 @@ pub trait Recoverable { fn error(span: Span) -> Self; } +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct ModuleDeclaration { + pub ident: Ident, +} + +impl std::fmt::Display for ModuleDeclaration { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "mod {}", self.ident) + } +} + #[derive(Debug, PartialEq, Eq, Clone)] pub struct ImportStatement { pub path: Path, diff --git a/compiler/noirc_frontend/src/debug/mod.rs b/compiler/noirc_frontend/src/debug/mod.rs index a88567fcaf9..05916502d73 100644 --- a/compiler/noirc_frontend/src/debug/mod.rs +++ b/compiler/noirc_frontend/src/debug/mod.rs @@ -4,9 +4,11 @@ use crate::{ ast::{Path, PathKind}, parser::{Item, ItemKind}, }; +use noirc_errors::debug_info::{DebugFnId, DebugFunction}; use noirc_errors::{Span, Spanned}; use std::collections::HashMap; use std::collections::VecDeque; +use std::mem::take; const MAX_MEMBER_ASSIGN_DEPTH: usize = 8; @@ -26,8 +28,12 @@ pub struct DebugInstrumenter { // all field names referenced when assigning to a member of a variable pub field_names: HashMap, + // all collected function metadata (name + argument names) + pub functions: HashMap, + next_var_id: u32, next_field_name_id: u32, + next_fn_id: u32, // last seen variable names and their IDs grouped by scope scope: Vec>, @@ -38,9 +44,11 @@ impl Default for DebugInstrumenter { Self { variables: HashMap::default(), field_names: HashMap::default(), + functions: HashMap::default(), scope: vec![], next_var_id: 0, next_field_name_id: 1, + next_fn_id: 0, } } } @@ -76,10 +84,22 @@ impl DebugInstrumenter { field_name_id } + 
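[editor's note] The instrumenter below assigns each function a `DebugFnId` and rebuilds its body as: enter call, parameter bindings, original statements, exit call, and finally the original return value. A small stand-alone sketch of that reordering over plain strings rather than the real `ast::Statement` type:

// Toy version of the body rebuild done by walk_fn: the last statement is the
// function's return value, so the exit marker is inserted just before it.
fn instrument_body(
    enter_stmt: &str,
    param_stmts: &[&str],
    mut body: Vec<&str>,
    exit_stmt: &str,
) -> Vec<String> {
    let last_stmt = body.pop().expect("a function body has at least its return value");
    let mut result = vec![enter_stmt.to_string()];
    result.extend(param_stmts.iter().map(|s| s.to_string()));
    result.extend(body.into_iter().map(|s| s.to_string()));
    result.push(exit_stmt.to_string());
    result.push(last_stmt.to_string());
    result
}

fn main() {
    let out = instrument_body(
        "__debug_fn_enter(0)",
        &["__debug_var_assign(0, x)"],
        vec!["let y = x + 1;", "y"],
        "__debug_fn_exit(0)",
    );
    assert_eq!(out.last().map(String::as_str), Some("y"));
    println!("{out:?}");
}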
fn insert_function(&mut self, fn_name: String, arguments: Vec) -> DebugFnId { + let fn_id = DebugFnId(self.next_fn_id); + self.next_fn_id += 1; + self.functions.insert(fn_id, DebugFunction { name: fn_name, arg_names: arguments }); + fn_id + } + fn walk_fn(&mut self, func: &mut ast::FunctionDefinition) { + let func_name = func.name.0.contents.clone(); + let func_args = + func.parameters.iter().map(|param| pattern_to_string(¶m.pattern)).collect(); + let fn_id = self.insert_function(func_name, func_args); + let enter_stmt = build_debug_call_stmt("enter", fn_id, func.span); self.scope.push(HashMap::default()); - let set_fn_params = func + let set_fn_params: Vec<_> = func .parameters .iter() .flat_map(|param| { @@ -93,10 +113,21 @@ impl DebugInstrumenter { }) .collect(); - self.walk_scope(&mut func.body.0, func.span); + let func_body = &mut func.body.0; + let mut statements = take(func_body); + + self.walk_scope(&mut statements, func.span); - // prepend fn params: - func.body.0 = [set_fn_params, func.body.0.clone()].concat(); + // walk_scope ensures that the last statement is the return value of the function + let last_stmt = statements.pop().expect("at least one statement after walk_scope"); + let exit_stmt = build_debug_call_stmt("exit", fn_id, last_stmt.span); + + // rebuild function body + func_body.push(enter_stmt); + func_body.extend(set_fn_params); + func_body.extend(statements); + func_body.push(exit_stmt); + func_body.push(last_stmt); } // Modify a vector of statements in-place, adding instrumentation for sets and drops. @@ -427,6 +458,8 @@ impl DebugInstrumenter { use dep::__debug::{{ __debug_var_assign, __debug_var_drop, + __debug_fn_enter, + __debug_fn_exit, __debug_dereference_assign, {member_assigns}, }};"# @@ -451,14 +484,32 @@ pub fn build_debug_crate_file() -> String { } #[oracle(__debug_var_drop)] - unconstrained fn __debug_var_drop_oracle(_var_id: u32) {} - unconstrained fn __debug_var_drop_inner(var_id: u32) { + unconstrained fn __debug_var_drop_oracle(_var_id: u32) {} + unconstrained fn __debug_var_drop_inner(var_id: u32) { __debug_var_drop_oracle(var_id); } - pub fn __debug_var_drop(var_id: u32) { + pub fn __debug_var_drop(var_id: u32) { __debug_var_drop_inner(var_id); } + #[oracle(__debug_fn_enter)] + unconstrained fn __debug_fn_enter_oracle(_fn_id: u32) {} + unconstrained fn __debug_fn_enter_inner(fn_id: u32) { + __debug_fn_enter_oracle(fn_id); + } + pub fn __debug_fn_enter(fn_id: u32) { + __debug_fn_enter_inner(fn_id); + } + + #[oracle(__debug_fn_exit)] + unconstrained fn __debug_fn_exit_oracle(_fn_id: u32) {} + unconstrained fn __debug_fn_exit_inner(fn_id: u32) { + __debug_fn_exit_oracle(fn_id); + } + pub fn __debug_fn_exit(fn_id: u32) { + __debug_fn_exit_inner(fn_id); + } + #[oracle(__debug_dereference_assign)] unconstrained fn __debug_dereference_assign_oracle(_var_id: u32, _value: T) {} unconstrained fn __debug_dereference_assign_inner(var_id: u32, value: T) { @@ -561,6 +612,21 @@ fn build_assign_member_stmt( ast::Statement { kind: ast::StatementKind::Semi(ast::Expression { kind, span }), span } } +fn build_debug_call_stmt(fname: &str, fn_id: DebugFnId, span: Span) -> ast::Statement { + let kind = ast::ExpressionKind::Call(Box::new(ast::CallExpression { + func: Box::new(ast::Expression { + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![ident(&format!["__debug_fn_{fname}"], span)], + kind: PathKind::Plain, + span, + }), + span, + }), + arguments: vec![uint_expr(fn_id.0 as u128, span)], + })); + ast::Statement { kind: 
ast::StatementKind::Semi(ast::Expression { kind, span }), span } +} + fn pattern_vars(pattern: &ast::Pattern) -> Vec<(ast::Ident, bool)> { let mut vars = vec![]; let mut stack = VecDeque::from([(pattern, false)]); @@ -585,6 +651,30 @@ fn pattern_vars(pattern: &ast::Pattern) -> Vec<(ast::Ident, bool)> { vars } +fn pattern_to_string(pattern: &ast::Pattern) -> String { + match pattern { + ast::Pattern::Identifier(id) => id.0.contents.clone(), + ast::Pattern::Mutable(mpat, _, _) => format!("mut {}", pattern_to_string(mpat.as_ref())), + ast::Pattern::Tuple(elements, _) => format!( + "({})", + elements.iter().map(pattern_to_string).collect::>().join(", ") + ), + ast::Pattern::Struct(name, fields, _) => { + format!( + "{} {{ {} }}", + name, + fields + .iter() + .map(|(field_ident, field_pattern)| { + format!("{}: {}", &field_ident.0.contents, pattern_to_string(field_pattern)) + }) + .collect::>() + .join(", "), + ) + } + } +} + fn ident(s: &str, span: Span) -> ast::Ident { ast::Ident(Spanned::from(span, s.to_string())) } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 7f36af5b30e..8ea9d475539 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -5,13 +5,14 @@ use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; use crate::hir::resolution::import::{resolve_import, ImportDirective}; -use crate::hir::resolution::resolver::Resolver; use crate::hir::resolution::{ collect_impls, collect_trait_impls, path_resolver, resolve_free_functions, resolve_globals, resolve_impls, resolve_structs, resolve_trait_by_path, resolve_trait_impls, resolve_traits, resolve_type_aliases, }; -use crate::hir::type_check::{type_check_func, TypeCheckError, TypeChecker}; +use crate::hir::type_check::{ + check_trait_impl_method_matches_declaration, type_check_func, TypeCheckError, TypeChecker, +}; use crate::hir::Context; use crate::macros_api::{MacroError, MacroProcessor}; @@ -20,8 +21,7 @@ use crate::node_interner::{FuncId, GlobalId, NodeInterner, StructId, TraitId, Ty use crate::parser::{ParserError, SortedModule}; use crate::{ ExpressionKind, Ident, LetStatement, Literal, NoirFunction, NoirStruct, NoirTrait, - NoirTypeAlias, Path, PathKind, Type, TypeBindings, UnresolvedGenerics, - UnresolvedTraitConstraint, UnresolvedType, + NoirTypeAlias, Path, PathKind, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, }; use fm::FileId; use iter_extended::vecmap; @@ -207,7 +207,7 @@ impl DefCollector { context: &mut Context, ast: SortedModule, root_file_id: FileId, - macro_processors: Vec<&dyn MacroProcessor>, + macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; let crate_id = def_map.krate; @@ -220,11 +220,7 @@ impl DefCollector { let crate_graph = &context.crate_graph[crate_id]; for dep in crate_graph.dependencies.clone() { - errors.extend(CrateDefMap::collect_defs( - dep.crate_id, - context, - macro_processors.clone(), - )); + errors.extend(CrateDefMap::collect_defs(dep.crate_id, context, macro_processors)); let dep_def_root = context.def_map(&dep.crate_id).expect("ice: def map was just created").root; @@ -250,6 +246,7 @@ impl DefCollector { crate_root, crate_id, context, + macro_processors, )); let submodules = vecmap(def_collector.def_map.modules().iter(), |(index, _)| index); @@ -257,9 +254,9 @@ impl 
DefCollector { context.def_maps.insert(crate_id, def_collector.def_map); // TODO(#4653): generalize this function - for macro_processor in ¯o_processors { + for macro_processor in macro_processors { macro_processor - .process_unresolved_traits_impls( + .process_collected_defs( &crate_id, context, &def_collector.collected_traits_impls, @@ -282,7 +279,7 @@ impl DefCollector { // Resolve unresolved imports collected from the crate, one by one. for collected_import in def_collector.collected_imports { - match resolve_import(crate_id, collected_import, &context.def_maps) { + match resolve_import(crate_id, &collected_import, &context.def_maps) { Ok(resolved_import) => { // Populate module namespaces according to the imports used let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); @@ -372,12 +369,12 @@ impl DefCollector { &mut errors, )); - functions.extend(resolve_trait_impls( + let impl_functions = resolve_trait_impls( context, def_collector.collected_traits_impls, crate_id, &mut errors, - )); + ); for macro_processor in macro_processors { macro_processor.process_typed_ast(&crate_id, context).unwrap_or_else( @@ -391,6 +388,8 @@ impl DefCollector { errors.extend(type_check_globals(&mut context.def_interner, resolved_globals.globals)); errors.extend(type_check_functions(&mut context.def_interner, functions)); + errors.extend(type_check_trait_impl_signatures(&mut context.def_interner, &impl_functions)); + errors.extend(type_check_functions(&mut context.def_interner, impl_functions)); errors } } @@ -472,158 +471,24 @@ fn type_check_functions( .into_iter() .flat_map(|(file, func)| { type_check_func(interner, func) - .iter() - .cloned() + .into_iter() .map(|e| (e.into(), file)) .collect::>() }) .collect() } -// TODO(vitkov): Move this out of here and into type_check -#[allow(clippy::too_many_arguments)] -pub(crate) fn check_methods_signatures( - resolver: &mut Resolver, - impl_methods: &[(FileId, FuncId)], - trait_id: TraitId, - trait_name_span: Span, - // These are the generics on the trait itself from the impl. - // E.g. in `impl Foo for Bar`, this is `vec![A, B]`. - trait_generics: Vec, - trait_impl_generic_count: usize, - file_id: FileId, - errors: &mut Vec<(CompilationError, FileId)>, -) { - let self_type = resolver.get_self_type().expect("trait impl must have a Self type").clone(); - let trait_generics = vecmap(trait_generics, |typ| resolver.resolve_type(typ)); - - // Temporarily bind the trait's Self type to self_type so we can type check - let the_trait = resolver.interner.get_trait_mut(trait_id); - the_trait.self_type_typevar.bind(self_type); - - if trait_generics.len() != the_trait.generics.len() { - let error = DefCollectorErrorKind::MismatchGenericCount { - actual_generic_count: trait_generics.len(), - expected_generic_count: the_trait.generics.len(), - // Preferring to use 'here' over a more precise term like 'this reference' - // to try to make the error easier to understand for newer users. - location: "here it", - origin: the_trait.name.to_string(), - span: trait_name_span, - }; - errors.push((error.into(), file_id)); - } - - // We also need to bind the traits generics to the trait's generics on the impl - for (generic, binding) in the_trait.generics.iter().zip(trait_generics) { - generic.bind(binding); - } - - // Temporarily take the trait's methods so we can use both them and a mutable reference - // to the interner within the loop. 
- let trait_methods = std::mem::take(&mut the_trait.methods); - - for (file_id, func_id) in impl_methods { - let func_name = resolver.interner.function_name(func_id).to_owned(); - - // This is None in the case where the impl block has a method that's not part of the trait. - // If that's the case, a `MethodNotInTrait` error has already been thrown, and we can ignore - // the impl method, since there's nothing in the trait to match its signature against. - if let Some(trait_method) = - trait_methods.iter().find(|method| method.name.0.contents == func_name) - { - let impl_method = resolver.interner.function_meta(func_id); - - let impl_method_generic_count = - impl_method.typ.generic_count() - trait_impl_generic_count; - - // We subtract 1 here to account for the implicit generic `Self` type that is on all - // traits (and thus trait methods) but is not required (or allowed) for users to specify. - let the_trait = resolver.interner.get_trait(trait_id); - let trait_method_generic_count = - trait_method.generics().len() - 1 - the_trait.generics.len(); - - if impl_method_generic_count != trait_method_generic_count { - let trait_name = resolver.interner.get_trait(trait_id).name.clone(); - - let error = DefCollectorErrorKind::MismatchGenericCount { - actual_generic_count: impl_method_generic_count, - expected_generic_count: trait_method_generic_count, - origin: format!("{}::{}", trait_name, func_name), - location: "this method", - span: impl_method.location.span, - }; - errors.push((error.into(), *file_id)); - } - - // This instantiation is technically not needed. We could bind each generic in the - // trait function to the impl's corresponding generic but to do so we'd have to rely - // on the trait function's generics being first in the generic list, since the same - // list also contains the generic `Self` variable, and any generics on the trait itself. - // - // Instantiating the impl method's generics here instead is a bit less precise but - // doesn't rely on any orderings that may be changed. 
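[editor's note] The generic-count comparison being moved out of this pass subtracts the impl's own generics on one side, and both the implicit `Self` and the trait's generics on the other, so that each side reduces to the generics declared on the method itself. A small numeric sketch of that bookkeeping; the counts are illustrative, not taken from the compiler:

// Illustrative arithmetic behind the generic-count check.
fn main() {
    // trait Foo<T> { fn baz<U>(self, x: U) -> T; }
    // The declaration's generic list holds Self, T and U.
    let trait_method_generics = 3;
    let trait_generics = 1; // T
    let trait_method_generic_count = trait_method_generics - 1 /* Self */ - trait_generics;

    // impl<T> Foo<T> for Bar { fn baz<U>(self, x: U) -> T { .. } }
    // The impl method's type carries T and U; T belongs to the impl itself.
    let impl_method_type_generics = 2;
    let trait_impl_generic_count = 1; // T on the impl
    let impl_method_generic_count = impl_method_type_generics - trait_impl_generic_count;

    assert_eq!(trait_method_generic_count, impl_method_generic_count); // both are 1, for U
}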
- let impl_function_type = impl_method.typ.instantiate(resolver.interner).0; - - let mut bindings = TypeBindings::new(); - let mut typecheck_errors = Vec::new(); - - if let Type::Function(impl_params, impl_return, _) = impl_function_type.as_monotype() { - if trait_method.arguments().len() != impl_params.len() { - let error = DefCollectorErrorKind::MismatchTraitImplementationNumParameters { - actual_num_parameters: impl_method.parameters.0.len(), - expected_num_parameters: trait_method.arguments().len(), - trait_name: resolver.interner.get_trait(trait_id).name.to_string(), - method_name: func_name.to_string(), - span: impl_method.location.span, - }; - errors.push((error.into(), *file_id)); - } - - // Check the parameters of the impl method against the parameters of the trait method - let args = trait_method.arguments().iter(); - let args_and_params = args.zip(impl_params).zip(&impl_method.parameters.0); - - for (parameter_index, ((expected, actual), (hir_pattern, _, _))) in - args_and_params.enumerate() - { - if expected.try_unify(actual, &mut bindings).is_err() { - typecheck_errors.push(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name: func_name.to_string(), - expected_typ: expected.to_string(), - actual_typ: actual.to_string(), - parameter_span: hir_pattern.span(), - parameter_index: parameter_index + 1, - }); - } - } - - if trait_method.return_type().try_unify(impl_return, &mut bindings).is_err() { - let impl_method = resolver.interner.function_meta(func_id); - let ret_type_span = impl_method.return_type.get_type().span; - let expr_span = ret_type_span.expect("return type must always have a span"); - - let expected_typ = trait_method.return_type().to_string(); - let expr_typ = impl_method.return_type().to_string(); - let error = TypeCheckError::TypeMismatch { expr_typ, expected_typ, expr_span }; - typecheck_errors.push(error); - } - } else { - unreachable!( - "impl_function_type is not a function type, it is: {impl_function_type}" - ); - } - - errors.extend(typecheck_errors.iter().cloned().map(|e| (e.into(), *file_id))); - } - } - - // Now unbind `Self` and the trait's generics - let the_trait = resolver.interner.get_trait_mut(trait_id); - the_trait.set_methods(trait_methods); - the_trait.self_type_typevar.unbind(the_trait.self_type_typevar_id); - - for generic in &the_trait.generics { - generic.unbind(generic.id()); - } +fn type_check_trait_impl_signatures( + interner: &mut NodeInterner, + file_func_ids: &[(FileId, FuncId)], +) -> Vec<(CompilationError, fm::FileId)> { + file_func_ids + .iter() + .flat_map(|(file, func)| { + check_trait_impl_method_matches_declaration(interner, *func) + .into_iter() + .map(|e| (e.into(), *file)) + .collect::>() + }) + .collect() } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 77224cc311c..ae99e61e534 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -7,10 +7,11 @@ use noirc_errors::Location; use crate::{ graph::CrateId, hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait}, + macros_api::MacroProcessor, node_interner::{FunctionModifiers, TraitId, TypeAliasId}, parser::{SortedModule, SortedSubModule}, - FunctionDefinition, Ident, LetStatement, NoirFunction, NoirStruct, NoirTrait, NoirTraitImpl, - NoirTypeAlias, TraitImplItem, TraitItem, TypeImpl, + FunctionDefinition, Ident, LetStatement, ModuleDeclaration, NoirFunction, NoirStruct, + NoirTrait, NoirTraitImpl, 
NoirTypeAlias, TraitImplItem, TraitItem, TypeImpl, }; use super::{ @@ -41,16 +42,28 @@ pub fn collect_defs( module_id: LocalModuleId, crate_id: CrateId, context: &mut Context, + macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut collector = ModCollector { def_collector, file_id, module_id }; let mut errors: Vec<(CompilationError, FileId)> = vec![]; // First resolve the module declarations for decl in ast.module_decls { - errors.extend(collector.parse_module_declaration(context, &decl, crate_id)); + errors.extend(collector.parse_module_declaration( + context, + &decl, + crate_id, + macro_processors, + )); } - errors.extend(collector.collect_submodules(context, crate_id, ast.submodules, file_id)); + errors.extend(collector.collect_submodules( + context, + crate_id, + ast.submodules, + file_id, + macro_processors, + )); // Then add the imports to defCollector to resolve once all modules in the hierarchy have been resolved for import in ast.imports { @@ -394,12 +407,10 @@ impl<'a> ModCollector<'a> { let modifiers = FunctionModifiers { name: name.to_string(), - visibility: crate::FunctionVisibility::Public, + visibility: crate::ItemVisibility::Public, // TODO(Maddiaa): Investigate trait implementations with attributes see: https://github.com/noir-lang/noir/issues/2629 attributes: crate::token::Attributes::empty(), is_unconstrained: false, - contract_function_type: None, - is_internal: None, }; let location = Location::new(name.span(), self.file_id); @@ -494,6 +505,7 @@ impl<'a> ModCollector<'a> { crate_id: CrateId, submodules: Vec, file_id: FileId, + macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; for submodule in submodules { @@ -506,6 +518,7 @@ impl<'a> ModCollector<'a> { child, crate_id, context, + macro_processors, )); } Err(error) => { @@ -522,15 +535,16 @@ impl<'a> ModCollector<'a> { fn parse_module_declaration( &mut self, context: &mut Context, - mod_name: &Ident, + mod_decl: &ModuleDeclaration, crate_id: CrateId, + macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; let child_file_id = - match find_module(&context.file_manager, self.file_id, &mod_name.0.contents) { + match find_module(&context.file_manager, self.file_id, &mod_decl.ident.0.contents) { Ok(child_file_id) => child_file_id, Err(expected_path) => { - let mod_name = mod_name.clone(); + let mod_name = mod_decl.ident.clone(); let err = DefCollectorErrorKind::UnresolvedModuleDecl { mod_name, expected_path }; errors.push((err.into(), self.file_id)); @@ -538,17 +552,17 @@ impl<'a> ModCollector<'a> { } }; - let location = Location { file: self.file_id, span: mod_name.span() }; + let location = Location { file: self.file_id, span: mod_decl.ident.span() }; if let Some(old_location) = context.visited_files.get(&child_file_id) { let error = DefCollectorErrorKind::ModuleAlreadyPartOfCrate { - mod_name: mod_name.clone(), + mod_name: mod_decl.ident.clone(), span: location.span, }; errors.push((error.into(), location.file)); let error = DefCollectorErrorKind::ModuleOriginallyDefined { - mod_name: mod_name.clone(), + mod_name: mod_decl.ident.clone(), span: old_location.span, }; errors.push((error.into(), old_location.file)); @@ -559,14 +573,26 @@ impl<'a> ModCollector<'a> { // Parse the AST for the module we just found and then recursively look for it's defs let (ast, parsing_errors) = context.parsed_file_results(child_file_id); 
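[editor's note] Each submodule's freshly parsed AST is now threaded through every registered macro processor before its definitions are collected, mirroring what already happens for the crate root. A minimal sketch of that pipeline shape, with a made-up `Processor` trait and a `String` standing in for `MacroProcessor` and the untyped AST:

// Hypothetical stand-in for MacroProcessor::process_untyped_ast: each
// processor either rewrites the AST or reports an error and leaves it as-is.
trait Processor {
    fn process(&self, ast: String) -> Result<String, String>;
}

struct Uppercase;
impl Processor for Uppercase {
    fn process(&self, ast: String) -> Result<String, String> {
        Ok(ast.to_uppercase())
    }
}

fn run_processors(mut ast: String, processors: &[&dyn Processor], errors: &mut Vec<String>) -> String {
    for processor in processors {
        match processor.process(ast.clone()) {
            Ok(processed) => ast = processed,
            Err(error) => errors.push(error),
        }
    }
    ast
}

fn main() {
    let mut errors = Vec::new();
    let ast = run_processors("mod foo;".to_string(), &[&Uppercase], &mut errors);
    assert_eq!(ast, "MOD FOO;");
    assert!(errors.is_empty());
}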
- let ast = ast.into_sorted(); + let mut ast = ast.into_sorted(); + + for macro_processor in macro_processors { + match macro_processor.process_untyped_ast(ast.clone(), &crate_id, context) { + Ok(processed_ast) => { + ast = processed_ast; + } + Err((error, file_id)) => { + let def_error = DefCollectorErrorKind::MacroError(error); + errors.push((def_error.into(), file_id)); + } + } + } errors.extend( parsing_errors.iter().map(|e| (e.clone().into(), child_file_id)).collect::>(), ); // Add module into def collector and get a ModuleId - match self.push_child_module(mod_name, child_file_id, true, false) { + match self.push_child_module(&mod_decl.ident, child_file_id, true, false) { Ok(child_mod_id) => { errors.extend(collect_defs( self.def_collector, @@ -575,6 +601,7 @@ impl<'a> ModCollector<'a> { child_mod_id, crate_id, context, + macro_processors, )); } Err(error) => { diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs index de45be48c4e..29daf5d6369 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -41,14 +41,6 @@ pub enum DefCollectorErrorKind { OverlappingImplNote { span: Span }, #[error("Cannot `impl` a type defined outside the current crate")] ForeignImpl { span: Span, type_name: String }, - #[error("Mismatched number of parameters in trait implementation")] - MismatchTraitImplementationNumParameters { - actual_num_parameters: usize, - expected_num_parameters: usize, - trait_name: String, - method_name: String, - span: Span, - }, #[error("Mismatched number of generics in {location}")] MismatchGenericCount { actual_generic_count: usize, @@ -176,18 +168,6 @@ impl From for Diagnostic { "".to_string(), trait_path.span(), ), - DefCollectorErrorKind::MismatchTraitImplementationNumParameters { - expected_num_parameters, - actual_num_parameters, - trait_name, - method_name, - span, - } => { - let plural = if expected_num_parameters == 1 { "" } else { "s" }; - let primary_message = format!( - "`{trait_name}::{method_name}` expects {expected_num_parameters} parameter{plural}, but this method has {actual_num_parameters}"); - Diagnostic::simple_error(primary_message, "".to_string(), span) - } DefCollectorErrorKind::MismatchGenericCount { actual_generic_count, expected_generic_count, diff --git a/compiler/noirc_frontend/src/hir/def_map/item_scope.rs b/compiler/noirc_frontend/src/hir/def_map/item_scope.rs index 523def89518..178b91e1e84 100644 --- a/compiler/noirc_frontend/src/hir/def_map/item_scope.rs +++ b/compiler/noirc_frontend/src/hir/def_map/item_scope.rs @@ -1,16 +1,11 @@ use super::{namespace::PerNs, ModuleDefId, ModuleId}; use crate::{ node_interner::{FuncId, TraitId}, - Ident, + Ident, ItemVisibility, }; use std::collections::{hash_map::Entry, HashMap}; -type Scope = HashMap, (ModuleDefId, Visibility, bool /*is_prelude*/)>; - -#[derive(Debug, PartialEq, Eq, Copy, Clone)] -pub enum Visibility { - Public, -} +type Scope = HashMap, (ModuleDefId, ItemVisibility, bool /*is_prelude*/)>; #[derive(Default, Debug, PartialEq, Eq)] pub struct ItemScope { @@ -55,12 +50,12 @@ impl ItemScope { Err((old_ident.clone(), name)) } } else { - trait_hashmap.insert(trait_id, (mod_def, Visibility::Public, is_prelude)); + trait_hashmap.insert(trait_id, (mod_def, ItemVisibility::Public, is_prelude)); Ok(()) } } else { let mut trait_hashmap = HashMap::new(); - trait_hashmap.insert(trait_id, (mod_def, Visibility::Public, is_prelude)); + 
trait_hashmap.insert(trait_id, (mod_def, ItemVisibility::Public, is_prelude)); map.insert(name, trait_hashmap); Ok(()) } diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 8721bdb6c3c..1326ffca9f7 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -73,7 +73,7 @@ impl CrateDefMap { pub fn collect_defs( crate_id: CrateId, context: &mut Context, - macro_processors: Vec<&dyn MacroProcessor>, + macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { // Check if this Crate has already been compiled // XXX: There is probably a better alternative for this. @@ -90,7 +90,7 @@ impl CrateDefMap { let (ast, parsing_errors) = context.parsed_file_results(root_file_id); let mut ast = ast.into_sorted(); - for macro_processor in ¯o_processors { + for macro_processor in macro_processors { match macro_processor.process_untyped_ast(ast.clone(), &crate_id, context) { Ok(processed_ast) => { ast = processed_ast; @@ -115,13 +115,7 @@ impl CrateDefMap { }; // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect( - def_map, - context, - ast, - root_file_id, - macro_processors.clone(), - )); + errors.extend(DefCollector::collect(def_map, context, ast, root_file_id, macro_processors)); errors.extend( parsing_errors.iter().map(|e| (e.clone().into(), root_file_id)).collect::>(), diff --git a/compiler/noirc_frontend/src/hir/def_map/namespace.rs b/compiler/noirc_frontend/src/hir/def_map/namespace.rs index ca14d9f8617..5e349f46e14 100644 --- a/compiler/noirc_frontend/src/hir/def_map/namespace.rs +++ b/compiler/noirc_frontend/src/hir/def_map/namespace.rs @@ -1,15 +1,16 @@ -use super::{item_scope::Visibility, ModuleDefId}; +use super::ModuleDefId; +use crate::ItemVisibility; // This works exactly the same as in r-a, just simplified #[derive(Debug, PartialEq, Eq, Copy, Clone)] pub struct PerNs { - pub types: Option<(ModuleDefId, Visibility, bool)>, - pub values: Option<(ModuleDefId, Visibility, bool)>, + pub types: Option<(ModuleDefId, ItemVisibility, bool)>, + pub values: Option<(ModuleDefId, ItemVisibility, bool)>, } impl PerNs { pub fn types(t: ModuleDefId) -> PerNs { - PerNs { types: Some((t, Visibility::Public, false)), values: None } + PerNs { types: Some((t, ItemVisibility::Public, false)), values: None } } pub fn take_types(self) -> Option { @@ -24,7 +25,7 @@ impl PerNs { self.types.map(|it| it.0).into_iter().chain(self.values.map(|it| it.0)) } - pub fn iter_items(self) -> impl Iterator { + pub fn iter_items(self) -> impl Iterator { self.types.into_iter().chain(self.values) } diff --git a/compiler/noirc_frontend/src/hir/resolution/errors.rs b/compiler/noirc_frontend/src/hir/resolution/errors.rs index d2fe67da38c..30a1ba2ee34 100644 --- a/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -64,14 +64,10 @@ pub enum ResolverError { IncorrectGenericCount { span: Span, item_name: String, actual: usize, expected: usize }, #[error("{0}")] ParserError(Box), - #[error("Function is not defined in a contract yet sets its contract visibility")] - ContractFunctionTypeInNormalFunction { span: Span }, #[error("Cannot create a mutable reference to {variable}, it was declared to be immutable")] MutableReferenceToImmutableVariable { variable: String, span: Span }, #[error("Mutable references to array indices are unsupported")] MutableReferenceToArrayElement { span: Span }, - 
#[error("Function is not defined in a contract yet sets is_internal")] - ContractFunctionInternalInNormalFunction { span: Span }, #[error("Numeric constants should be printed without formatting braces")] NumericConstantInFormatString { name: String, span: Span }, #[error("Closure environment must be a tuple or unit type")] @@ -80,8 +76,6 @@ pub enum ResolverError { PrivateFunctionCalled { name: String, span: Span }, #[error("{name} is not visible from the current crate")] NonCrateFunctionCalled { name: String, span: Span }, - #[error("Only sized types may be used in the entry point to a program")] - InvalidTypeForEntryPoint { span: Span }, #[error("Nested slices are not supported")] NestedSlices { span: Span }, #[error("#[recursive] attribute is only allowed on entry points to a program")] @@ -278,22 +272,12 @@ impl From for Diagnostic { ) } ResolverError::ParserError(error) => (*error).into(), - ResolverError::ContractFunctionTypeInNormalFunction { span } => Diagnostic::simple_error( - "Only functions defined within contracts can set their contract function type".into(), - "Non-contract functions cannot be 'open'".into(), - span, - ), ResolverError::MutableReferenceToImmutableVariable { variable, span } => { Diagnostic::simple_error(format!("Cannot mutably reference the immutable variable {variable}"), format!("{variable} is immutable"), span) }, ResolverError::MutableReferenceToArrayElement { span } => { Diagnostic::simple_error("Mutable references to array elements are currently unsupported".into(), "Try storing the element in a fresh variable first".into(), span) }, - ResolverError::ContractFunctionInternalInNormalFunction { span } => Diagnostic::simple_error( - "Only functions defined within contracts can set their functions to be internal".into(), - "Non-contract functions cannot be 'internal'".into(), - span, - ), ResolverError::NumericConstantInFormatString { name, span } => Diagnostic::simple_error( format!("cannot find `{name}` in this scope "), "Numeric constants should be printed without formatting braces".to_string(), @@ -309,9 +293,6 @@ impl From for Diagnostic { ResolverError::NonCrateFunctionCalled { span, name } => Diagnostic::simple_warning( format!("{name} is not visible from the current crate"), format!("{name} is only visible within its crate"), span), - ResolverError::InvalidTypeForEntryPoint { span } => Diagnostic::simple_error( - "Only sized types may be used in the entry point to a program".to_string(), - "Slices, references, or any type containing them may not be used in main or a contract function".to_string(), span), ResolverError::NestedSlices { span } => Diagnostic::simple_error( "Nested slices are not supported".into(), "Try to use a constant sized array instead".into(), diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index e6ac33053a0..9c8418daf80 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -52,7 +52,7 @@ impl From for CustomDiagnostic { pub fn resolve_import( crate_id: CrateId, - import_directive: ImportDirective, + import_directive: &ImportDirective, def_maps: &BTreeMap, ) -> Result { let def_map = &def_maps[&crate_id]; @@ -62,10 +62,10 @@ pub fn resolve_import( let module_scope = import_directive.module_id; let resolved_namespace = - resolve_path_to_ns(&import_directive, def_map, def_maps, allow_contracts) + resolve_path_to_ns(import_directive, def_map, def_maps, allow_contracts) .map_err(|error| 
(error, module_scope))?; - let name = resolve_path_name(&import_directive); + let name = resolve_path_name(import_directive); Ok(ResolvedImport { name, resolved_namespace, diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 7f9e48353a7..00b1b443430 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -37,11 +37,11 @@ use crate::{ StatementKind, }; use crate::{ - ArrayLiteral, ContractFunctionType, Distinctness, ForRange, FunctionDefinition, - FunctionReturnType, FunctionVisibility, Generics, LValue, NoirStruct, NoirTypeAlias, Param, - Path, PathKind, Pattern, Shared, StructType, Type, TypeAlias, TypeVariable, TypeVariableKind, - UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, - UnresolvedTypeExpression, Visibility, ERROR_IDENT, + ArrayLiteral, Distinctness, ForRange, FunctionDefinition, FunctionReturnType, Generics, + ItemVisibility, LValue, NoirStruct, NoirTypeAlias, Param, Path, PathKind, Pattern, Shared, + StructType, Type, TypeAlias, TypeVariable, TypeVariableKind, UnaryOp, UnresolvedGenerics, + UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, + Visibility, ERROR_IDENT, }; use fm::FileId; use iter_extended::vecmap; @@ -217,6 +217,7 @@ impl<'a> Resolver<'a> { pub fn resolve_trait_function( &mut self, name: &Ident, + generics: &UnresolvedGenerics, parameters: &[(Ident, UnresolvedType)], return_type: &FunctionReturnType, where_clause: &[UnresolvedTraitConstraint], @@ -233,11 +234,9 @@ impl<'a> Resolver<'a> { let def = FunctionDefinition { name: name.clone(), attributes: Attributes::empty(), - is_open: false, - is_internal: false, is_unconstrained: false, - visibility: FunctionVisibility::Public, // Trait functions are always public - generics: Vec::new(), // self.generics should already be set + visibility: ItemVisibility::Public, // Trait functions are always public + generics: generics.clone(), parameters: vecmap(parameters, |(name, typ)| Param { visibility: Visibility::Private, pattern: Pattern::Identifier(name.clone()), @@ -911,10 +910,6 @@ impl<'a> Resolver<'a> { }); } - if self.is_entry_point_function(func) { - self.verify_type_valid_for_program_input(&typ); - } - let pattern = self.resolve_pattern(pattern, DefinitionKind::Local(None)); let typ = self.resolve_type_inner(typ, &mut generics); @@ -976,14 +971,18 @@ impl<'a> Resolver<'a> { self.interner.push_definition_type(name_ident.id, typ.clone()); - self.handle_function_type(&func_id); - self.handle_is_function_internal(&func_id); + let direct_generics = func.def.generics.iter(); + let direct_generics = direct_generics + .filter_map(|generic| self.find_generic(&generic.0.contents)) + .map(|(name, typevar, _span)| (name.clone(), typevar.clone())) + .collect(); FuncMeta { name: name_ident, kind: func.kind, location, typ, + direct_generics, trait_impl: self.current_trait_impl, parameters: parameters.into(), return_type: func.def.return_type.clone(), @@ -991,6 +990,7 @@ impl<'a> Resolver<'a> { return_distinctness: func.def.return_distinctness, has_body: !func.def.body.is_empty(), trait_constraints: self.resolve_trait_constraints(&func.def.where_clause), + is_entry_point: self.is_entry_point_function(func), } } @@ -1018,34 +1018,14 @@ impl<'a> Resolver<'a> { /// True if the `distinct` keyword is allowed on a function's return type fn distinct_allowed(&self, func: &NoirFunction) -> bool { if 
self.in_contract { - // "open" and "unconstrained" functions are compiled to brillig and thus duplication of + // "unconstrained" functions are compiled to brillig and thus duplication of // witness indices in their abis is not a concern. - !func.def.is_unconstrained && !func.def.is_open + !func.def.is_unconstrained } else { func.name() == MAIN_FUNCTION } } - fn handle_function_type(&mut self, function: &FuncId) { - let function_type = self.interner.function_modifiers(function).contract_function_type; - - if !self.in_contract && function_type == Some(ContractFunctionType::Open) { - let span = self.interner.function_ident(function).span(); - self.errors.push(ResolverError::ContractFunctionTypeInNormalFunction { span }); - self.interner.function_modifiers_mut(function).contract_function_type = None; - } - } - - fn handle_is_function_internal(&mut self, function: &FuncId) { - if !self.in_contract { - if self.interner.function_modifiers(function).is_internal == Some(true) { - let span = self.interner.function_ident(function).span(); - self.push_err(ResolverError::ContractFunctionInternalInNormalFunction { span }); - } - self.interner.function_modifiers_mut(function).is_internal = None; - } - } - fn declare_numeric_generics(&mut self, params: &[Type], return_type: &Type) { if self.generics.is_empty() { return; @@ -1256,13 +1236,17 @@ impl<'a> Resolver<'a> { let is_in_stdlib = self.path_resolver.module_id().krate.is_stdlib(); let assert_msg_call_path = if is_in_stdlib { ExpressionKind::Variable(Path { - segments: vec![Ident::from("resolve_assert_message")], + segments: vec![Ident::from("internal"), Ident::from("resolve_assert_message")], kind: PathKind::Crate, span, }) } else { ExpressionKind::Variable(Path { - segments: vec![Ident::from("std"), Ident::from("resolve_assert_message")], + segments: vec![ + Ident::from("std"), + Ident::from("internal"), + Ident::from("resolve_assert_message"), + ], kind: PathKind::Dep, span, }) @@ -1311,7 +1295,7 @@ impl<'a> Resolver<'a> { &mut self, func: FuncId, span: Span, - visibility: FunctionVisibility, + visibility: ItemVisibility, ) { let function_module = self.interner.function_module(func); let current_module = self.path_resolver.module_id(); @@ -1321,8 +1305,8 @@ impl<'a> Resolver<'a> { let current_module = current_module.local_id; let name = self.interner.function_name(&func).to_string(); match visibility { - FunctionVisibility::Public => (), - FunctionVisibility::Private => { + ItemVisibility::Public => (), + ItemVisibility::Private => { if !same_crate || !self.module_descendent_of_target( krate, @@ -1333,7 +1317,7 @@ impl<'a> Resolver<'a> { self.errors.push(ResolverError::PrivateFunctionCalled { span, name }); } } - FunctionVisibility::PublicCrate => { + ItemVisibility::PublicCrate => { if !same_crate { self.errors.push(ResolverError::NonCrateFunctionCalled { span, name }); } @@ -1442,9 +1426,7 @@ impl<'a> Resolver<'a> { self.interner.add_function_dependency(current_item, id); } - if self.interner.function_visibility(id) - != FunctionVisibility::Public - { + if self.interner.function_visibility(id) != ItemVisibility::Public { let span = hir_ident.location.span; self.check_can_reference_function( id, @@ -1999,89 +1981,6 @@ impl<'a> Resolver<'a> { } HirLiteral::FmtStr(str, fmt_str_idents) } - - /// Only sized types are valid to be used as main's parameters or the parameters to a contract - /// function. If the given type is not sized (e.g. contains a slice or NamedGeneric type), an - /// error is issued. 
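A hedged sketch, with a toy `Ty` rather than the real `Type`, of the rule described in the removed comment above. The resolver-side walk over unresolved parameter types is deleted here; the same "only sized types at the entry point" rule is instead checked during type checking on resolved types, where a `Type::Alias` can simply be followed to its target (the behaviour the new `type_aliases_in_entry_point` test exercises).

#[derive(Debug)]
enum Ty {
    Field,
    Bool,
    Array(u64, Box<Ty>),
    Slice(Box<Ty>),
    Tuple(Vec<Ty>),
    Reference(Box<Ty>),
    Alias(Box<Ty>),
}

fn is_valid_for_program_input(ty: &Ty) -> bool {
    match ty {
        Ty::Field | Ty::Bool => true,
        // Fixed-length arrays and tuples are sized if all of their elements are.
        Ty::Array(_, element) => is_valid_for_program_input(element),
        Ty::Tuple(elements) => elements.iter().all(is_valid_for_program_input),
        // Slices and references have no fixed ABI width, so they are rejected.
        Ty::Slice(_) | Ty::Reference(_) => false,
        // A resolved alias is transparent: follow it to the aliased type.
        Ty::Alias(target) => is_valid_for_program_input(target),
    }
}

fn main() {
    let aliased = Ty::Alias(Box::new(Ty::Array(3, Box::new(Ty::Field))));
    assert!(is_valid_for_program_input(&aliased));
    let with_reference = Ty::Tuple(vec![Ty::Bool, Ty::Reference(Box::new(Ty::Field))]);
    assert!(!is_valid_for_program_input(&with_reference));
    assert!(!is_valid_for_program_input(&Ty::Slice(Box::new(Ty::Field))));
}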
- fn verify_type_valid_for_program_input(&mut self, typ: &UnresolvedType) { - match &typ.typ { - UnresolvedTypeData::FieldElement - | UnresolvedTypeData::Integer(_, _) - | UnresolvedTypeData::Bool - | UnresolvedTypeData::Unit - | UnresolvedTypeData::Error => (), - - UnresolvedTypeData::MutableReference(_) - | UnresolvedTypeData::Function(_, _, _) - | UnresolvedTypeData::FormatString(_, _) - | UnresolvedTypeData::TraitAsType(..) - | UnresolvedTypeData::Unspecified => { - let span = typ.span.expect("Function parameters should always have spans"); - self.push_err(ResolverError::InvalidTypeForEntryPoint { span }); - } - - UnresolvedTypeData::Array(length, element) => { - if let Some(length) = length { - self.verify_type_expression_valid_for_program_input(length); - } else { - let span = typ.span.expect("Function parameters should always have spans"); - self.push_err(ResolverError::InvalidTypeForEntryPoint { span }); - } - self.verify_type_valid_for_program_input(element); - } - UnresolvedTypeData::Expression(expression) => { - self.verify_type_expression_valid_for_program_input(expression); - } - UnresolvedTypeData::String(length) => { - if let Some(length) = length { - self.verify_type_expression_valid_for_program_input(length); - } else { - let span = typ.span.expect("Function parameters should always have spans"); - self.push_err(ResolverError::InvalidTypeForEntryPoint { span }); - } - } - UnresolvedTypeData::Named(path, generics, _) => { - // Since the type is named, we need to resolve it to see what it actually refers to - // in order to check whether it is valid. Since resolving it may lead to a - // resolution error, we have to truncate our error count to the previous count just - // in case. This is to ensure resolution errors are not issued twice when this type - // is later resolved properly. 
- let error_count = self.errors.len(); - let resolved = self.resolve_named_type(path.clone(), generics.clone(), &mut vec![]); - self.errors.truncate(error_count); - - if !resolved.is_valid_for_program_input() { - let span = typ.span.expect("Function parameters should always have spans"); - self.push_err(ResolverError::InvalidTypeForEntryPoint { span }); - } - } - UnresolvedTypeData::Tuple(elements) => { - for element in elements { - self.verify_type_valid_for_program_input(element); - } - } - UnresolvedTypeData::Parenthesized(typ) => self.verify_type_valid_for_program_input(typ), - } - } - - fn verify_type_expression_valid_for_program_input(&mut self, expr: &UnresolvedTypeExpression) { - match expr { - UnresolvedTypeExpression::Constant(_, _) => (), - UnresolvedTypeExpression::Variable(path) => { - let error_count = self.errors.len(); - let resolved = self.resolve_named_type(path.clone(), vec![], &mut vec![]); - self.errors.truncate(error_count); - - if !resolved.is_valid_for_program_input() { - self.push_err(ResolverError::InvalidTypeForEntryPoint { span: path.span() }); - } - } - UnresolvedTypeExpression::BinaryOperation(lhs, _, rhs, _) => { - self.verify_type_expression_valid_for_program_input(lhs); - self.verify_type_expression_valid_for_program_input(rhs); - } - } - } } /// Gives an error if a user tries to create a mutable reference diff --git a/compiler/noirc_frontend/src/hir/resolution/traits.rs b/compiler/noirc_frontend/src/hir/resolution/traits.rs index 8f966be312b..5d546954f0d 100644 --- a/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -8,9 +8,7 @@ use crate::{ graph::CrateId, hir::{ def_collector::{ - dc_crate::{ - check_methods_signatures, CompilationError, UnresolvedTrait, UnresolvedTraitImpl, - }, + dc_crate::{CompilationError, UnresolvedTrait, UnresolvedTraitImpl}, errors::{DefCollectorErrorKind, DuplicateType}, }, def_map::{CrateDefMap, ModuleDefId, ModuleId}, @@ -131,6 +129,7 @@ fn resolve_trait_methods( let func_id = unresolved_trait.method_ids[&name.0.contents]; let (_, func_meta) = resolver.resolve_trait_function( name, + generics, parameters, return_type, where_clause, @@ -365,6 +364,7 @@ pub(crate) fn resolve_trait_by_path( Err(_) => Err(DefCollectorErrorKind::TraitNotFound { trait_path: path }), } } + pub(crate) fn resolve_trait_impls( context: &mut Context, traits: Vec, @@ -424,17 +424,6 @@ pub(crate) fn resolve_trait_impls( new_resolver.set_self_type(Some(self_type.clone())); if let Some(trait_id) = maybe_trait_id { - check_methods_signatures( - &mut new_resolver, - &impl_methods, - trait_id, - trait_impl.trait_path.span(), - trait_impl.trait_generics, - trait_impl.generics.len(), - trait_impl.file_id, - errors, - ); - let where_clause = trait_impl .where_clause .into_iter() diff --git a/compiler/noirc_frontend/src/hir/type_check/errors.rs b/compiler/noirc_frontend/src/hir/type_check/errors.rs index 96d30100d8b..7eacc8eb2d1 100644 --- a/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -44,7 +44,7 @@ pub enum TypeCheckError { #[error("Expected type {expected} is not the same as {actual}")] TypeMismatchWithSource { expected: Type, actual: Type, span: Span, source: Source }, #[error("Expected {expected:?} found {found:?}")] - ArityMisMatch { expected: u16, found: u16, span: Span }, + ArityMisMatch { expected: usize, found: usize, span: Span }, #[error("Return type in a function cannot be public")] PublicReturnType { typ: Type, span: 
Span }, #[error("Cannot cast type {from}, 'as' is only for primitive field or integer types")] @@ -53,8 +53,10 @@ pub enum TypeCheckError { ExpectedFunction { found: Type, span: Span }, #[error("Type {lhs_type} has no member named {field_name}")] AccessUnknownMember { lhs_type: Type, field_name: String, span: Span }, - #[error("Function expects {expected} parameters but {found} given")] + #[error("Function expects {expected} parameters but {found} were given")] ParameterCountMismatch { expected: usize, found: usize, span: Span }, + #[error("{item} expects {expected} generics but {found} were given")] + GenericCountMismatch { item: String, expected: usize, found: usize, span: Span }, #[error("Only integer and Field types may be casted to")] UnsupportedCast { span: Span }, #[error("Index {index} is out of bounds for this tuple {lhs_type} of length {length}")] @@ -122,6 +124,16 @@ pub enum TypeCheckError { ConstrainedReferenceToUnconstrained { span: Span }, #[error("Slices cannot be returned from an unconstrained runtime to a constrained runtime")] UnconstrainedSliceReturnToConstrained { span: Span }, + #[error("Only sized types may be used in the entry point to a program")] + InvalidTypeForEntryPoint { span: Span }, + #[error("Mismatched number of parameters in trait implementation")] + MismatchTraitImplNumParameters { + actual_num_parameters: usize, + expected_num_parameters: usize, + trait_name: String, + method_name: String, + span: Span, + }, } impl TypeCheckError { @@ -191,6 +203,12 @@ impl From for Diagnostic { let msg = format!("Function expects {expected} parameter{empty_or_s} but {found} {was_or_were} given"); Diagnostic::simple_error(msg, String::new(), span) } + TypeCheckError::GenericCountMismatch { item, expected, found, span } => { + let empty_or_s = if expected == 1 { "" } else { "s" }; + let was_or_were = if found == 1 { "was" } else { "were" }; + let msg = format!("{item} expects {expected} generic{empty_or_s} but {found} {was_or_were} given"); + Diagnostic::simple_error(msg, String::new(), span) + } TypeCheckError::InvalidCast { span, .. } | TypeCheckError::ExpectedFunction { span, .. } | TypeCheckError::AccessUnknownMember { span, .. 
} @@ -284,6 +302,21 @@ impl From for Diagnostic { let msg = format!("Constraint for `{typ}: {trait_name}` is not needed, another matching impl is already in scope"); Diagnostic::simple_warning(msg, "Unnecessary trait constraint in where clause".into(), span) } + TypeCheckError::InvalidTypeForEntryPoint { span } => Diagnostic::simple_error( + "Only sized types may be used in the entry point to a program".to_string(), + "Slices, references, or any type containing them may not be used in main or a contract function".to_string(), span), + TypeCheckError::MismatchTraitImplNumParameters { + expected_num_parameters, + actual_num_parameters, + trait_name, + method_name, + span, + } => { + let plural = if expected_num_parameters == 1 { "" } else { "s" }; + let primary_message = format!( + "`{trait_name}::{method_name}` expects {expected_num_parameters} parameter{plural}, but this method has {actual_num_parameters}"); + Diagnostic::simple_error(primary_message, "".to_string(), span) + } } } } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index 7b854e58fca..7219f4d09c6 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -863,7 +863,13 @@ impl<'interner> TypeChecker<'interner> { span: op.location.span, }); - self.comparator_operand_type_rules(x_type, y_type, op, span) + let (_, use_impl) = self.comparator_operand_type_rules(x_type, y_type, op, span)?; + + // If the size is not constant, we must fall back to a user-provided impl for + // equality on slices. + let size = x_size.follow_bindings(); + let use_impl = use_impl || size.evaluate_to_u64().is_none(); + Ok((Bool, use_impl)) } (String(x_size), String(y_size)) => { @@ -1034,9 +1040,9 @@ impl<'interner> TypeChecker<'interner> { } ret } + // ignoring env for subtype on purpose Type::Function(parameters, ret, _env) => { - // ignoring env for subtype on purpose - self.bind_function_type_impl(parameters.as_ref(), ret.as_ref(), args.as_ref(), span) + self.bind_function_type_impl(¶meters, &ret, &args, span) } Type::Error => Type::Error, found => { diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 21d1c75a0f2..ab759f454e5 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -12,11 +12,17 @@ mod expr; mod stmt; pub use errors::TypeCheckError; +use noirc_errors::Span; use crate::{ - hir_def::{expr::HirExpression, stmt::HirStatement, traits::TraitConstraint}, + hir_def::{ + expr::HirExpression, + function::{Param, Parameters}, + stmt::HirStatement, + traits::TraitConstraint, + }, node_interner::{ExprId, FuncId, GlobalId, NodeInterner}, - Type, + Type, TypeBindings, }; use self::errors::Source; @@ -74,6 +80,7 @@ pub fn type_check_func(interner: &mut NodeInterner, func_id: FuncId) -> Vec Vec, + func_id: FuncId, + param: &Param, + errors: &mut Vec, +) { + let meta = type_checker.interner.function_meta(&func_id); + if meta.is_entry_point && !param.1.is_valid_for_program_input() { + let span = param.0.span(); + errors.push(TypeCheckError::InvalidTypeForEntryPoint { span }); + } +} + fn function_info(interner: &NodeInterner, function_body_id: &ExprId) -> (noirc_errors::Span, bool) { let (expr_span, empty_function) = if let HirExpression::Block(block) = interner.expression(function_body_id) { @@ -162,6 +185,154 @@ fn function_info(interner: &NodeInterner, function_body_id: &ExprId) -> 
(noirc_e (expr_span, empty_function) } +/// Checks that the type of a function in a trait impl matches the type +/// of the corresponding function declaration in the trait itself. +/// +/// To do this, given a trait such as: +/// `trait Foo { fn foo(...); }` +/// +/// And an impl such as: +/// `impl Foo for Bar { fn foo(...); } ` +/// +/// We have to substitute: +/// - Self for Bar +/// - A for D +/// - B for F +/// +/// Before we can type check. Finally, we must also check that the unification +/// result does not introduce any new bindings. This can happen if the impl +/// function's type is more general than that of the trait function. E.g. +/// `fn baz(a: A, b: B)` when the impl required `fn baz(a: A, b: A)`. +/// +/// This does not type check the body of the impl function. +pub(crate) fn check_trait_impl_method_matches_declaration( + interner: &mut NodeInterner, + function: FuncId, +) -> Vec { + let meta = interner.function_meta(&function); + let method_name = interner.function_name(&function); + let mut errors = Vec::new(); + + let definition_type = meta.typ.as_monotype(); + + let impl_ = + meta.trait_impl.expect("Trait impl function should have a corresponding trait impl"); + let impl_ = interner.get_trait_implementation(impl_); + let impl_ = impl_.borrow(); + let trait_info = interner.get_trait(impl_.trait_id); + + let mut bindings = TypeBindings::new(); + bindings.insert( + trait_info.self_type_typevar_id, + (trait_info.self_type_typevar.clone(), impl_.typ.clone()), + ); + + if trait_info.generics.len() != impl_.trait_generics.len() { + let expected = trait_info.generics.len(); + let found = impl_.trait_generics.len(); + let span = impl_.ident.span(); + let item = trait_info.name.to_string(); + errors.push(TypeCheckError::GenericCountMismatch { item, expected, found, span }); + } + + // Substitute each generic on the trait with the corresponding generic on the impl + for (generic, arg) in trait_info.generics.iter().zip(&impl_.trait_generics) { + bindings.insert(generic.id(), (generic.clone(), arg.clone())); + } + + // If this is None, the trait does not have the corresponding function. + // This error should have been caught in name resolution already so we don't + // issue an error for it here. 
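A hedged sketch of the pre-binding step above: the trait's `Self` type variable and each trait generic are bound to the impl's concrete types before the two method signatures are compared. `TypeVarId` and `Ty` are hypothetical stand-ins for the interner's type-variable and type representations, and the example types are made up.

use std::collections::HashMap;

type TypeVarId = u32;

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Var(TypeVarId),
    Named(&'static str),
}

fn substitute(ty: &Ty, bindings: &HashMap<TypeVarId, Ty>) -> Ty {
    match ty {
        // A pre-bound variable is replaced by whatever it was bound to.
        Ty::Var(id) => bindings.get(id).cloned().unwrap_or_else(|| ty.clone()),
        other => other.clone(),
    }
}

fn main() {
    // `Self` (variable 0) is bound to the impl's target type and the trait's generic
    // (variable 1) to the generic supplied on the impl, mirroring the zip over
    // `trait_info.generics` and `impl_.trait_generics` above.
    let mut bindings = HashMap::new();
    bindings.insert(0, Ty::Named("Bar"));
    bindings.insert(1, Ty::Named("Field"));
    assert_eq!(substitute(&Ty::Var(0), &bindings), Ty::Named("Bar"));
    assert_eq!(substitute(&Ty::Named("u8"), &bindings), Ty::Named("u8"));
}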
+ if let Some(trait_fn_id) = trait_info.method_ids.get(method_name) { + let trait_fn_meta = interner.function_meta(trait_fn_id); + + if trait_fn_meta.direct_generics.len() != meta.direct_generics.len() { + let expected = trait_fn_meta.direct_generics.len(); + let found = meta.direct_generics.len(); + let span = meta.name.location.span; + let item = method_name.to_string(); + errors.push(TypeCheckError::GenericCountMismatch { item, expected, found, span }); + } + + // Substitute each generic on the trait function with the corresponding generic on the impl function + for ((_, trait_fn_generic), (name, impl_fn_generic)) in + trait_fn_meta.direct_generics.iter().zip(&meta.direct_generics) + { + let arg = Type::NamedGeneric(impl_fn_generic.clone(), name.clone()); + bindings.insert(trait_fn_generic.id(), (trait_fn_generic.clone(), arg)); + } + + let (declaration_type, _) = trait_fn_meta.typ.instantiate_with_bindings(bindings, interner); + + check_function_type_matches_expected_type( + &declaration_type, + definition_type, + method_name, + &meta.parameters, + meta.name.location.span, + &trait_info.name.0.contents, + &mut errors, + ); + } + + errors +} + +fn check_function_type_matches_expected_type( + expected: &Type, + actual: &Type, + method_name: &str, + actual_parameters: &Parameters, + span: Span, + trait_name: &str, + errors: &mut Vec, +) { + let mut bindings = TypeBindings::new(); + // Shouldn't need to unify envs, they should always be equal since they're both free functions + if let (Type::Function(params_a, ret_a, _env_a), Type::Function(params_b, ret_b, _env_b)) = + (expected, actual) + { + if params_a.len() == params_b.len() { + for (i, (a, b)) in params_a.iter().zip(params_b.iter()).enumerate() { + if a.try_unify(b, &mut bindings).is_err() { + errors.push(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name: method_name.to_string(), + expected_typ: a.to_string(), + actual_typ: b.to_string(), + parameter_span: actual_parameters.0[i].0.span(), + parameter_index: i + 1, + }); + } + } + + if ret_b.try_unify(ret_a, &mut bindings).is_err() { + errors.push(TypeCheckError::TypeMismatch { + expected_typ: ret_a.to_string(), + expr_typ: ret_b.to_string(), + expr_span: span, + }); + } + } else { + errors.push(TypeCheckError::MismatchTraitImplNumParameters { + actual_num_parameters: params_b.len(), + expected_num_parameters: params_a.len(), + trait_name: trait_name.to_string(), + method_name: method_name.to_string(), + span, + }); + } + } + + // If result bindings is not empty, a type variable was bound which means the two + // signatures were not a perfect match. Note that this relies on us already binding + // all the expected generics to each other prior to this check. 
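A hedged illustration of the "no leftover bindings" rule noted in the comment above: once the expected generics are pre-bound, unifying the two signatures must not bind anything new, otherwise the impl method is more general than the declaration. The toy unifier below only handles variables and named types; it is not the real `Type::try_unify`.

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Var(u32),
    Named(&'static str),
}

fn try_unify(a: &Ty, b: &Ty, bindings: &mut HashMap<u32, Ty>) -> bool {
    match (a, b) {
        (Ty::Var(id), other) | (other, Ty::Var(id)) => {
            let existing = bindings.get(id).cloned();
            match existing {
                // An already-bound variable must agree with whatever it meets next.
                Some(bound) => try_unify(&bound, other, bindings),
                // Binding a fresh variable succeeds, but leaves a trace in `bindings`.
                None => {
                    bindings.insert(*id, other.clone());
                    true
                }
            }
        }
        (Ty::Named(x), Ty::Named(y)) => x == y,
    }
}

fn main() {
    // A method generic over two independent parameters, checked against a declaration
    // that reuses a single generic: unification succeeds only by adding a new binding,
    // so the non-empty `bindings` map flags the signatures as mismatched.
    let mut bindings = HashMap::new();
    assert!(try_unify(&Ty::Var(1), &Ty::Var(2), &mut bindings));
    assert!(!bindings.is_empty());
}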
+ if !bindings.is_empty() { + let expected_typ = expected.to_string(); + let expr_typ = actual.to_string(); + errors.push(TypeCheckError::TypeMismatch { expected_typ, expr_typ, expr_span: span }); + } +} + impl<'interner> TypeChecker<'interner> { fn new(interner: &'interner mut NodeInterner) -> Self { Self { interner, errors: Vec::new(), trait_constraints: Vec::new(), current_function: None } @@ -329,6 +500,8 @@ mod test { trait_impl: None, return_type: FunctionReturnType::Default(Span::default()), trait_constraints: Vec::new(), + direct_generics: Vec::new(), + is_entry_point: true, }; interner.push_fn_meta(func_meta, func_id); diff --git a/compiler/noirc_frontend/src/hir_def/function.rs b/compiler/noirc_frontend/src/hir_def/function.rs index d3ab2a9393b..56543e8185c 100644 --- a/compiler/noirc_frontend/src/hir_def/function.rs +++ b/compiler/noirc_frontend/src/hir_def/function.rs @@ -1,12 +1,14 @@ use iter_extended::vecmap; use noirc_errors::{Location, Span}; +use std::rc::Rc; + use super::expr::{HirBlockExpression, HirExpression, HirIdent}; use super::stmt::HirPattern; use super::traits::TraitConstraint; use crate::node_interner::{ExprId, NodeInterner, TraitImplId}; use crate::FunctionKind; -use crate::{Distinctness, FunctionReturnType, Type, Visibility}; +use crate::{Distinctness, FunctionReturnType, Type, TypeVariable, Visibility}; /// A Hir function is a block expression /// with a list of statements @@ -103,6 +105,12 @@ pub struct FuncMeta { /// or a Type::Forall for generic functions. pub typ: Type, + /// The set of generics that are declared directly on this function in the source code. + /// This does not include generics from an outer scope, like those introduced by + /// an `impl` block. This also does not include implicit generics added by the compiler + /// such as a trait's `Self` type variable. + pub direct_generics: Vec<(Rc, TypeVariable)>, + pub location: Location, // This flag is needed for the attribute check pass @@ -112,6 +120,10 @@ pub struct FuncMeta { /// The trait impl this function belongs to, if any pub trait_impl: Option, + + /// True if this function is an entry point to the program. + /// For non-contracts, this means the function is `main`. + pub is_entry_point: bool, } impl FuncMeta { diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index b70aa43701c..5ab036eef5b 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -703,10 +703,10 @@ impl Type { | Type::TraitAsType(..) | Type::NotConstant => false, - // This function is called during name resolution before we've verified aliases - // are not cyclic. As a result, it wouldn't be safe to check this alias' definition - // to see if the aliased type is valid. - Type::Alias(..) => false, + Type::Alias(alias, generics) => { + let alias = alias.borrow(); + alias.get_type(generics).is_valid_for_program_input() + } Type::Array(length, element) => { length.is_valid_for_program_input() && element.is_valid_for_program_input() @@ -1768,9 +1768,11 @@ impl From<&Type> for PrintableType { Type::TypeVariable(_, _) => unreachable!(), Type::NamedGeneric(..) => unreachable!(), Type::Forall(..) 
=> unreachable!(), - Type::Function(_, _, env) => { - PrintableType::Function { env: Box::new(env.as_ref().into()) } - } + Type::Function(arguments, return_type, env) => PrintableType::Function { + arguments: arguments.iter().map(|arg| arg.into()).collect(), + return_type: Box::new(return_type.as_ref().into()), + env: Box::new(env.as_ref().into()), + }, Type::MutableReference(typ) => { PrintableType::MutableReference { typ: Box::new(typ.as_ref().into()) } } diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index f096c220200..3dc9d05b15e 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -664,7 +664,6 @@ pub enum Keyword { Let, Mod, Mut, - Open, Pub, Return, ReturnData, @@ -706,7 +705,6 @@ impl fmt::Display for Keyword { Keyword::Let => write!(f, "let"), Keyword::Mod => write!(f, "mod"), Keyword::Mut => write!(f, "mut"), - Keyword::Open => write!(f, "open"), Keyword::Pub => write!(f, "pub"), Keyword::Return => write!(f, "return"), Keyword::ReturnData => write!(f, "return_data"), @@ -751,7 +749,6 @@ impl Keyword { "let" => Keyword::Let, "mod" => Keyword::Mod, "mut" => Keyword::Mut, - "open" => Keyword::Open, "pub" => Keyword::Pub, "return" => Keyword::Return, "return_data" => Keyword::ReturnData, diff --git a/compiler/noirc_frontend/src/lib.rs b/compiler/noirc_frontend/src/lib.rs index be007929fc4..1871b594ae7 100644 --- a/compiler/noirc_frontend/src/lib.rs +++ b/compiler/noirc_frontend/src/lib.rs @@ -56,14 +56,13 @@ pub mod macros_api { pub use crate::hir::def_map::ModuleDefId; pub use crate::{ hir::Context as HirContext, BlockExpression, CallExpression, CastExpression, Distinctness, - Expression, ExpressionKind, FunctionReturnType, Ident, IndexExpression, LetStatement, - Literal, MemberAccessExpression, MethodCallExpression, NoirFunction, Path, PathKind, - Pattern, Statement, UnresolvedType, UnresolvedTypeData, Visibility, + Expression, ExpressionKind, FunctionReturnType, Ident, IndexExpression, ItemVisibility, + LetStatement, Literal, MemberAccessExpression, MethodCallExpression, NoirFunction, Path, + PathKind, Pattern, Statement, UnresolvedType, UnresolvedTypeData, Visibility, }; pub use crate::{ - ForLoopStatement, ForRange, FunctionDefinition, FunctionVisibility, ImportStatement, - NoirStruct, Param, PrefixExpression, Signedness, StatementKind, StructType, Type, TypeImpl, - UnaryOp, + ForLoopStatement, ForRange, FunctionDefinition, ImportStatement, NoirStruct, Param, + PrefixExpression, Signedness, StatementKind, StructType, Type, TypeImpl, UnaryOp, }; /// Methods to process the AST before and after type checking @@ -77,12 +76,12 @@ pub mod macros_api { ) -> Result; // TODO(#4653): generalize this function - fn process_unresolved_traits_impls( + fn process_collected_defs( &self, _crate_id: &CrateId, _context: &mut HirContext, - _unresolved_traits_impls: &[UnresolvedTraitImpl], - _collected_functions: &mut Vec, + _collected_trait_impls: &[UnresolvedTraitImpl], + _collected_functions: &mut [UnresolvedFunctions], ) -> Result<(), (MacroError, FileId)>; /// Function to manipulate the AST after type checking has been completed. 
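A minimal sketch of the `FunctionVisibility` to `ItemVisibility` generalisation that runs through these hunks (item scopes, namespaces, the resolver's reference check, `FunctionModifiers`, the parser and the `macros_api` re-exports). `can_be_referenced_from` is a hypothetical helper condensing the logic of `check_can_reference_function`; the real check also walks module ancestry.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ItemVisibility {
    Public,
    PublicCrate,
    Private,
}

fn can_be_referenced_from(
    visibility: ItemVisibility,
    same_crate: bool,
    in_defining_module_subtree: bool,
) -> bool {
    match visibility {
        // `pub` items are visible everywhere.
        ItemVisibility::Public => true,
        // `pub(crate)` items are visible anywhere inside the defining crate.
        ItemVisibility::PublicCrate => same_crate,
        // Private items are visible only from the defining module and its descendants.
        ItemVisibility::Private => same_crate && in_defining_module_subtree,
    }
}

fn main() {
    assert!(can_be_referenced_from(ItemVisibility::Public, false, false));
    assert!(!can_be_referenced_from(ItemVisibility::PublicCrate, false, false));
    assert!(!can_be_referenced_from(ItemVisibility::Private, true, false));
}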
diff --git a/compiler/noirc_frontend/src/monomorphization/ast.rs b/compiler/noirc_frontend/src/monomorphization/ast.rs index e4e619d5d92..7fcf8e87792 100644 --- a/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -1,7 +1,7 @@ use acvm::FieldElement; use iter_extended::vecmap; use noirc_errors::{ - debug_info::{DebugTypes, DebugVariables}, + debug_info::{DebugFunctions, DebugTypes, DebugVariables}, Location, }; @@ -253,6 +253,7 @@ pub struct Program { /// Indicates to a backend whether a SNARK-friendly prover should be used. pub recursive: bool, pub debug_variables: DebugVariables, + pub debug_functions: DebugFunctions, pub debug_types: DebugTypes, } @@ -266,6 +267,7 @@ impl Program { return_visibility: Visibility, recursive: bool, debug_variables: DebugVariables, + debug_functions: DebugFunctions, debug_types: DebugTypes, ) -> Program { Program { @@ -276,6 +278,7 @@ impl Program { return_visibility, recursive, debug_variables, + debug_functions, debug_types, } } diff --git a/compiler/noirc_frontend/src/monomorphization/debug.rs b/compiler/noirc_frontend/src/monomorphization/debug.rs index a8ff4399f99..cf4e0ab792e 100644 --- a/compiler/noirc_frontend/src/monomorphization/debug.rs +++ b/compiler/noirc_frontend/src/monomorphization/debug.rs @@ -76,7 +76,7 @@ impl<'interner> Monomorphizer<'interner> { let var_type = self.interner.id_type(call.arguments[DEBUG_VALUE_ARG_SLOT]); let source_var_id = source_var_id.to_u128().into(); // then update the ID used for tracking at runtime - let var_id = self.debug_type_tracker.insert_var(source_var_id, var_type); + let var_id = self.debug_type_tracker.insert_var(source_var_id, &var_type); let interned_var_id = self.intern_var_id(var_id, &call.location); arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; Ok(()) diff --git a/compiler/noirc_frontend/src/monomorphization/debug_types.rs b/compiler/noirc_frontend/src/monomorphization/debug_types.rs index fea073d394f..16b82d1e7b9 100644 --- a/compiler/noirc_frontend/src/monomorphization/debug_types.rs +++ b/compiler/noirc_frontend/src/monomorphization/debug_types.rs @@ -3,7 +3,8 @@ use crate::{ hir_def::types::Type, }; use noirc_errors::debug_info::{ - DebugTypeId, DebugTypes, DebugVarId, DebugVariable, DebugVariables, + DebugFnId, DebugFunction, DebugFunctions, DebugTypeId, DebugTypes, DebugVarId, DebugVariable, + DebugVariables, }; use noirc_printable_type::PrintableType; use std::collections::HashMap; @@ -30,7 +31,10 @@ pub struct DebugTypeTracker { // All instances of tracked variables variables: HashMap, - // Types of tracked variables + // Function metadata collected during instrumentation injection + functions: HashMap, + + // Types of tracked variables and functions types: HashMap, types_reverse: HashMap, @@ -43,34 +47,29 @@ impl DebugTypeTracker { DebugTypeTracker { source_variables: instrumenter.variables.clone(), source_field_names: instrumenter.field_names.clone(), + functions: instrumenter.functions.clone(), ..DebugTypeTracker::default() } } - pub fn extract_vars_and_types(&self) -> (DebugVariables, DebugTypes) { + pub fn extract_vars_and_types(&self) -> (DebugVariables, DebugFunctions, DebugTypes) { let debug_variables = self .variables .clone() .into_iter() .map(|(var_id, (source_var_id, type_id))| { - ( - var_id, - DebugVariable { - name: self.source_variables.get(&source_var_id).cloned().unwrap_or_else( - || { - unreachable!( - "failed to retrieve variable name for {source_var_id:?}" - ); - }, - ), - debug_type_id: 
type_id, - }, - ) + let var_name = + self.source_variables.get(&source_var_id).cloned().unwrap_or_else(|| { + unreachable!("failed to retrieve variable name for {source_var_id:?}"); + }); + (var_id, DebugVariable { name: var_name, debug_type_id: type_id }) }) .collect(); + + let debug_functions = self.functions.clone().into_iter().collect(); let debug_types = self.types.clone().into_iter().collect(); - (debug_variables, debug_types) + (debug_variables, debug_functions, debug_types) } pub fn resolve_field_index( @@ -83,19 +82,24 @@ impl DebugTypeTracker { .and_then(|field_name| get_field(cursor_type, field_name)) } - pub fn insert_var(&mut self, source_var_id: SourceVarId, var_type: Type) -> DebugVarId { - if !self.source_variables.contains_key(&source_var_id) { - unreachable!("cannot find source debug variable {source_var_id:?}"); - } - - let ptype: PrintableType = var_type.follow_bindings().into(); - let type_id = self.types_reverse.get(&ptype).copied().unwrap_or_else(|| { + fn insert_type(&mut self, the_type: &Type) -> DebugTypeId { + let ptype: PrintableType = the_type.follow_bindings().into(); + self.types_reverse.get(&ptype).copied().unwrap_or_else(|| { let type_id = DebugTypeId(self.next_type_id); self.next_type_id += 1; self.types_reverse.insert(ptype.clone(), type_id); self.types.insert(type_id, ptype); type_id - }); + }) + } + + pub fn insert_var(&mut self, source_var_id: SourceVarId, var_type: &Type) -> DebugVarId { + if !self.source_variables.contains_key(&source_var_id) { + unreachable!("cannot find source debug variable {source_var_id:?}"); + } + + let type_id = self.insert_type(var_type); + // check if we need to instantiate the var with a new type let existing_var_id = self.source_to_debug_vars.get(&source_var_id).and_then(|var_id| { let (_, existing_type_id) = self.variables.get(var_id).unwrap(); diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index ce880401d77..4938d33aff9 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -28,8 +28,8 @@ use crate::{ }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, - ContractFunctionType, FunctionKind, IntegerBitSize, Signedness, Type, TypeBinding, - TypeBindings, TypeVariable, TypeVariableKind, UnaryOp, Visibility, + FunctionKind, IntegerBitSize, Signedness, Type, TypeBinding, TypeBindings, TypeVariable, + TypeVariableKind, UnaryOp, Visibility, }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; @@ -165,7 +165,8 @@ pub fn monomorphize_debug( let FuncMeta { return_distinctness, return_visibility, kind, .. } = monomorphizer.interner.function_meta(&main); - let (debug_variables, debug_types) = monomorphizer.debug_type_tracker.extract_vars_and_types(); + let (debug_variables, debug_functions, debug_types) = + monomorphizer.debug_type_tracker.extract_vars_and_types(); let program = Program::new( functions, function_sig, @@ -174,6 +175,7 @@ pub fn monomorphize_debug( *return_visibility, *kind == FunctionKind::Recursive, debug_variables, + debug_functions, debug_types, ); Ok(program) @@ -310,8 +312,7 @@ impl<'interner> Monomorphizer<'interner> { Type::TraitAsType(..) 
=> &body_return_type, _ => meta.return_type(), }); - let unconstrained = modifiers.is_unconstrained - || matches!(modifiers.contract_function_type, Some(ContractFunctionType::Open)); + let unconstrained = modifiers.is_unconstrained; let parameters = self.parameters(&meta.parameters); let body = self.expr(body_expr_id)?; @@ -1689,23 +1690,15 @@ impl<'interner> Monomorphizer<'interner> { } fn unwrap_tuple_type(typ: &HirType) -> Vec { - match typ { + match typ.follow_bindings() { HirType::Tuple(fields) => fields.clone(), - HirType::TypeVariable(binding, TypeVariableKind::Normal) => match &*binding.borrow() { - TypeBinding::Bound(binding) => unwrap_tuple_type(binding), - TypeBinding::Unbound(_) => unreachable!(), - }, other => unreachable!("unwrap_tuple_type: expected tuple, found {:?}", other), } } fn unwrap_struct_type(typ: &HirType) -> Vec<(String, HirType)> { - match typ { - HirType::Struct(def, args) => def.borrow().get_fields(args), - HirType::TypeVariable(binding, TypeVariableKind::Normal) => match &*binding.borrow() { - TypeBinding::Bound(binding) => unwrap_struct_type(binding), - TypeBinding::Unbound(_) => unreachable!(), - }, + match typ.follow_bindings() { + HirType::Struct(def, args) => def.borrow().get_fields(&args), other => unreachable!("unwrap_struct_type: expected struct, found {:?}", other), } } diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 5de43e59254..b83d2008530 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -28,8 +28,8 @@ use crate::hir_def::{ }; use crate::token::{Attributes, SecondaryAttribute}; use crate::{ - BinaryOpKind, ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, - TypeAlias, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, + BinaryOpKind, FunctionDefinition, Generics, ItemVisibility, Shared, TypeAlias, TypeBindings, + TypeVariable, TypeVariableId, TypeVariableKind, }; /// An arbitrary number to limit the recursion depth when searching for trait impls. @@ -236,20 +236,11 @@ pub struct FunctionModifiers { pub name: String, /// Whether the function is `pub` or not. - pub visibility: FunctionVisibility, + pub visibility: ItemVisibility, pub attributes: Attributes, pub is_unconstrained: bool, - - /// This function's type in its contract. - /// If this function is not in a contract, this is always 'Secret'. - pub contract_function_type: Option, - - /// This function's contract visibility. - /// If this function is internal can only be called by itself. - /// Will be None if not in contract. - pub is_internal: Option, } impl FunctionModifiers { @@ -259,11 +250,9 @@ impl FunctionModifiers { pub fn new() -> Self { Self { name: String::new(), - visibility: FunctionVisibility::Public, + visibility: ItemVisibility::Public, attributes: Attributes::empty(), is_unconstrained: false, - is_internal: None, - contract_function_type: None, } } } @@ -759,17 +748,11 @@ impl NodeInterner { module: ModuleId, location: Location, ) -> DefinitionId { - use ContractFunctionType::*; - - // We're filling in contract_function_type and is_internal now, but these will be verified - // later during name resolution. 
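A hedged sketch, with toy types rather than the real `HirType`, of why the monomorphizer hunks above can drop their hand-written type-variable matches: a `follow_bindings`-style helper resolves any bound type variables first, so `unwrap_tuple_type` only needs to match the underlying `Tuple`.

use std::cell::RefCell;
use std::rc::Rc;

#[derive(Clone, Debug)]
enum Ty {
    Field,
    Tuple(Vec<Ty>),
    // A shared, possibly-bound type variable, standing in for `Type::TypeVariable`.
    Var(Rc<RefCell<Option<Ty>>>),
}

fn follow_bindings(ty: &Ty) -> Ty {
    match ty {
        Ty::Var(binding) => match &*binding.borrow() {
            Some(bound) => follow_bindings(bound),
            None => ty.clone(),
        },
        Ty::Tuple(fields) => Ty::Tuple(fields.iter().map(follow_bindings).collect()),
        other => other.clone(),
    }
}

fn unwrap_tuple_type(ty: &Ty) -> Vec<Ty> {
    match follow_bindings(ty) {
        Ty::Tuple(fields) => fields,
        other => unreachable!("expected tuple, found {other:?}"),
    }
}

fn main() {
    // A type variable bound to a two-field tuple unwraps like the tuple itself.
    let var = Ty::Var(Rc::new(RefCell::new(Some(Ty::Tuple(vec![Ty::Field, Ty::Field])))));
    assert_eq!(unwrap_tuple_type(&var).len(), 2);
}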
let modifiers = FunctionModifiers { name: function.name.0.contents.clone(), visibility: function.visibility, attributes: function.attributes.clone(), is_unconstrained: function.is_unconstrained, - contract_function_type: Some(if function.is_open { Open } else { Secret }), - is_internal: Some(function.is_internal), }; self.push_function_definition(id, modifiers, module, location) } @@ -799,7 +782,7 @@ impl NodeInterner { /// /// The underlying function_visibilities map is populated during def collection, /// so this function can be called anytime afterward. - pub fn function_visibility(&self, func: FuncId) -> FunctionVisibility { + pub fn function_visibility(&self, func: FuncId) -> ItemVisibility { self.function_modifiers[&func].visibility } diff --git a/compiler/noirc_frontend/src/parser/mod.rs b/compiler/noirc_frontend/src/parser/mod.rs index 0ff7819c00f..ea96dee8a47 100644 --- a/compiler/noirc_frontend/src/parser/mod.rs +++ b/compiler/noirc_frontend/src/parser/mod.rs @@ -14,8 +14,8 @@ mod parser; use crate::token::{Keyword, Token}; use crate::{ast::ImportStatement, Expression, NoirStruct}; use crate::{ - Ident, LetStatement, NoirFunction, NoirTrait, NoirTraitImpl, NoirTypeAlias, Recoverable, - StatementKind, TypeImpl, UseTree, + Ident, LetStatement, ModuleDeclaration, NoirFunction, NoirTrait, NoirTraitImpl, NoirTypeAlias, + Recoverable, StatementKind, TypeImpl, UseTree, }; use chumsky::prelude::*; @@ -28,7 +28,7 @@ pub use parser::parse_program; #[derive(Debug, Clone)] pub(crate) enum TopLevelStatement { Function(NoirFunction), - Module(Ident), + Module(ModuleDeclaration), Import(UseTree), Struct(NoirStruct), Trait(NoirTrait), @@ -220,7 +220,7 @@ pub struct SortedModule { pub globals: Vec, /// Module declarations like `mod foo;` - pub module_decls: Vec, + pub module_decls: Vec, /// Full submodules as in `mod foo { ... definitions ... 
}` pub submodules: Vec, @@ -229,7 +229,7 @@ pub struct SortedModule { impl std::fmt::Display for SortedModule { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { for decl in &self.module_decls { - writeln!(f, "mod {decl};")?; + writeln!(f, "{decl};")?; } for import in &self.imports { @@ -309,7 +309,7 @@ pub enum ItemKind { Impl(TypeImpl), TypeAlias(NoirTypeAlias), Global(LetStatement), - ModuleDecl(Ident), + ModuleDecl(ModuleDeclaration), Submodules(ParsedSubModule), } @@ -380,8 +380,8 @@ impl SortedModule { self.imports.extend(import_stmt.desugar(None)); } - fn push_module_decl(&mut self, mod_name: Ident) { - self.module_decls.push(mod_name); + fn push_module_decl(&mut self, mod_decl: ModuleDeclaration) { + self.module_decls.push(mod_decl); } fn push_submodule(&mut self, submodule: SortedSubModule) { @@ -474,7 +474,7 @@ impl std::fmt::Display for TopLevelStatement { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { TopLevelStatement::Function(fun) => fun.fmt(f), - TopLevelStatement::Module(m) => write!(f, "mod {m}"), + TopLevelStatement::Module(m) => m.fmt(f), TopLevelStatement::Import(tree) => write!(f, "use {tree}"), TopLevelStatement::Trait(t) => t.fmt(f), TopLevelStatement::TraitImpl(i) => i.fmt(f), diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index 75f4a6359bf..383a1ffafc9 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -40,9 +40,9 @@ use crate::parser::{force, ignore_then_commit, statement_recovery}; use crate::token::{Keyword, Token, TokenKind}; use crate::{ BinaryOp, BinaryOpKind, BlockExpression, Distinctness, ForLoopStatement, ForRange, - FunctionReturnType, Ident, IfExpression, InfixExpression, LValue, Literal, NoirTypeAlias, - Param, Path, Pattern, Recoverable, Statement, TraitBound, TypeImpl, UnresolvedTraitConstraint, - UnresolvedTypeExpression, UseTree, UseTreeKind, Visibility, + FunctionReturnType, Ident, IfExpression, InfixExpression, LValue, Literal, ModuleDeclaration, + NoirTypeAlias, Param, Path, Pattern, Recoverable, Statement, TraitBound, TypeImpl, + UnresolvedTraitConstraint, UnresolvedTypeExpression, UseTree, UseTreeKind, Visibility, }; use chumsky::prelude::*; @@ -370,7 +370,9 @@ fn optional_type_annotation<'a>() -> impl NoirParser + 'a { } fn module_declaration() -> impl NoirParser { - keyword(Keyword::Mod).ignore_then(ident()).map(TopLevelStatement::Module) + keyword(Keyword::Mod) + .ignore_then(ident()) + .map(|ident| TopLevelStatement::Module(ModuleDeclaration { ident })) } fn use_statement() -> impl NoirParser { diff --git a/compiler/noirc_frontend/src/parser/parser/function.rs b/compiler/noirc_frontend/src/parser/parser/function.rs index 7448d84cfe1..7c365e7eb5a 100644 --- a/compiler/noirc_frontend/src/parser/parser/function.rs +++ b/compiler/noirc_frontend/src/parser/parser/function.rs @@ -8,7 +8,7 @@ use crate::parser::labels::ParsingRuleLabel; use crate::parser::spanned; use crate::token::{Keyword, Token}; use crate::{ - Distinctness, FunctionDefinition, FunctionReturnType, FunctionVisibility, Ident, NoirFunction, + Distinctness, FunctionDefinition, FunctionReturnType, Ident, ItemVisibility, NoirFunction, Param, Visibility, }; @@ -36,9 +36,6 @@ pub(super) fn function_definition(allow_self: bool) -> impl NoirParser impl NoirParser impl NoirParser { +fn visibility_modifier() -> impl NoirParser { let is_pub_crate = (keyword(Keyword::Pub) .then_ignore(just(Token::LeftParen)) 
.then_ignore(keyword(Keyword::Crate)) .then_ignore(just(Token::RightParen))) - .map(|_| FunctionVisibility::PublicCrate); + .map(|_| ItemVisibility::PublicCrate); - let is_pub = keyword(Keyword::Pub).map(|_| FunctionVisibility::Public); + let is_pub = keyword(Keyword::Pub).map(|_| ItemVisibility::Public); - let is_private = empty().map(|_| FunctionVisibility::Private); + let is_private = empty().map(|_| ItemVisibility::Private); choice((is_pub_crate, is_pub, is_private)) } -/// function_modifiers: 'unconstrained'? (visibility)? 'open'? +/// function_modifiers: 'unconstrained'? (visibility)? /// /// returns (is_unconstrained, visibility, is_open) for whether each keyword was present -fn function_modifiers() -> impl NoirParser<(bool, FunctionVisibility, bool)> { +fn function_modifiers() -> impl NoirParser<(bool, ItemVisibility)> { keyword(Keyword::Unconstrained) .or_not() .then(visibility_modifier()) - .then(keyword(Keyword::Open).or_not()) - .map(|((unconstrained, visibility), open)| { - (unconstrained.is_some(), visibility, open.is_some()) - }) + .map(|(unconstrained, visibility)| (unconstrained.is_some(), visibility)) } /// non_empty_ident_list: ident ',' non_empty_ident_list diff --git a/compiler/noirc_frontend/src/parser/parser/traits.rs b/compiler/noirc_frontend/src/parser/parser/traits.rs index 0d72fbd5303..1e2a6b4d65d 100644 --- a/compiler/noirc_frontend/src/parser/parser/traits.rs +++ b/compiler/noirc_frontend/src/parser/parser/traits.rs @@ -11,7 +11,7 @@ use crate::{ ParserErrorReason, TopLevelStatement, }, token::{Keyword, Token}, - Expression, FunctionVisibility, NoirTrait, NoirTraitImpl, TraitBound, TraitImplItem, TraitItem, + Expression, ItemVisibility, NoirTrait, NoirTraitImpl, TraitBound, TraitImplItem, TraitItem, UnresolvedTraitConstraint, UnresolvedType, }; @@ -120,15 +120,11 @@ pub(super) fn trait_implementation() -> impl NoirParser { fn trait_implementation_body() -> impl NoirParser> { let function = function::function_definition(true).validate(|mut f, span, emit| { - if f.def().is_internal - || f.def().is_unconstrained - || f.def().is_open - || f.def().visibility != FunctionVisibility::Private - { + if f.def().is_unconstrained || f.def().visibility != ItemVisibility::Private { emit(ParserError::with_reason(ParserErrorReason::TraitImplFunctionModifiers, span)); } // Trait impl functions are always public - f.def_mut().visibility = FunctionVisibility::Public; + f.def_mut().visibility = ItemVisibility::Public; TraitImplItem::Function(f) }); diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index c661cc92eef..3f78bd43ba9 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -81,7 +81,7 @@ mod test { &mut context, program.clone().into_sorted(), root_file_id, - Vec::new(), // No macro processors + &[], // No macro processors )); } (program, context, errors) @@ -535,15 +535,13 @@ mod test { assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); for (err, _file_id) in errors { match &err { - CompilationError::DefinitionError( - DefCollectorErrorKind::MismatchTraitImplementationNumParameters { - actual_num_parameters, - expected_num_parameters, - trait_name, - method_name, - .. - }, - ) => { + CompilationError::TypeError(TypeCheckError::MismatchTraitImplNumParameters { + actual_num_parameters, + expected_num_parameters, + trait_name, + method_name, + .. 
+ }) => { assert_eq!(actual_num_parameters, &1_usize); assert_eq!(expected_num_parameters, &2_usize); assert_eq!(method_name, "default"); @@ -1206,4 +1204,13 @@ fn lambda$f1(mut env$l1: (Field)) -> Field { "#; assert_eq!(get_program_errors(src).len(), 1); } + + #[test] + fn type_aliases_in_entry_point() { + let src = r#" + type Foo = u8; + fn main(_x: Foo) {} + "#; + assert_eq!(get_program_errors(src).len(), 0); + } } diff --git a/compiler/noirc_printable_type/src/lib.rs b/compiler/noirc_printable_type/src/lib.rs index 24f4f275a14..60f233cd86d 100644 --- a/compiler/noirc_printable_type/src/lib.rs +++ b/compiler/noirc_printable_type/src/lib.rs @@ -33,6 +33,8 @@ pub enum PrintableType { length: u64, }, Function { + arguments: Vec, + return_type: Box, env: Box, }, MutableReference { @@ -176,8 +178,8 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { output.push_str("false"); } } - (PrintableValue::Field(_), PrintableType::Function { .. }) => { - output.push_str("<>"); + (PrintableValue::Field(_), PrintableType::Function { arguments, return_type, .. }) => { + output.push_str(&format!("< {:?}>>", arguments, return_type,)); } (_, PrintableType::MutableReference { .. }) => { output.push_str("<>"); @@ -350,7 +352,7 @@ pub fn decode_value( PrintableValue::Struct(struct_map) } - PrintableType::Function { env } => { + PrintableType::Function { env, .. } => { let field_element = field_iterator.next().unwrap(); let func_ref = PrintableValue::Field(field_element); // we want to consume the fields from the environment, but for now they are not actually printed diff --git a/compiler/wasm/package.json b/compiler/wasm/package.json index 67584a2def1..6dfa3215483 100644 --- a/compiler/wasm/package.json +++ b/compiler/wasm/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.24.0", + "version": "0.25.0", "license": "(MIT OR Apache-2.0)", "main": "dist/main.js", "types": "./dist/types/src/index.d.cts", diff --git a/compiler/wasm/src/noir/package.ts b/compiler/wasm/src/noir/package.ts index 81178e6ae96..2856798273a 100644 --- a/compiler/wasm/src/noir/package.ts +++ b/compiler/wasm/src/noir/package.ts @@ -105,7 +105,12 @@ export class Package { handles .filter((handle) => SOURCE_EXTENSIONS.find((ext) => handle.endsWith(ext))) .map(async (file) => { - const suffix = file.replace(this.#srcPath, ''); + // Github deps are directly added to the file manager, which causes them to be missing the absolute path to the source file + // and only include the extraction directory relative to the fm root directory + // This regexp ensures we remove the "real" source path for all dependencies, providing the compiler with what it expects for each source file: + // -> for bin/contract packages + // -> for libs + const suffix = file.replace(new RegExp(`.*${this.#srcPath}`), ''); return { path: this.getType() === 'lib' ? `${alias ? alias : this.#config.package.name}${suffix}` : file, source: (await fm.readFile(file, 'utf-8')).toString(), diff --git a/compiler/wasm/src/types/noir_artifact.ts b/compiler/wasm/src/types/noir_artifact.ts index 832a6ed9bf9..935c99043da 100644 --- a/compiler/wasm/src/types/noir_artifact.ts +++ b/compiler/wasm/src/types/noir_artifact.ts @@ -32,19 +32,16 @@ export interface EventAbi { fields: ABIVariable[]; } -/** The Noir function types. */ -export type NoirFunctionType = 'Open' | 'Secret' | 'Unconstrained'; - /** * The compilation result of an Noir function. */ export interface NoirFunctionEntry { /** The name of the function. 
*/ name: string; - /** The type of the function. */ - function_type: NoirFunctionType; - /** Whether the function is internal. */ - is_internal: boolean; + /** Whether the function is unconstrained. */ + is_unconstrained: boolean; + /** The custom attributes applied to the function. */ + custom_attributes: string[]; /** The ABI of the function. */ abi: Abi; /** The bytecode of the function in base64. */ diff --git a/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr b/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr index 5c0b5a621e0..144bcec0532 100644 --- a/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr +++ b/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr @@ -1 +1 @@ -mod module; \ No newline at end of file +mod module; diff --git a/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr b/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr index 2746c97edf0..f4ad3bff5c9 100644 --- a/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr +++ b/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr @@ -1 +1 @@ -mod foo; \ No newline at end of file +mod foo; diff --git a/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr b/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr index e0c82fb1960..0376cd4cb87 100644 --- a/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr +++ b/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr @@ -1,3 +1,3 @@ pub fn bar(param: Field) -> Field { - dep::std::hash::pedersen_hash([param]) + dep::std::hash::pedersen_hash([param]) } diff --git a/compiler/wasm/test/fixtures/noir-contract/src/main.nr b/compiler/wasm/test/fixtures/noir-contract/src/main.nr index b980af369cf..fc1dc8a5a17 100644 --- a/compiler/wasm/test/fixtures/noir-contract/src/main.nr +++ b/compiler/wasm/test/fixtures/noir-contract/src/main.nr @@ -5,8 +5,7 @@ contract TestContract { [foo::bar(param), param + pub_param] } - open fn openFunction() -> pub Field { + fn someFunction() -> pub Field { 42 } - } diff --git a/cspell.json b/cspell.json index a96e3de901a..d961b600f40 100644 --- a/cspell.json +++ b/cspell.json @@ -154,6 +154,8 @@ "sdiv", "secp256k1", "secp256r1", + "Secpk", + "Secpr", "signedness", "signorecello", "smol", diff --git a/deny.toml b/deny.toml index 72150f08a3c..578f8427263 100644 --- a/deny.toml +++ b/deny.toml @@ -2,11 +2,13 @@ # More documentation for the advisories section can be found here: # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html [advisories] -vulnerability = "deny" -unmaintained = "warn" -unsound = "warn" +version = 2 yanked = "warn" -notice = "warn" + +ignore = [ + "RUSTSEC-2020-0168", # mach unmaintained + "RUSTSEC-2020-0016" # net2 unmaintained +] # This section is considered when running `cargo deny check bans`. 
# More documentation about the 'bans' section can be found here: @@ -32,9 +34,8 @@ skip = [] skip-tree = [] [licenses] -unlicensed = "deny" +version = 2 confidence-threshold = 0.9 -# copyleft = "deny" # List of explicitly allowed licenses # See https://spdx.org/licenses/ for list of possible licenses diff --git a/docs/.markdownlint.json b/docs/.markdownlint.json new file mode 100644 index 00000000000..40896b4542f --- /dev/null +++ b/docs/.markdownlint.json @@ -0,0 +1,3 @@ +{ + "no-missing-space-atx": false +} diff --git a/docs/docs/noir/standard_library/bigint.md b/docs/docs/noir/standard_library/bigint.md new file mode 100644 index 00000000000..7d7931840cf --- /dev/null +++ b/docs/docs/noir/standard_library/bigint.md @@ -0,0 +1,102 @@ +--- +title: Big Integers +description: How to use big integers from Noir standard library +keywords: + [ + Big Integer, + Noir programming language, + Noir libraries, + ] +--- + +The BigInt module in the standard library exposes classes of integers which do not fit (well) into a Noir native field. It implements modular arithmetic, modulo a 'big' prime number. + +:::note + +The module can currently be considered as `Field`s with fixed modulo sizes used by a set of elliptic curves, in addition to just the native curve. [More work](https://github.com/noir-lang/noir/issues/510) is needed to achieve arbitrarily sized big integers. + +::: + +Currently, 6 classes of integers (i.e. 'big' prime numbers) are available in the module, namely: + +- BN254 Fq: Bn254Fq +- BN254 Fr: Bn254Fr +- Secp256k1 Fq: Secpk1Fq +- Secp256k1 Fr: Secpk1Fr +- Secp256r1 Fr: Secpr1Fr +- Secp256r1 Fq: Secpr1Fq + +Here, XXX Fq and XXX Fr denote respectively the order of the base and scalar fields of the (usual) elliptic curve XXX. +For instance, the big integer 'Secpk1Fq' in the standard library refers to integers modulo $2^{256}-2^{32}-977$. + +Feel free to explore the source code for the other primes: + +#include_code curve_order_base noir_stdlib/src/bigint.nr rust + +## Example usage + +A common use-case is constructing a big integer from its bytes representation and performing arithmetic operations on it: + +#include_code big_int_example test_programs/execution_success/bigint/src/main.nr rust + +## Methods + +The available operations for each big integer are: + +### from_le_bytes + +Construct a big integer from its little-endian bytes representation. Example: + +```rust +let a = Secpk1Fq::from_le_bytes([x, y, 0, 45, 2]); +``` + +### to_le_bytes + +Return the little-endian bytes representation of a big integer. Example: + +```rust +let bytes = a.to_le_bytes(); +``` + +### add + +Add two big integers. Example: + +```rust +let sum = a + b; +``` + +### sub + +Subtract two big integers. Example: + +```rust +let difference = a - b; +``` + +### mul + +Multiply two big integers. Example: + +```rust +let product = a * b; +``` + +### div + +Divide two big integers. Note that division is field division and not Euclidean division. Example: + +```rust +let quotient = a / b; +``` + +### eq + +Compare two big integers.
Example: + +```rust +let are_equal = a == b; +``` diff --git a/docs/docs/noir/standard_library/containers/hashmap.md b/docs/docs/noir/standard_library/containers/hashmap.md new file mode 100644 index 00000000000..093b6d38d11 --- /dev/null +++ b/docs/docs/noir/standard_library/containers/hashmap.md @@ -0,0 +1,270 @@ +--- +title: HashMap +keywords: [noir, map, hash, hashmap] +sidebar_position: 1 +--- + +`HashMap` is used to efficiently store and look up key-value pairs. + +`HashMap` is a bounded type which can store anywhere from zero to `MaxLen` total elements. +Note that due to hash collisions, the actual maximum number of elements stored by any particular +hashmap is likely lower than `MaxLen`. This is true even with cryptographic hash functions since +every hash value is reduced modulo `MaxLen`. + +When creating `HashMap`s, the `MaxLen` generic should always be specified if it is not already +known. Otherwise, the compiler may infer a different value for `MaxLen` (such as zero), which +will likely change the result of the program. This behavior is set to become an error in future +versions instead. + +Example: + +```rust +// Create a mapping from Fields to u32s with a maximum length of 12 +// using a pedersen hash +let mut map: HashMap> = HashMap::default(); + +map.insert(1, 2); +map.insert(3, 4); + +let two = map.get(1).unwrap(); +``` + +## Methods + +### default + +#include_code default noir_stdlib/src/collections/map.nr rust + +Creates a fresh, empty HashMap. + +When using this function, always make sure to specify the maximum size of the hash map. + +This is the same `default` from the `Default` implementation given further below. It is +repeated here for convenience since it is the recommended way to create a hashmap. + +Example: + +#include_code default_example test_programs/execution_success/hashmap/src/main.nr rust + +Because `HashMap` has so many generic arguments that are likely to be the same throughout +your program, it may be helpful to create a type alias: + +#include_code type_alias test_programs/execution_success/hashmap/src/main.nr rust + +### with_hasher + +#include_code with_hasher noir_stdlib/src/collections/map.nr rust + +Creates a hashmap with an existing `BuildHasher`. This can be used to ensure multiple +hashmaps are created with the same hasher instance. + +Example: + +#include_code with_hasher_example test_programs/execution_success/hashmap/src/main.nr rust + +### get + +#include_code get noir_stdlib/src/collections/map.nr rust + +Retrieves a value from the hashmap, returning `Option::none()` if it was not found. + +Example: + +#include_code get_example test_programs/execution_success/hashmap/src/main.nr rust + +### insert + +#include_code insert noir_stdlib/src/collections/map.nr rust + +Inserts a new key-value pair into the map. If the key was already in the map, its +previous value will be overridden with the newly provided one. + +Example: + +#include_code insert_example test_programs/execution_success/hashmap/src/main.nr rust + +### remove + +#include_code remove noir_stdlib/src/collections/map.nr rust + +Removes the given key-value pair from the map. If the key was not already present +in the map, this does nothing. + +Example: + +#include_code remove_example test_programs/execution_success/hashmap/src/main.nr rust + +### is_empty + +#include_code is_empty noir_stdlib/src/collections/map.nr rust + +Returns true if the hash map contains no elements, i.e. its length is zero.
+ +Example: + +#include_code is_empty_example test_programs/execution_success/hashmap/src/main.nr rust + +### len + +#include_code len noir_stdlib/src/collections/map.nr rust + +Returns the current length of this hash map. + +Example: + +#include_code len_example test_programs/execution_success/hashmap/src/main.nr rust + +### capacity + +#include_code capacity noir_stdlib/src/collections/map.nr rust + +Returns the maximum capacity of this hashmap. This is always equal to the capacity +specified in the hashmap's type. + +Unlike hashmaps in general purpose programming languages, hashmaps in Noir have a +static capacity that does not increase as the map grows larger. Thus, this capacity +is also the maximum possible element count that can be inserted into the hashmap. +Due to hash collisions (modulo the hashmap length), it is likely the actual maximum +element count will be lower than the full capacity. + +Example: + +#include_code capacity_example test_programs/execution_success/hashmap/src/main.nr rust + +### clear + +#include_code clear noir_stdlib/src/collections/map.nr rust + +Clears the hashmap, removing all key-value pairs from it. + +Example: + +#include_code clear_example test_programs/execution_success/hashmap/src/main.nr rust + +### contains_key + +#include_code contains_key noir_stdlib/src/collections/map.nr rust + +True if the hashmap contains the given key. Unlike `get`, this will not also return +the value associated with the key. + +Example: + +#include_code contains_key_example test_programs/execution_success/hashmap/src/main.nr rust + +### entries + +#include_code entries noir_stdlib/src/collections/map.nr rust + +Returns a vector of each key-value pair present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. + +Example: + +#include_code entries_example test_programs/execution_success/hashmap/src/main.nr rust + +### keys + +#include_code keys noir_stdlib/src/collections/map.nr rust + +Returns a vector of each key present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. + +Example: + +#include_code keys_example test_programs/execution_success/hashmap/src/main.nr rust + +### values + +#include_code values noir_stdlib/src/collections/map.nr rust + +Returns a vector of each value present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. + +Example: + +#include_code values_example test_programs/execution_success/hashmap/src/main.nr rust + +### iter_mut + +#include_code iter_mut noir_stdlib/src/collections/map.nr rust + +Iterates through each key-value pair of the HashMap, setting each key-value pair to the +result returned from the given function. + +Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated +through. If this is not desired, use `iter_values_mut` if only values need to be mutated, +or `entries` if neither keys nor values need to be mutated. + +The iteration order is left unspecified. As a result, if two keys are mutated to become +equal, which of the two values that will be present for the key in the resulting map is also unspecified. + +Example: + +#include_code iter_mut_example test_programs/execution_success/hashmap/src/main.nr rust + +### iter_keys_mut + +#include_code iter_keys_mut noir_stdlib/src/collections/map.nr rust + +Iterates through the HashMap, mutating each key to the result returned from +the given function. 
+ +Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated +through. If only iteration is desired and the keys are not intended to be mutated, +prefer using `entries` instead. + +The iteration order is left unspecified. As a result, if two keys are mutated to become +equal, which of the two values that will be present for the key in the resulting map is also unspecified. + +Example: + +#include_code iter_keys_mut_example test_programs/execution_success/hashmap/src/main.nr rust + +### iter_values_mut + +#include_code iter_values_mut noir_stdlib/src/collections/map.nr rust + +Iterates through the HashMap, applying the given function to each value and mutating the +value to equal the result. This function is more efficient than `iter_mut` and `iter_keys_mut` +because the keys are untouched and the underlying hashmap thus does not need to be reordered. + +Example: + +#include_code iter_values_mut_example test_programs/execution_success/hashmap/src/main.nr rust + +### retain + +#include_code retain noir_stdlib/src/collections/map.nr rust + +Retains only the key-value pairs for which the given function returns true. +Any key-value pairs for which the function returns false will be removed from the map. + +Example: + +#include_code retain_example test_programs/execution_success/hashmap/src/main.nr rust + +## Trait Implementations + +### default + +#include_code default noir_stdlib/src/collections/map.nr rust + +Constructs an empty HashMap. + +Example: + +#include_code default_example test_programs/execution_success/hashmap/src/main.nr rust + +### eq + +#include_code eq noir_stdlib/src/collections/map.nr rust + +Checks if two HashMaps are equal. + +Example: + +#include_code eq_example test_programs/execution_success/hashmap/src/main.nr rust diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/eddsa.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/eddsa.mdx index 99b7f830a20..c2c0624dfad 100644 --- a/docs/docs/noir/standard_library/cryptographic_primitives/eddsa.mdx +++ b/docs/docs/noir/standard_library/cryptographic_primitives/eddsa.mdx @@ -15,6 +15,14 @@ Verifier for EdDSA signatures fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool ``` +It is also possible to specify the hash algorithm used for the signature by using the `eddsa_verify_with_hasher` function with a parameter implementing the Hasher trait. 
For instance, if you want to use Poseidon2 instead, you can do the following: +```rust +use dep::std::hash::poseidon2::Poseidon2Hasher; + +let mut hasher = Poseidon2Hasher::default(); +eddsa_verify_with_hasher(pub_key_a.x, pub_key_a.y, s_a, r8_a.x, r8_a.y, msg, &mut hasher); +``` + + ## eddsa::eddsa_to_pub + diff --git a/docs/scripts/codegen_nargo_reference.sh b/docs/scripts/codegen_nargo_reference.sh index 4ff7d43d142..6a9fda9420b 100755 --- a/docs/scripts/codegen_nargo_reference.sh +++ b/docs/scripts/codegen_nargo_reference.sh @@ -30,4 +30,4 @@ sidebar_position: 0 --- " > $NARGO_REFERENCE -cargo run -F codegen-docs -- info >> $NARGO_REFERENCE +cargo run --bin nargo -F codegen-docs -- info >> $NARGO_REFERENCE diff --git a/docs/versioned_docs/version-v0.25.0/explainers/explainer-oracle.md b/docs/versioned_docs/version-v0.25.0/explainers/explainer-oracle.md new file mode 100644 index 00000000000..b84ca5dd986 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/explainers/explainer-oracle.md @@ -0,0 +1,57 @@ +--- +title: Oracles +description: This guide provides an in-depth understanding of how Oracles work in Noir programming. Learn how to use outside calculations in your programs, constrain oracles, and understand their uses and limitations. +keywords: + - Noir Programming + - Oracles + - JSON-RPC + - Foreign Call Handlers + - Constrained Functions + - Blockchain Programming +sidebar_position: 1 +--- + +If you've seen "The Matrix", you may recall "The Oracle" as Gloria Foster smoking cigarettes and baking cookies. While she appears to "know things", she is actually providing a calculation of a pre-determined future. Noir Oracles are similar, in a way. They don't calculate the future (yet), but they allow you to use outside calculations in your programs. + +![matrix oracle prediction](@site/static/img/memes/matrix_oracle.jpeg) + +A Noir program is usually self-contained. You can pass certain inputs to it, and it will generate a deterministic output for those inputs. But what if you wanted to defer some calculation to an outside process or source? + +Oracles are functions that provide this feature. + +## Use cases + +An example usage for Oracles is proving something on-chain. For example, proving that the ETH-USDC quote was below a certain target at a certain block time. Or even making more complex proofs like proving the ownership of an NFT as an anonymous login method. + +Another interesting use case is to defer expensive calculations to be made outside of the Noir program, and then constrain the result; similar to the use of [unconstrained functions](../noir/concepts//unconstrained.md). + +In short, anything that can be constrained in a Noir program but needs to be fetched from an external source is a great candidate to be used in oracles. + +## Constraining oracles + +Just like in The Matrix, Oracles are powerful. But with great power comes great responsibility. Just because you're using them in a Noir program doesn't mean they're true. Noir has no superpowers. If you want to prove that Portugal won the Euro Cup 2016, you're still relying on potentially untrusted information. + +To give a concrete example, Alice wants to log in to the [NounsDAO](https://nouns.wtf/) forum with her username "noir_nouner" by proving she owns a noun without revealing her Ethereum address. Her Noir program could have an oracle call like this: + +```rust +#[oracle(getNoun)] +unconstrained fn get_noun(address: Field) -> Field +``` + +This oracle could naively resolve with the number of Nouns she possesses.
However, it is useless as a trusted source, as the oracle could resolve to anything Alice wants. In order to make this oracle call actually useful, Alice would need to constrain the response from the oracle, by proving that her address and the noun count belong to the state tree of the contract. + +In short, **Oracles don't prove anything. Your Noir program does.** + +:::danger + +If you don't constrain the return of your oracle, you could be opening a clear attack vector on your Noir program. Make double-triple sure that the return of an oracle call is constrained! + +::: + +## How to use Oracles + +On the CLI, Nargo resolves oracles by making JSON RPC calls, which means it requires an RPC node to be running. + +In JavaScript, NoirJS accepts and resolves arbitrary call handlers (that is, not limited to JSON) as long as they match the expected types the developer defines. Refer to [Foreign Call Handler](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) to learn more about NoirJS's call handling. + +If you want to build using oracles, follow through to the [oracle guide](../how_to/how-to-oracles.md) for a simple example on how to do that. diff --git a/docs/versioned_docs/version-v0.25.0/explainers/explainer-recursion.md b/docs/versioned_docs/version-v0.25.0/explainers/explainer-recursion.md new file mode 100644 index 00000000000..18846176ca7 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/explainers/explainer-recursion.md @@ -0,0 +1,176 @@ +--- +title: Recursive proofs +description: Explore the concept of recursive proofs in Zero-Knowledge programming. Understand how recursion works in Noir, a language for writing smart contracts on the EVM blockchain. Learn through practical examples like Alice and Bob's guessing game, Charlie's recursive merkle tree, and Daniel's reusable components. Discover how to use recursive proofs to optimize computational resources and improve efficiency. + +keywords: + [ + "Recursive Proofs", + "Zero-Knowledge Programming", + "Noir", + "EVM Blockchain", + "Smart Contracts", + "Recursion in Noir", + "Alice and Bob Guessing Game", + "Recursive Merkle Tree", + "Reusable Components", + "Optimizing Computational Resources", + "Improving Efficiency", + "Verification Key", + "Aggregation", + "Recursive zkSNARK schemes", + "PLONK", + "Proving and Verification Keys" + ] +sidebar_position: 1 +pagination_next: how_to/how-to-recursion +--- + +In programming, we tend to think of recursion as something calling itself. A classic example would be the calculation of the factorial of a number: + +```js +function factorial(n) { + if (n === 0 || n === 1) { + return 1; + } else { + return n * factorial(n - 1); + } +} +``` + +In this case, while `n` is not `0` or `1`, this function will keep calling itself until it hits the base case, bubbling up the result on the call stack: + +```md + Is `n` 1? <--------- + /\ / + / \ n = n -1 + / \ / + Yes No -------- +``` + +In Zero-Knowledge, recursion has some similarities. + +It is not a Noir function calling itself, but a proof being used as an input to another circuit. In short, you verify one proof *inside* another proof, returning the proof that both proofs are valid. + +This means that, given enough computational resources, you can prove the correctness of any arbitrary number of proofs in a single proof.
This could be useful to design state channels (for which a common example would be [Bitcoin's Lightning Network](https://en.wikipedia.org/wiki/Lightning_Network)), to save on gas costs by settling one proof on-chain, or simply to make business logic less dependent on a consensus mechanism. + +## Examples + +Let us look at some of these examples + +### Alice and Bob - Guessing game + +Alice and Bob are friends, and they like guessing games. They want to play a guessing game online, but for that, they need a trusted third-party that knows both of their secrets and finishes the game once someone wins. + +So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob will generate a ZK proof stating whether she succeeded or failed. + +This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. + +As a solution, Alice proposes the following: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". + +She can then generate a proof that she verified his proof, and so on. + +```md + Did you fail? <-------------------------- + / \ / + / \ n = n -1 + / \ / + Yes No / + | | / + | | / + | You win / + | / + | / +Generate proof of that / + + / + my own guess ---------------- +``` + +### Charlie - Recursive merkle tree + +Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! + +If the vote collector puts all of the votes into a [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree), everyone can prove the verification of two proofs within one proof, as such: + +```md + abcd + __________|______________ + | | + ab cd + _____|_____ ______|______ + | | | | + alice bob charlie daniel +``` + +Doing this recursively allows us to arrive on a final proof `abcd` which if true, verifies the correctness of all the votes. + +### Daniel - Reusable components + +Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit. + +He might find it more efficient to generate a proof for that setup phase separately, and verify that proof recursively in the actual business logic section of his circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. + +## What params do I need + +As you can see in the [recursion reference](noir/standard_library/recursion.md), a simple recursive proof requires: + +- The proof to verify +- The Verification Key of the circuit that generated the proof +- A hash of this verification key, as it's needed for some backends +- The public inputs for the proof + +:::info + +Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather an aggregation object is built over the public inputs. + +So, taking the example of Alice and Bob and their guessing game: + +- Alice makes her guess. Her proof is *not* recursive: it doesn't verify any proof within it! 
It's just a standard `assert(x != y)` circuit +- Bob verifies Alice's proof and makes his own guess. In this circuit, he doesn't exactly *prove* the verification of Alice's proof. Instead, he *aggregates* his proof to Alice's proof. The actual verification is done when the full proof is verified, for example when using `nargo verify` or through the verifier smart contract. + +We can imagine recursive proofs as a [relay race](https://en.wikipedia.org/wiki/Relay_race). The first runner doesn't have to receive the baton from anyone else, as he/she already starts with it. But when his/her turn is over, the next runner needs to receive it, run a bit more, and pass it along. Even though every runner could theoretically verify the baton mid-run (why not? 🏃🔍), only at the end of the race does the referee verify that the whole race is valid. + +::: + +## Some architecture + +As with everything in computer science, there's no one-size-fits-all. But there are some patterns that can help with understanding and implementing them. To give three examples: + +### Adding some logic to a proof verification + +This would be an approach for something like our guessing game, where proofs are sent back and forth and are verified by each opponent. This circuit would be divided into two sections: + +- A `recursive verification` section, which would be just the call to `std::verify_proof`, and that would be skipped on the first move (since there's no proof to verify) +- A `guessing` section, which is basically the logic part where the actual guessing happens + +In such a situation, and assuming Alice is first, she would skip the first part and try to guess Bob's number. Bob would then verify her proof on the first section of his run, and try to guess Alice's number on the second part, and so on. A rough sketch of this two-section circuit is given further down this page. + +### Aggregating proofs + +In some one-way interaction situations, recursion would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere. + +To give a practical example, a barman wouldn't need to verify a "proof-of-age" on-chain every time he serves alcohol to a customer. Instead, the architecture would comprise two circuits: + +- A `main`, non-recursive circuit with some logic +- A `recursive` circuit meant to verify two proofs in one proof + +The customer's proofs would be intermediate, and made on their phones, and the barman could just verify them locally. He would then aggregate them into a final proof sent on-chain (or elsewhere) at the end of the day. + +### Recursively verifying different circuits + +Nothing prevents you from verifying different circuits in a recursive proof, for example: + +- A `circuit1` circuit +- A `circuit2` circuit +- A `recursive` circuit + +In this example, a regulator could verify that taxes were paid for a specific purchase by aggregating both a `payer` circuit (proving that a purchase was made and taxes were paid), and a `receipt` circuit (proving that the payment was received). + +## How fast is it + +At the time of writing, verifying recursive proofs is surprisingly fast. This is because most of the time is spent on generating the verification key that will be used to generate the next proof. So you are able to cache the verification key and reuse it later. + +Currently, Noir JS packages don't expose the functionality of loading proving and verification keys, but that feature exists in the underlying `bb.js` package.
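+
+To make the first pattern above ("Adding some logic to a proof verification") more concrete, here is a minimal sketch of such a two-section circuit. Treat it as an illustration only: the array sizes, the extra `my_guess` input and the final assertion are assumptions made for this example rather than a canonical API, so check the [recursion reference](noir/standard_library/recursion.md) for the exact signature of `std::verify_proof`.
+
+```rust
+use dep::std;
+
+// Illustrative sizes only; verification key and proof lengths depend on the proving backend.
+fn main(
+    verification_key: [Field; 114],
+    proof: [Field; 93],
+    public_inputs: [Field; 1],
+    key_hash: Field,
+    my_guess: Field
+) {
+    // Section 1: recursively verify the opponent's proof. In the guessing game,
+    // a first mover would skip this section since there is no proof to verify yet.
+    std::verify_proof(
+        verification_key.as_slice(),
+        proof.as_slice(),
+        public_inputs.as_slice(),
+        key_hash
+    );
+
+    // Section 2: the guessing logic itself, here reduced to a single constraint on the guess.
+    assert(my_guess != 0);
+}
+```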
+ +## How can I try it + +Learn more about using recursion in Nargo and NoirJS in the [how-to guide](../how_to/how-to-recursion.md) and see a full example in [noir-examples](https://github.com/noir-lang/noir-examples). diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/_category_.json b/docs/versioned_docs/version-v0.25.0/getting_started/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/_category_.json b/docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/_category_.json new file mode 100644 index 00000000000..23b560f610b --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/index.md b/docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/index.md new file mode 100644 index 00000000000..743c4d8d634 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/index.md @@ -0,0 +1,142 @@ +--- +title: Creating a Project +description: + Learn how to create and verify your first Noir program using Nargo, a programming language for + zero-knowledge proofs. +keywords: + [ + Nargo, + Noir, + zero-knowledge proofs, + programming language, + create Noir program, + verify Noir program, + step-by-step guide, + ] +sidebar_position: 1 + +--- + +Now that we have installed Nargo, it is time to make our first hello world program! + +## Create a Project Directory + +Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home +directory to house our Noir programs. + +For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by +running: + +```sh +mkdir ~/projects +cd ~/projects +``` + +## Create Our First Nargo Project + +Now that we are in the projects directory, create a new Nargo project by running: + +```sh +nargo new hello_world +``` + +> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for +> demonstration. +> +> In production, the common practice is to name the project folder as `circuits` for better +> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, +> `test`). + +A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and +_Nargo.toml_ which contain the source code and environmental options of your Noir program +respectively. + +### Intro to Noir Syntax + +Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +The first line of the program specifies the program's inputs: + +```rust +x : Field, y : pub Field +``` + +Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the +keyword `pub` (e.g. `y`). To learn more about private and public values, check the +[Data Types](../../noir/concepts/data_types/index.md) section. + +The next line of the program specifies its body: + +```rust +assert(x != y); +``` + +The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. 
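+
+As a small aside (this is not part of the generated project), `assert` can also take an optional message as a second argument, which is reported when the assertion fails:
+
+```rust
+fn main(x : Field, y : pub Field) {
+    // Fails with the given message whenever x equals y.
+    assert(x != y, "x and y must be different");
+}
+```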
+ +For more Noir syntax, check the [Language Concepts](../../noir/concepts/comments.md) chapter. + +## Build In/Output Files + +Change directory into _hello_world_ and build in/output files for your Noir program by running: + +```sh +cd hello_world +nargo check +``` + +Two additional files would be generated in your project directory: + +_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. + +## Prove Our Noir Program + +Now that the project is set up, we can create a proof of correct execution of our Noir program. + +Fill in input values for execution in the _Prover.toml_ file. For example: + +```toml +x = "1" +y = "2" +``` + +Prove the valid execution of your Noir program: + +```sh +nargo prove +``` + +A new folder _proofs_ would then be generated in your project directory, containing the proof file +`.proof`, where the project name is defined in Nargo.toml. + +The _Verifier.toml_ file would also be updated with the public values computed from program +execution (in this case the value of `y`): + +```toml +y = "0x0000000000000000000000000000000000000000000000000000000000000002" +``` + +> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. + +## Verify Our Noir Program + +Once a proof is generated, we can verify correct execution of our Noir program by verifying the +proof file. + +Verify your proof by running: + +```sh +nargo verify +``` + +The verification will complete in silence if it is successful. If it fails, it will log the +corresponding error instead. + +Congratulations, you have now created and verified a proof for your very first Noir program! + +In the [next section](./project_breakdown.md), we will go into more detail on each step performed. diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/project_breakdown.md b/docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/project_breakdown.md new file mode 100644 index 00000000000..6160a102c6c --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/hello_noir/project_breakdown.md @@ -0,0 +1,199 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +sidebar_position: 2 +--- + +This section breaks down our hello world program from the previous section. We elaborate on the project +structure and what the `prove` and `verify` commands did. + +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Verifier.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. + +### Verifier.toml + +_Verifier.toml_ contains public in/output values computed when executing the Noir program. + +### Nargo.toml + +_Nargo.toml_ contains the environmental options of your project. 
It contains a "package" section and a "dependencies" section. + +Example Nargo.toml: + +```toml +[package] +name = "noir_starter" +type = "bin" +authors = ["Alice"] +compiler_version = "0.9.0" +description = "Getting started with Noir" +entry = "circuit/main.nr" +license = "MIT" + +[dependencies] +ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} +``` + +Nargo.toml for a [workspace](../../noir/modules_packages_crates/workspaces.md) will look a bit different. For example: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +#### Package section + +The package section defines a number of fields including: + +- `name` (**required**) - the name of the package +- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether it's a binary, library or Aztec contract +- `authors` (optional) - authors of the project +- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follows [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) +- `description` (optional) +- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) +- `backend` (optional) +- `license` (optional) + +#### Dependencies section + +This is where you will specify any dependencies for your project. See the [Dependencies page](../../noir/modules_packages_crates/dependencies.md) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. + +### main.nr + +The _main.nr_ file contains a `main` method; this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the +prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs when +verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is +constrained by the proof of the execution of said program (i.e. if the condition was not met, the +verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is the file which the prover uses to supply their witness values (both private and +public). + +In our hello world program, the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo prove` is executed, two processes happen: + +1. Noir creates a proof that `x`, which holds the value of `1`, and `y`, which holds the value of `2`, + are not equal. This inequality constraint is due to the line `assert(x != y)`. + +2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format.
+ +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. + +```rust +// main.nr +struct Foo { + bar: Field, + baz: Field, +} + +fn main(foos: [Foo; 3]) -> pub Field { + foos[2].bar + foos[2].baz +} +``` + +Prover.toml: + +```toml +[[foos]] # foos[0] +bar = 0 +baz = 0 + +[[foos]] # foos[1] +bar = 0 +baz = 0 + +[[foos]] # foos[2] +bar = 1 +baz = 2 +``` + +#### Custom toml files + +You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. + +This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: + +```bash +nargo prove +``` + +This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: + +```bash +nargo prove -p OtherProver +``` + +## Verifying a Proof + +When the command `nargo verify` is executed, two processes happen: + +1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) + +2. If that file is found, the proof's validity is checked + +> **Note:** The validity of the proof is linked to the current Noir program; if the program is +> changed and the verifier verifies the proof, it will fail because the proof is not valid for the +> _modified_ Noir program. + +In production, the prover and the verifier are usually two separate entities. A prover would +retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the +verifier. The verifier would then retrieve the public inputs, usually from external sources, and +verify the validity of the proof against it. + +Take a private asset transfer as an example: + +A person using a browser as the prover would retrieve private inputs locally (e.g. the user's private key) and +public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof +and submit it to the verifier smart contract. + +The verifier contract would then draw the user's encrypted balance directly from the blockchain and +verify the proof submitted against it. If the verification passes, additional functions in the +verifier contract could trigger (e.g. approve the asset transfer). + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/installation/_category_.json b/docs/versioned_docs/version-v0.25.0/getting_started/installation/_category_.json new file mode 100644 index 00000000000..0c02fb5d4d7 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/installation/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 0, + "label": "Install Nargo", + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/installation/index.md b/docs/versioned_docs/version-v0.25.0/getting_started/installation/index.md new file mode 100644 index 00000000000..4ef86aa5914 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/installation/index.md @@ -0,0 +1,48 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs. 
This page is a quick guide on how to install Nargo through the most common and easy method, noirup +keywords: [ + Nargo + Noir + Rust + Cargo + Noirup + Installation + Terminal Commands + Version Check + Nightlies + Specific Versions + Branches + Noirup Repository +] +pagination_next: getting_started/hello_noir/index +--- + +`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. + +With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. + +Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. + +## Installing Noirup + +Open a terminal on your machine, and write: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done. That's it. You should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches. Check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. + +Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/installation/other_install_methods.md b/docs/versioned_docs/version-v0.25.0/getting_started/installation/other_install_methods.md new file mode 100644 index 00000000000..a35e34aaf9c --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/installation/other_install_methods.md @@ -0,0 +1,254 @@ +--- +title: Alternative Install Methods +description: There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains other methods that don't rely on noirup, such as compiling from source, installing from binaries, and using WSL for windows +keywords: [ + Installation + Nargo + Noirup + Binaries + Compiling from Source + WSL for Windows + macOS + Linux + Nix + Direnv + Shell & editor experience + Building and testing + Uninstalling Nargo + Noir vs code extension, + ] +sidebar_position: 1 +--- + +## Encouraged Installation Method: Noirup + +Noirup is the endorsed method for installing Nargo, streamlining the process of fetching binaries or compiling from source. It supports a range of options to cater to your specific needs, from nightly builds and specific versions to compiling from various sources. + +### Installing Noirup + +First, ensure you have `noirup` installed: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +### Fetching Binaries + +With `noirup`, you can easily switch between different Nargo versions, including nightly builds: + +- **Nightly Version**: Install the latest nightly build. + + ```sh + noirup --version nightly + ``` + +- **Specific Version**: Install a specific version of Nargo. + ```sh + noirup --version + ``` + +### Compiling from Source + +`noirup` also enables compiling Nargo from various sources: + +- **From a Specific Branch**: Install from the latest commit on a branch. + + ```sh + noirup --branch + ``` + +- **From a Fork**: Install from the main branch of a fork. + + ```sh + noirup --repo + ``` + +- **From a Specific Branch in a Fork**: Install from a specific branch in a fork. 
+ + ```sh + noirup --repo --branch + ``` + +- **From a Specific Pull Request**: Install from a specific PR. + + ```sh + noirup --pr + ``` + +- **From a Specific Commit**: Install from a specific commit. + + ```sh + noirup -C + ``` + +- **From Local Source**: Compile and install from a local directory. + ```sh + noirup --path ./path/to/local/source + ``` + +## Alternate Installation Methods (No Longer Recommended) + +While the following methods are available, they are no longer recommended. We advise using noirup for a more efficient and flexible installation experience. + +However, there are other methods for installing Nargo: + +- [Binaries](#option-1-installing-from-binaries) +- [Compiling from Source](#option-2-compile-from-source) +- [WSL for Windows](#option-3-wsl-for-windows) + +### Option 1: Installing from Binaries + +See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous +platform specific binaries. + +#### Step 1 + +Paste and run the following in the terminal to extract and install the binary: + +> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend +> `sudo` and re-run it. + +##### macOS (Apple Silicon) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-aarch64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### macOS (Intel) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### Linux (Bash) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ +echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ +source ~/.bashrc +``` + +#### Step 2 + +Check if the installation was successful by running `nargo --version`. You should get a version number. + +> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from +> Finder. Close the new terminal popped up and `nargo` should now be accessible. + +### Option 2: Compile from Source + +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). + +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
+ +#### Setting up your environment + +For the best experience, please follow these instructions to setup your environment: + +1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. +2. Create the file `~/.config/nix/nix.conf` with the contents: + +```ini +experimental-features = nix-command +extra-experimental-features = flakes +``` + +3. Install direnv into your Nix profile by running: + +```sh +nix profile install nixpkgs#direnv +``` + +4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). + 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. +5. Restart your shell. + +#### Shell & editor experience + +Now that your environment is set up, you can get to work on the project. + +1. Clone the repository, such as: + +```sh +git clone git@github.com:noir-lang/noir +``` + +> Replacing `noir` with whichever repository you want to work on. + +2. Navigate to the directory: + +```sh +cd noir +``` + +> Replacing `noir` with whichever repository you cloned. + +3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: + +```sh +direnv allow +``` + +4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. + +5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): + +```sh +code . +``` + +6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. + +#### Building and testing + +Assuming you are using `direnv` to populate your environment, building and testing the project can be done +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.73.0 at the time of this writing. + +If you want to build the entire project in an isolated sandbox, you can use Nix commands: + +1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. +2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. + +#### Without `direnv` + +If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. + +Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! + +### Option 3: WSL (for Windows) + +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). + +Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. + +step 2: Follow the [Noirup instructions](#encouraged-installation-method-noirup). 
+ +## Uninstalling Nargo + +### Noirup + +If you installed Nargo with `noirup` or through directly downloading binaries, you can uninstall Nargo by removing the files in `~/.nargo`, `~/nargo`, and `~/noir_cache`. This ensures that all installed binaries, configurations, and cache related to Nargo are fully removed from your system. + +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` + +### Nix + +If you installed Nargo with Nix or compiled it from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. + +```bash +rm ~/.nix-profile/bin/nargo +``` diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/tooling/_category_.json b/docs/versioned_docs/version-v0.25.0/getting_started/tooling/_category_.json new file mode 100644 index 00000000000..55804c03a71 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/tooling/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 2, + "label": "Tooling", + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/tooling/index.mdx b/docs/versioned_docs/version-v0.25.0/getting_started/tooling/index.mdx new file mode 100644 index 00000000000..ac480f3c9f5 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/tooling/index.mdx @@ -0,0 +1,38 @@ +--- +title: Tooling +Description: This section provides information about the various tools and utilities available for Noir development. It covers the Noir playground, IDE tools, Codespaces, and community projects. +Keywords: [Noir, Development, Playground, IDE Tools, Language Service Provider, VS Code Extension, Codespaces, noir-starter, Community Projects, Awesome Noir Repository, Developer Tooling] +--- + +Noir is meant to be easy to develop with. For that reason, a number of utilities have been put together to ease the development process as much as feasible in the zero-knowledge world. + +## Playground + +The Noir playground is an easy way to test small ideas, share snippets, and integrate in other websites. You can access it at [play.noir-lang.org](https://play.noir-lang.org). + +## IDE tools + +When you install Nargo, you're also installing a Language Service Provider (LSP), which can be used by IDEs to provide syntax highlighting, codelens, warnings, and more. + +The easiest way to use these tools is by installing the [Noir VS Code extension](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +## Codespaces + +Some Noir repos have leveraged Codespaces in order to ease the development process. You can visit the [noir-starter](https://github.com/noir-lang/noir-starter) for an example. + + + +## GitHub Actions + +You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as +installing `noirup` and running tests in your GitHub Action `yml` file. + +See the +[config file in the Noir repo](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) for an example usage. + +## Community projects + +As an open-source project, Noir has received many contributions over time. 
Some of them are related with developer tooling, and you can see some of them in [Awesome Noir repository](https://github.com/noir-lang/awesome-noir#dev-tools) diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/tooling/language_server.md b/docs/versioned_docs/version-v0.25.0/getting_started/tooling/language_server.md new file mode 100644 index 00000000000..81e0356ef8a --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/tooling/language_server.md @@ -0,0 +1,43 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +sidebar_position: 0 +--- + +This section helps you install and configure the Noir Language Server. + +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. + +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! + +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. + +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. 
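+
+As an illustrative sketch only, these options usually correspond to entries like the following in VS Code's `settings.json`. The exact keys are defined by the vscode-noir extension, so treat the names below as assumptions and prefer the extension's settings UI:
+
+```json
+{
+  "noir.enableLSP": true,
+  "noir.nargoFlags": "",
+  "noir.nargoPath": "/absolute/path/to/nargo",
+  "noir.trace.server": "verbose"
+}
+```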
diff --git a/docs/versioned_docs/version-v0.25.0/getting_started/tooling/testing.md b/docs/versioned_docs/version-v0.25.0/getting_started/tooling/testing.md new file mode 100644 index 00000000000..d3e0c522473 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/getting_started/tooling/testing.md @@ -0,0 +1,62 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +sidebar_position: 1 +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} + +``` diff --git a/docs/versioned_docs/version-v0.25.0/how_to/_category_.json b/docs/versioned_docs/version-v0.25.0/how_to/_category_.json new file mode 100644 index 00000000000..23b560f610b --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/how_to/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.25.0/how_to/how-to-oracles.md b/docs/versioned_docs/version-v0.25.0/how_to/how-to-oracles.md new file mode 100644 index 00000000000..0d84d992320 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/how_to/how-to-oracles.md @@ -0,0 +1,280 @@ +--- +title: How to use Oracles +description: Learn how to use oracles in your Noir program with examples in both Nargo and NoirJS. This guide also covers writing a JSON RPC server and providing custom foreign call handlers for NoirJS. +keywords: + - Noir Programming + - Oracles + - Nargo + - NoirJS + - JSON RPC Server + - Foreign Call Handlers +sidebar_position: 1 +--- + +This guide shows you how to use oracles in your Noir program. For the sake of clarity, it assumes that: + +- You have read the [explainer on Oracles](../explainers/explainer-oracle.md) and are comfortable with the concept. +- You have a Noir program to add oracles to. You can create one using the [vite-hardhat starter](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) as a boilerplate. +- You understand the concept of a JSON-RPC server. Visit the [JSON-RPC website](https://www.jsonrpc.org/) if you need a refresher. +- You are comfortable with server-side JavaScript (e.g. Node.js, managing packages, etc.). 
+ +For reference, you can find the snippets used in this tutorial on the [Aztec DevRel Repository](https://github.com/AztecProtocol/dev-rel/tree/main/code-snippets/how-to-oracles). + +## Rundown + +This guide has 3 major steps: + +1. How to modify our Noir program to make use of oracle calls as unconstrained functions +2. How to write a JSON RPC Server to resolve these oracle calls with Nargo +3. How to use them in Nargo and how to provide a custom resolver in NoirJS + +## Step 1 - Modify your Noir program + +An oracle is defined in a Noir program by defining two methods: + +- An unconstrained method - This tells the compiler that it is executing an [unconstrained functions](../noir/concepts//unconstrained.md). +- A decorated oracle method - This tells the compiler that this method is an RPC call. + +An example of an oracle that returns a `Field` would be: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(number: Field) -> Field { } + +unconstrained fn get_sqrt(number: Field) -> Field { + sqrt(number) +} +``` + +In this example, we're wrapping our oracle function in a unconstrained method, and decorating it with `oracle(getSqrt)`. We can then call the unconstrained function as we would call any other function: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); +} +``` + +In the next section, we will make this `getSqrt` (defined on the `sqrt` decorator) be a method of the RPC server Noir will use. + +:::danger + +As explained in the [Oracle Explainer](../explainers/explainer-oracle.md), this `main` function is unsafe unless you constrain its return value. For example: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); + assert(sqrt.pow_32(2) as u64 == input as u64); // <---- constrain the return of an oracle! +} +``` + +::: + +:::info + +Currently, oracles only work with single params or array params. For example: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt([Field; 2]) -> [Field; 2] { } +``` + +::: + +## Step 2 - Write an RPC server + +Brillig will call *one* RPC server. Most likely you will have to write your own, and you can do it in whatever language you prefer. In this guide, we will do it in Javascript. + +Let's use the above example of an oracle that consumes an array with two `Field` and returns their square roots: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(input: [Field; 2]) -> [Field; 2] { } + +unconstrained fn get_sqrt(input: [Field; 2]) -> [Field; 2] { + sqrt(input) +} + +fn main(input: [Field; 2]) { + let sqrt = get_sqrt(input); + assert(sqrt[0].pow_32(2) as u64 == input[0] as u64); + assert(sqrt[1].pow_32(2) as u64 == input[1] as u64); +} +``` + +:::info + +Why square root? + +In general, computing square roots is computationally more expensive than multiplications, which takes a toll when speaking about ZK applications. In this case, instead of calculating the square root in Noir, we are using our oracle to offload that computation to be made in plain. In our circuit we can simply multiply the two values. 
+ +::: + +Now, we should write the correspondent RPC server, starting with the [default JSON-RPC 2.0 boilerplate](https://www.npmjs.com/package/json-rpc-2.0#example): + +```js +import { JSONRPCServer } from "json-rpc-2.0"; +import express from "express"; +import bodyParser from "body-parser"; + +const app = express(); +app.use(bodyParser.json()); + +const server = new JSONRPCServer(); +app.post("/", (req, res) => { + const jsonRPCRequest = req.body; + server.receive(jsonRPCRequest).then((jsonRPCResponse) => { + if (jsonRPCResponse) { + res.json(jsonRPCResponse); + } else { + res.sendStatus(204); + } + }); +}); + +app.listen(5555); +``` + +Now, we will add our `getSqrt` method, as expected by the `#[oracle(getSqrt)]` decorator in our Noir code. It maps through the params array and returns their square roots: + +```js +server.addMethod("getSqrt", async (params) => { + const values = params[0].Array.map(({ inner }) => { + return { inner: `${Math.sqrt(parseInt(inner, 16))}` }; + }); + return { values: [{ Array: values }] }; +}); +``` + +:::tip + +Brillig expects an object with an array of values. Each value is an object declaring to be `Single` or `Array` and returning a `inner` property *as a string*. For example: + +```json +{ "values": [{ "Array": [{ "inner": "1" }, { "inner": "2"}]}]} +{ "values": [{ "Single": { "inner": "1" }}]} +{ "values": [{ "Single": { "inner": "1" }}, { "Array": [{ "inner": "1", { "inner": "2" }}]}]} +``` + +If you're using Typescript, the following types may be helpful in understanding the expected return value and making sure they're easy to follow: + +```js +interface Value { + inner: string, +} + +interface SingleForeignCallParam { + Single: Value, +} + +interface ArrayForeignCallParam { + Array: Value[], +} + +type ForeignCallParam = SingleForeignCallParam | ArrayForeignCallParam; + +interface ForeignCallResult { + values: ForeignCallParam[], +} +``` + +::: + +## Step 3 - Usage with Nargo + +Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test`, `nargo execute` and `nargo prove` commands by passing a value to `--oracle-resolver`. For example: + +```bash +nargo test --oracle-resolver http://localhost:5555 +``` + +This tells `nargo` to use your RPC Server URL whenever it finds an oracle decorator. + +## Step 4 - Usage with NoirJS + +In a JS environment, an RPC server is not strictly necessary, as you may want to resolve your oracles without needing any JSON call at all. NoirJS simply expects that you pass a callback function when you generate proofs, and that callback function can be anything. + +For example, if your Noir program expects the host machine to provide CPU pseudo-randomness, you could simply pass it as the `foreignCallHandler`. You don't strictly need to create an RPC server to serve pseudo-randomness, as you may as well get it directly in your app: + +```js +const foreignCallHandler = (name, inputs) => crypto.randomBytes(16) // etc + +await noir.generateFinalProof(inputs, foreignCallHandler) +``` + +As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) function simply means "a callback function that returns a value of type [`ForeignCallOutput`](../reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md). It doesn't have to be an RPC call like in the case for Nargo. + +:::tip + +Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)? 
+
+You don't technically have to, but then how would you run `nargo test` or `nargo prove`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server.
+
+:::
+
+In this case, let's make `foreignCallHandler` call the JSON RPC Server we created in [Step #2](#step-2---write-an-rpc-server) by making it a JSON RPC Client.
+
+For example, using the same `getSqrt` program in [Step #1](#step-1---modify-your-noir-program) (comments in the code):
+
+```js
+import { JSONRPCClient } from "json-rpc-2.0";
+
+// declaring the JSONRPCClient
+const client = new JSONRPCClient((jsonRPCRequest) => {
+  // hitting the same JSON RPC Server we coded above
+  return fetch("http://localhost:5555", {
+    method: "POST",
+    headers: {
+      "content-type": "application/json",
+    },
+    body: JSON.stringify(jsonRPCRequest),
+  }).then((response) => {
+    if (response.status === 200) {
+      return response
+        .json()
+        .then((jsonRPCResponse) => client.receive(jsonRPCResponse));
+    } else if (jsonRPCRequest.id !== undefined) {
+      return Promise.reject(new Error(response.statusText));
+    }
+  });
+});
+
+// declaring a function that takes the name of the foreign call (getSqrt) and the inputs
+const foreignCallHandler = async (name, input) => {
+  // notice that the "input" parameter contains *all* the inputs
+  // in this case we want to make the RPC request with the first parameter, which would be input[0]
+  const oracleReturn = await client.request(name, [
+    { Array: input[0].map((i) => ({ inner: i.toString("hex") })) },
+  ]);
+  return [oracleReturn.values[0].Array.map((x) => x.inner)];
+};
+
+// the rest of your NoirJS code
+const input = { input: [4, 16] };
+const { witness } = await noir.execute(input, foreignCallHandler);
+```
+
+:::tip
+
+If you're in a NoirJS environment running your RPC server together with a frontend app, you'll probably hit a familiar problem in full-stack development: requests being blocked by [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) policy. For development only, you can simply install and use the [`cors` npm package](https://www.npmjs.com/package/cors) to get around the problem:
+
+```bash
+yarn add cors
+```
+
+and use it as a middleware:
+
+```js
+import cors from "cors";
+
+const app = express();
+app.use(cors())
+```
+
+:::
+
+## Conclusion
+
+Hopefully by the end of this guide, you should be able to:
+
+- Write your own logic around Oracles, including the JSON RPC server that makes them work with your Nargo commands.
+- Provide custom foreign call handlers for NoirJS.
diff --git a/docs/versioned_docs/version-v0.25.0/how_to/how-to-recursion.md b/docs/versioned_docs/version-v0.25.0/how_to/how-to-recursion.md
new file mode 100644
index 00000000000..4c45bb87ae2
--- /dev/null
+++ b/docs/versioned_docs/version-v0.25.0/how_to/how-to-recursion.md
@@ -0,0 +1,179 @@
+---
+title: How to use recursion on NoirJS
+description: Learn how to implement recursion with NoirJS, a powerful tool for creating smart contracts on the EVM blockchain. This guide assumes familiarity with NoirJS, solidity verifiers, and the Barretenberg proving backend. Discover how to generate both final and intermediate proofs using `noir_js` and `backend_barretenberg`.
+keywords: + [ + "NoirJS", + "EVM blockchain", + "smart contracts", + "recursion", + "solidity verifiers", + "Barretenberg backend", + "noir_js", + "backend_barretenberg", + "intermediate proofs", + "final proofs", + "nargo compile", + "json import", + "recursive circuit", + "recursive app" + ] +sidebar_position: 1 +--- + +This guide shows you how to use recursive proofs in your NoirJS app. For the sake of clarity, it is assumed that: + +- You already have a NoirJS app. If you don't, please visit the [NoirJS tutorial](../tutorials/noirjs_app.md) and the [reference](../reference/NoirJS/noir_js/index.md). +- You are familiar with what are recursive proofs and you have read the [recursion explainer](../explainers/explainer-recursion.md) +- You already built a recursive circuit following [the reference](../noir/standard_library/recursion.md), and understand how it works. + +It is also assumed that you're not using `noir_wasm` for compilation, and instead you've used [`nargo compile`](../reference/nargo_commands.md) to generate the `json` you're now importing into your project. However, the guide should work just the same if you're using `noir_wasm`. + +:::info + +As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. This means that it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit. + +While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. This means that these proofs need to be created by using the backend directly. + +In short: + +- `noir_js` generates *only* final proofs +- `backend_barretenberg` generates both types of proofs + +::: + +In a standard recursive app, you're also dealing with at least two circuits. For the purpose of this guide, we will assume the following: + +- `main`: a circuit of type `assert(x != y)`, where `main` is marked with a `#[recursive]` attribute. This attribute states that the backend should generate proofs that are friendly for verification within another circuit. +- `recursive`: a circuit that verifies `main` + +For a full example on how recursive proofs work, please refer to the [noir-examples](https://github.com/noir-lang/noir-examples) repository. We will *not* be using it as a reference for this guide. + +## Step 1: Setup + +In a common NoirJS app, you need to instantiate a backend with something like `const backend = new Backend(circuit)`. Then you feed it to the `noir_js` interface. + +For recursion, this doesn't happen, and the only need for `noir_js` is only to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. + +It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: + +```js +const backend = new Backend(circuit, { threads: 8 }) +``` + +:::tip +You can use the [`os.cpus()`](https://nodejs.org/api/os.html#oscpus) object in `nodejs` or [`navigator.hardwareConcurrency`](https://developer.mozilla.org/en-US/docs/Web/API/Navigator/hardwareConcurrency) on the browser to make the most out of those glorious cpu cores +::: + +## Step 2: Generating the witness and the proof for `main` + +After instantiating the backend, you should also instantiate `noir_js`. We will use it to execute the circuit and get the witness. 
+ +```js +const noir = new Noir(circuit, backend) +const { witness } = noir.execute(input) +``` + +With this witness, you are now able to generate the intermediate proof for the main circuit: + +```js +const { proof, publicInputs } = await backend.generateProof(witness) +``` + +:::warning + +Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit we are actually running and why! + +In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof. + +With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*, so it must be Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. + +::: + +## Step 3 - Verification and proof artifacts + +Optionally, you are able to verify the intermediate proof: + +```js +const verified = await backend.verifyProof({ proof, publicInputs }) +``` + +This can be useful to make sure our intermediate proof was correctly generated. But the real goal is to do it within another circuit. For that, we need to generate recursive proof artifacts that will be passed to the circuit that is verifying the proof we just generated. Instead of passing the proof and verification key as a byte array, we pass them as fields which makes it cheaper to verify in a circuit: + +```js +const { proofAsFields, vkAsFields, vkHash } = await backend.generateRecursiveProofArtifacts( { publicInputs, proof }, publicInputsCount) +``` + +This call takes the public inputs and the proof, but also the public inputs count. While this is easily retrievable by simply counting the `publicInputs` length, the backend interface doesn't currently abstract it away. + +:::info + +The `proofAsFields` has a constant size `[Field; 93]` and verification keys in Barretenberg are always `[Field; 114]`. + +::: + +:::warning + +One common mistake is to forget *who* makes this call. + +In a situation where Alice is generating the `main` proof, if she generates the proof artifacts and sends them to Bob, which gladly takes them as true, this would mean Alice could prove anything! + +Instead, Bob needs to make sure *he* extracts the proof artifacts, using his own instance of the `main` circuit backend. This way, Alice has to provide a valid proof for the correct `main` circuit. + +::: + +## Step 4 - Recursive proof generation + +With the artifacts, generating a recursive proof is no different from a normal proof. You simply use the `backend` (with the recursive circuit) to generate it: + +```js +const recursiveInputs = { + verification_key: vkAsFields, // array of length 114 + proof: proofAsFields, // array of length 93 + size of public inputs + publicInputs: [mainInput.y], // using the example above, where `y` is the only public input + key_hash: vkHash, +} + +const { witness, returnValue } = noir.execute(recursiveInputs) // we're executing the recursive circuit now! +const { proof, publicInputs } = backend.generateProof(witness) +const verified = backend.verifyProof({ proof, publicInputs }) +``` + +You can obviously chain this proof into another proof. In fact, if you're using recursive proofs, you're probably interested of using them this way! + +:::tip + +Managing circuits and "who does what" can be confusing. To make sure your naming is consistent, you can keep them in an object. 
For example: + +```js +const circuits = { + main: mainJSON, + recursive: recursiveJSON +} +const backends = { + main: new BarretenbergBackend(circuits.main), + recursive: new BarretenbergBackend(circuits.recursive) +} +const noir_programs = { + main: new Noir(circuits.main, backends.main), + recursive: new Noir(circuits.recursive, backends.recursive) +} +``` + +This allows you to neatly call exactly the method you want without conflicting names: + +```js +// Alice runs this 👇 +const { witness: mainWitness } = await noir_programs.main.execute(input) +const proof = await backends.main.generateProof(mainWitness) + +// Bob runs this 👇 +const verified = await backends.main.verifyProof(proof) +const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateRecursiveProofArtifacts( + proof, + numPublicInputs, +); +const recursiveProof = await noir_programs.recursive.generateProof(recursiveInputs) +``` + +::: diff --git a/docs/versioned_docs/version-v0.25.0/how_to/how-to-solidity-verifier.md b/docs/versioned_docs/version-v0.25.0/how_to/how-to-solidity-verifier.md new file mode 100644 index 00000000000..e3c7c1065da --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/how_to/how-to-solidity-verifier.md @@ -0,0 +1,231 @@ +--- +title: Generate a Solidity Verifier +description: + Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier + contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart + contract. Read more to find out +keywords: + [ + solidity verifier, + smart contract, + blockchain, + compiler, + plonk_vk.sol, + EVM blockchain, + verifying Noir programs, + proving backend, + Barretenberg, + ] +sidebar_position: 0 +pagination_next: tutorials/noirjs_app +--- + +Noir has the ability to generate a verifier contract in Solidity, which can be deployed in many EVM-compatible blockchains such as Ethereum. + +This allows for a powerful feature set, as one can make use of the conciseness and the privacy provided by Noir in an immutable ledger. Applications can range from simple P2P guessing games, to complex private DeFi interactions. + +This guide shows you how to generate a Solidity Verifier and deploy it on the [Remix IDE](https://remix.ethereum.org/). It is assumed that: + +- You are comfortable with the Solidity programming language and understand how contracts are deployed on the Ethereum network +- You have Noir installed and you have a Noir program. If you don't, [get started](../getting_started/installation/index.md) with Nargo and the example Hello Noir circuit +- You are comfortable navigating RemixIDE. If you aren't or you need a refresher, you can find some video tutorials [here](https://www.youtube.com/channel/UCjTUPyFEr2xDGN6Cg8nKDaA) that could help you. + +## Rundown + +Generating a Solidity Verifier contract is actually a one-command process. However, compiling it and deploying it can have some caveats. Here's the rundown of this guide: + +1. How to generate a solidity smart contract +2. How to compile the smart contract in the RemixIDE +3. How to deploy it to a testnet + +## Step 1 - Generate a contract + +This is by far the most straight-forward step. Just run: + +```sh +nargo codegen-verifier +``` + +A new `contract` folder would then be generated in your project directory, containing the Solidity +file `plonk_vk.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract. 
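+
+As a sketch, assuming a package named `hello_world` (the nested folder takes your package's name, and the exact layout can vary between Nargo versions):
+
+```sh
+# run from your package root, i.e. the directory containing Nargo.toml
+nargo codegen-verifier
+
+# illustrative result:
+# contract/
+# └── hello_world/
+#     └── plonk_vk.sol
+```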
+ +:::info + +It is possible to generate verifier contracts of Noir programs for other smart contract platforms as long as the proving backend supplies an implementation. + +Barretenberg, the default proving backend for Nargo, supports generation of verifier contracts, for the time being these are only in Solidity. +::: + +## Step 2 - Compiling + +We will mostly skip the details of RemixIDE, as the UI can change from version to version. For now, we can just open +Remix and create a blank workspace. + +![Create Workspace](@site/static/img/how-tos/solidity_verifier_1.png) + +We will create a new file to contain the contract Nargo generated, and copy-paste its content. + +:::warning + +You'll likely see a warning advising you to not trust pasted code. While it is an important warning, it is irrelevant in the context of this guide and can be ignored. We will not be deploying anywhere near a mainnet. + +::: + +To compile our the verifier, we can navigate to the compilation tab: + +![Compilation Tab](@site/static/img/how-tos/solidity_verifier_2.png) + +Remix should automatically match a suitable compiler version. However, hitting the "Compile" button will most likely generate a "Stack too deep" error: + +![Stack too deep](@site/static/img/how-tos/solidity_verifier_3.png) + +This is due to the verify function needing to put many variables on the stack, but enabling the optimizer resolves the issue. To do this, let's open the "Advanced Configurations" tab and enable optimization. The default 200 runs will suffice. + +:::info + +This time we will see a warning about an unused function parameter. This is expected, as the `verify` function doesn't use the `_proof` parameter inside a solidity block, it is loaded from calldata and used in assembly. + +::: + +![Compilation success](@site/static/img/how-tos/solidity_verifier_4.png) + +## Step 3 - Deploying + +At this point we should have a compiled contract read to deploy. If we navigate to the deploy section in Remix, we will see many different environments we can deploy to. The steps to deploy on each environment would be out-of-scope for this guide, so we will just use the default Remix VM. + +Looking closely, we will notice that our "Solidity Verifier" is actually three contracts working together: + +- An `UltraVerificationKey` library which simply stores the verification key for our circuit. +- An abstract contract `BaseUltraVerifier` containing most of the verifying logic. +- A main `UltraVerifier` contract that inherits from the Base and uses the Key contract. + +Remix will take care of the dependencies for us so we can simply deploy the UltraVerifier contract by selecting it and hitting "deploy": + +![Deploying UltraVerifier](@site/static/img/how-tos/solidity_verifier_5.png) + +A contract will show up in the "Deployed Contracts" section, where we can retrieve the Verification Key Hash. This is particularly useful for double-checking the deployer contract is the correct one. + +:::note + +Why "UltraVerifier"? + +To be precise, the Noir compiler (`nargo`) doesn't generate the verifier contract directly. It compiles the Noir code into an intermediate language (ACIR), which is then executed by the backend. So it is the backend that returns the verifier smart contract, not Noir. + +In this case, the Barretenberg Backend uses the UltraPlonk proving system, hence the "UltraVerifier" name. 
+ +::: + +## Step 4 - Verifying + +To verify a proof using the Solidity verifier contract, we call the `verify` function in this extended contract: + +```solidity +function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) +``` + +When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. For `_proof`, run `nargo prove` and use the string in `proof/.proof` (adding the hex `0x` prefix). We can also copy the public input from `Verifier.toml`, as it will be properly formatted as 32-byte strings: + +``` +0x...... , [0x0000.....02] +``` + +A programmatic example of how the `verify` function is called can be seen in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): + +```solidity +function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 nullifierHash) public returns (bool) { + // ... + bytes32[] memory publicInputs = new bytes32[](4); + publicInputs[0] = merkleRoot; + publicInputs[1] = bytes32(proposalId); + publicInputs[2] = bytes32(vote); + publicInputs[3] = nullifierHash; + require(verifier.verify(proof, publicInputs), "Invalid proof"); +``` + +:::info[Return Values] + +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in +Noir. + +Under the hood, the return value is passed as an input to the circuit and is checked at the end of +the circuit program. + +For example, if you have Noir program like this: + +```rust +fn main( + // Public inputs + pubkey_x: pub Field, + pubkey_y: pub Field, + // Private inputs + priv_key: Field, +) -> pub Field +``` + +the `verify` function will expect the public inputs array (second function parameter) to be of length 3, the two inputs and the return value. Like before, these values are populated in Verifier.toml after running `nargo prove`. + +Passing only two inputs will result in an error such as `PUBLIC_INPUT_COUNT_INVALID(3, 2)`. + +In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`. + +::: + +:::tip[Structs] + +You can pass structs to the verifier contract. They will be flattened so that the array of inputs is 1-dimensional array. + +For example, consider the following program: + +```rust +struct Type1 { + val1: Field, + val2: Field, +} + +struct Nested { + t1: Type1, + is_true: bool, +} + +fn main(x: pub Field, nested: pub Nested, y: pub Field) { + //... +} +``` + +The order of these inputs would be flattened to: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]` + +::: + +The other function you can call is our entrypoint `verify` function, as defined above. + +:::tip + +It's worth noticing that the `verify` function is actually a `view` function. A `view` function does not alter the blockchain state, so it doesn't need to be distributed (i.e. it will run only on the executing node), and therefore doesn't cost any gas. + +This can be particularly useful in some situations. If Alice generated a proof and wants Bob to verify its correctness, Bob doesn't need to run Nargo, NoirJS, or any Noir specific infrastructure. He can simply make a call to the blockchain with the proof and verify it is correct without paying any gas. 
+ +It would be incorrect to say that a Noir proof verification costs any gas at all. However, most of the time the result of `verify` is used to modify state (for example, to update a balance, a game state, etc). In that case the whole network needs to execute it, which does incur gas costs (calldata and execution, but not storage). + +::: + +## A Note on EVM chains + +ZK-SNARK verification depends on some precompiled cryptographic primitives such as Elliptic Curve Pairings (if you like complex math, you can read about EC Pairings [here](https://medium.com/@VitalikButerin/exploring-elliptic-curve-pairings-c73c1864e627)). Not all EVM chains support EC Pairings, notably some of the ZK-EVMs. This means that you won't be able to use the verifier contract in all of them. + +For example, chains like `zkSync ERA` and `Polygon zkEVM` do not currently support these precompiles, so proof verification via Solidity verifier contracts won't work. Here's a quick list of EVM chains that have been tested and are known to work: + +- Optimism +- Arbitrum +- Polygon PoS +- Scroll +- Celo + +If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. + +## What's next + +Now that you know how to call a Noir Solidity Verifier on a smart contract using Remix, you should be comfortable with using it with some programmatic frameworks, such as [hardhat](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) and [foundry](https://github.com/noir-lang/noir-starter/tree/main/with-foundry). + +You can find other tools, examples, boilerplates and libraries in the [awesome-noir](https://github.com/noir-lang/awesome-noir) repository. + +You should also be ready to write and deploy your first NoirJS app and start generating proofs on websites, phones, and NodeJS environments! Head on to the [NoirJS tutorial](../tutorials/noirjs_app.md) to learn how to do that. diff --git a/docs/versioned_docs/version-v0.25.0/how_to/merkle-proof.mdx b/docs/versioned_docs/version-v0.25.0/how_to/merkle-proof.mdx new file mode 100644 index 00000000000..34074659ac1 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/how_to/merkle-proof.mdx @@ -0,0 +1,48 @@ +--- +title: Prove Merkle Tree Membership +description: + Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a + merkle tree with a specified root, at a given index. +keywords: + [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +--- + +Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is +in a merkle tree. + +```rust +use dep::std; + +fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { + let leaf = std::hash::hash_to_field(message); + let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); + assert(merkle_root == root); +} + +``` + +The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen +by the backend. The only requirement is that this hash function can heuristically be used as a +random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` +instead. + +```rust +let leaf = std::hash::hash_to_field(message); +``` + +The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. 
The returned root can then be asserted to be the same as the provided root. + +```rust +let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); +assert (merkle_root == root); +``` + +> **Note:** It is possible to re-implement the merkle tree implementation without standard library. +> However, for most usecases, it is enough. In general, the standard library will always opt to be +> as conservative as possible, while striking a balance with efficiency. + +An example, the merkle membership proof, only requires a hash function that has collision +resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient +than the even more conservative sha256. + +[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.25.0/how_to/using-devcontainers.mdx b/docs/versioned_docs/version-v0.25.0/how_to/using-devcontainers.mdx new file mode 100644 index 00000000000..727ec6ca667 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/how_to/using-devcontainers.mdx @@ -0,0 +1,110 @@ +--- +title: Developer Containers and Codespaces +description: "Learn how to set up a devcontainer in your GitHub repository for a seamless coding experience with Codespaces. Follow our easy 8-step guide to create your own Noir environment without installing Nargo locally." +keywords: ["Devcontainer", "Codespaces", "GitHub", "Noir Environment", "Docker Image", "Development Environment", "Remote Coding", "GitHub Codespaces", "Noir Programming", "Nargo", "VSCode Extensions", "Noirup"] +sidebar_position: 1 +--- + +Adding a developer container configuration file to your Noir project is one of the easiest way to unlock coding in browser. + +## What's a devcontainer after all? + +A [Developer Container](https://containers.dev/) (devcontainer for short) is a Docker image that comes preloaded with tools, extensions, and other tools you need to quickly get started or continue a project, without having to install Nargo locally. Think of it as a development environment in a box. + +There are many advantages to this: + +- It's platform and architecture agnostic +- You don't need to have an IDE installed, or Nargo, or use a terminal at all +- It's safer for using on a public machine or public network + +One of the best ways of using devcontainers is... not using your machine at all, for maximum control, performance, and ease of use. +Enter Codespaces. + +## Codespaces + +If a devcontainer is just a Docker image, then what stops you from provisioning a `p3dn.24xlarge` AWS EC2 instance with 92 vCPUs and 768 GiB RAM and using it to prove your 10-gate SNARK proof? + +Nothing! Except perhaps the 30-40$ per hour it will cost you. + +The problem is that provisioning takes time, and I bet you don't want to see the AWS console every time you want to code something real quick. + +Fortunately, there's an easy and free way to get a decent remote machine ready and loaded in less than 2 minutes: Codespaces. 
[Codespaces is a Github feature](https://github.com/features/codespaces) that allows you to code in a remote machine by using devcontainers, and it's pretty cool: + +- You can start coding Noir in less than a minute +- It uses the resources of a remote machine, so you can code on your grandma's phone if needed be +- It makes it easy to share work with your frens +- It's fully reusable, you can stop and restart whenever you need to + +:::info + +Don't take out your wallet just yet. Free GitHub accounts get about [15-60 hours of coding](https://github.com/features/codespaces) for free per month, depending on the size of your provisioned machine. + +::: + +## Tell me it's _actually_ easy + +It is! + +Github comes with a default codespace and you can use it to code your own devcontainer. That's exactly what we will be doing in this guide. + + + +8 simple steps: + +#### 1. Create a new repository on GitHub. + +#### 2. Click "Start coding with Codespaces". This will use the default image. + +#### 3. Create a folder called `.devcontainer` in the root of your repository. + +#### 4. Create a Dockerfile in that folder, and paste the following code: + +```docker +FROM --platform=linux/amd64 node:lts-bookworm-slim +SHELL ["/bin/bash", "-c"] +RUN apt update && apt install -y curl bash git tar gzip libc++-dev +RUN curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +ENV PATH="/root/.nargo/bin:$PATH" +RUN noirup +ENTRYPOINT ["nargo"] +``` +#### 5. Create a file called `devcontainer.json` in the same folder, and paste the following code: + +```json +{ + "name": "Noir on Codespaces", + "build": { + "context": ".", + "dockerfile": "Dockerfile" + }, + "customizations": { + "vscode": { + "extensions": ["noir-lang.vscode-noir"] + } + } +} +``` +#### 6. Commit and push your changes + +This will pull the new image and build it, so it could take a minute or so + +#### 8. Done! +Just wait for the build to finish, and there's your easy Noir environment. + + +Refer to [noir-starter](https://github.com/noir-lang/noir-starter/) as an example of how devcontainers can be used together with codespaces. + + + +## How do I use it? + +Using the codespace is obviously much easier than setting it up. +Just navigate to your repository and click "Code" -> "Open with Codespaces". It should take a few seconds to load, and you're ready to go. + +:::info + +If you really like the experience, you can add a badge to your readme, links to existing codespaces, and more. +Check out the [official docs](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/facilitating-quick-creation-and-resumption-of-codespaces) for more info. diff --git a/docs/versioned_docs/version-v0.25.0/index.mdx b/docs/versioned_docs/version-v0.25.0/index.mdx new file mode 100644 index 00000000000..75086ddcdde --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/index.mdx @@ -0,0 +1,67 @@ +--- +title: Noir Lang +hide_title: true +description: + Learn about the public alpha release of Noir, a domain specific language heavily influenced by Rust that compiles to + an intermediate language which can be compiled to an arithmetic circuit or a rank-1 constraint system. 
+keywords: + [Noir, + Domain Specific Language, + Rust, + Intermediate Language, + Arithmetic Circuit, + Rank-1 Constraint System, + Ethereum Developers, + Protocol Developers, + Blockchain Developers, + Proving System, + Smart Contract Language] +sidebar_position: 0 +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +Noir Logo + +Noir is a Domain-Specific Language for SNARK proving systems developed by [Aztec Labs](https://aztec.network/). It allows you to generate complex Zero-Knowledge Programs (ZKP) by using simple and flexible syntax, requiring no previous knowledge on the underlying mathematics or cryptography. + +ZK programs are programs that can generate short proofs of a certain statement without revealing some details about it. You can read more about ZKPs [here](https://dev.to/spalladino/a-beginners-intro-to-coding-zero-knowledge-proofs-c56). + +## What's new about Noir? + +Noir works differently from most ZK languages by taking a two-pronged path. First, it compiles the program to an adaptable intermediate language known as ACIR. From there, depending on a given project's needs, ACIR can be further compiled into an arithmetic circuit for integration with the proving backend. + +:::info + +Noir is backend agnostic, which means it makes no assumptions on which proving backend powers the ZK proof. Being the language that powers [Aztec Contracts](https://docs.aztec.network/developers/contracts/main), it defaults to Aztec's Barretenberg proving backend. + +However, the ACIR output can be transformed to be compatible with other PLONK-based backends, or into a [rank-1 constraint system](https://www.rareskills.io/post/rank-1-constraint-system) suitable for backends such as Arkwork's Marlin. + +::: + +## Who is Noir for? + +Noir can be used both in complex cloud-based backends and in user's smartphones, requiring no knowledge on the underlying math or cryptography. From authorization systems that keep a password in the user's device, to complex on-chain verification of recursive proofs, Noir is designed to abstract away complexity without any significant overhead. Here are some examples of situations where Noir can be used: + + + + Noir Logo + + Aztec Contracts leverage Noir to allow for the storage and execution of private information. Writing an Aztec Contract is as easy as writing Noir, and Aztec developers can easily interact with the network storage and execution through the [Aztec.nr](https://docs.aztec.network/developers/contracts/main) library. + + + Soliditry Verifier Example + Noir can auto-generate Solidity verifier contracts that verify Noir proofs. This allows for non-interactive verification of proofs containing private information in an immutable system. This feature powers a multitude of use-case scenarios, from P2P chess tournaments, to [Aztec Layer-2 Blockchain](https://docs.aztec.network/) + + + Aztec Labs developed NoirJS, an easy interface to generate and verify Noir proofs in a Javascript environment. This allows for Noir to be used in webpages, mobile apps, games, and any other environment supporting JS execution in a standalone manner. + + + + +## Libraries + +Noir is meant to be easy to extend by simply importing Noir libraries just like in Rust. +The [awesome-noir repo](https://github.com/noir-lang/awesome-noir#libraries) is a collection of libraries developed by the Noir community. +Writing a new library is easy and makes code be composable and easy to reuse. 
See the section on [dependencies](noir/modules_packages_crates/dependencies.md) for more information. diff --git a/docs/versioned_docs/version-v0.25.0/migration_notes.md b/docs/versioned_docs/version-v0.25.0/migration_notes.md new file mode 100644 index 00000000000..6bd740024e5 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/migration_notes.md @@ -0,0 +1,105 @@ +--- +title: Migration notes +description: Read about migration notes from previous versions, which could solve problems while updating +keywords: [Noir, notes, migration, updating, upgrading] +--- + +Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built. + +### `backend encountered an error: libc++.so.1` + +Depending on your OS, you may encounter the following error when running `nargo prove` for the first time: + +```text +The backend encountered an error: "/home/codespace/.nargo/backends/acvm-backend-barretenberg/backend_binary: error while loading shared libraries: libc++.so.1: cannot open shared object file: No such file or directory\n" +``` + +Install the `libc++-dev` library with: + +```bash +sudo apt install libc++-dev +``` + +## ≥0.19 + +### Enforcing `compiler_version` + +From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use. + +To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`. + +## ≥0.14 + +The index of the [for loops](noir/concepts/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: + +```rust +for i in 0..10 { + let i = i as Field; +} +``` + +## ≥v0.11.0 and Nargo backend + +From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading to the versions per usual steps might lead to: + +### `backend encountered an error` + +This is likely due to the existing locally installed version of proving backend (e.g. barretenberg) is incompatible with the version of Nargo in use. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo prove +``` + +with your Noir program. + +This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. + +### `backend encountered an error: illegal instruction` + +On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. 
+ +If you are using the default barretenberg backend, simply run: + +``` +nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz +``` + +This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. + +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. + +Then run: + +``` +DESIRED_BINARY_VERSION=0.8.1 nargo info +``` + +This overrides the bb native binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than native binary. + +0.8.1 indicates bb.js version 0.8.1, so if you change that it will update to a different version or the default version in the script if none was supplied. diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/_category_.json b/docs/versioned_docs/version-v0.25.0/noir/concepts/_category_.json new file mode 100644 index 00000000000..7da08f8a8c5 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Concepts", + "position": 0, + "collapsible": true, + "collapsed": true +} \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/assert.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/assert.md new file mode 100644 index 00000000000..bcff613a695 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/assert.md @@ -0,0 +1,45 @@ +--- +title: Assert Function +description: + Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or + comparison expression that follows to be true, and what happens if the expression is false at + runtime. +keywords: [Noir programming language, assert statement, predicate expression, comparison expression] +sidebar_position: 4 +--- + +Noir includes a special `assert` function which will explicitly constrain the predicate/comparison +expression that follows to be true. If this expression is false at runtime, the program will fail to +be proven. Example: + +```rust +fn main(x : Field, y : Field) { + assert(x == y); +} +``` + +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. + +You can optionally provide a message to be logged when the assertion fails: + +```rust +assert(x == y, "x and y are not equal"); +``` + +Aside string literals, the optional message can be a format string or any other type supported as input for Noir's [print](../standard_library/logging.md) functions. 
This feature lets you incorporate runtime variables into your failed assertion logs: + +```rust +assert(x == y, f"Expected x == y, but got {x} == {y}"); +``` + +Using a variable as an assertion message directly: + +```rust +struct myStruct { + myField: Field +} + +let s = myStruct { myField: y }; +assert(s.myField == x, s); +``` + diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/comments.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/comments.md new file mode 100644 index 00000000000..b51a85f5c94 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/comments.md @@ -0,0 +1,33 @@ +--- +title: Comments +description: + Learn how to write comments in Noir programming language. A comment is a line of code that is + ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments + are supported in Noir. +keywords: [Noir programming language, comments, single-line comments, multi-line comments] +sidebar_position: 10 +--- + +A comment is a line in your codebase which the compiler ignores, however it can be read by +programmers. + +Here is a single line comment: + +```rust +// This is a comment and is ignored +``` + +`//` is used to tell the compiler to ignore the rest of the line. + +Noir also supports multi-line block comments. Start a block comment with `/*` and end the block with `*/`. + +Noir does not natively support doc comments. You may be able to use [Rust doc comments](https://doc.rust-lang.org/reference/comments.html) in your code to leverage some Rust documentation build tools with Noir code. + +```rust +/* + This is a block comment describing a complex function. +*/ +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/control_flow.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/control_flow.md new file mode 100644 index 00000000000..4ce65236db3 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/control_flow.md @@ -0,0 +1,45 @@ +--- +title: Control Flow +description: + Learn how to use loops and if expressions in the Noir programming language. Discover the syntax + and examples for for loops and if-else statements. +keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] +sidebar_position: 2 +--- + +## Loops + +Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple +times. + +The following block of code between the braces is run 10 times. + +```rust +for i in 0..10 { + // do something +}; +``` + +The index for loops is of type `u64`. + +## If Expressions + +Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required +for the statement's conditional to be surrounded by parentheses. + +```rust +let a = 0; +let mut x: u32 = 0; + +if a == 0 { + if a != 0 { + x = 6; + } else { + x = 2; + } +} else { + x = 5; + assert(x == 5); +} +assert(x == 2); +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_bus.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_bus.md new file mode 100644 index 00000000000..e54fc861257 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_bus.md @@ -0,0 +1,21 @@ +--- +title: Data Bus +sidebar_position: 13 +--- +**Disclaimer** this feature is experimental, do not use it! + +The data bus is an optimization that the backend can use to make recursion more efficient. 
+In order to use it, you must define some inputs of the program entry points (usually the `main()`
+function) with the `call_data` modifier, and the return values with the `return_data` modifier.
+These modifiers are incompatible with the `pub` and `mut` modifiers.
+
+## Example
+
+```rust
+fn main(mut x: u32, y: call_data u32, z: call_data [u32;4] ) -> return_data u32 {
+  let a = z[x];
+  a+y
+}
+```
+
+As a result, both call_data and return_data will be treated as private inputs and encapsulated into a read-only array each, for the backend to process.
diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/_category_.json b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/_category_.json
new file mode 100644
index 00000000000..5d694210bbf
--- /dev/null
+++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/_category_.json
@@ -0,0 +1,5 @@
+{
+  "position": 0,
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/arrays.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/arrays.md
new file mode 100644
index 00000000000..a8bd338e736
--- /dev/null
+++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/arrays.md
@@ -0,0 +1,251 @@
+---
+title: Arrays
+description:
+  Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code.
+keywords:
+  [
+    noir,
+    array type,
+    methods,
+    examples,
+    indexing,
+  ]
+sidebar_position: 4
+---
+
+An array is one way of grouping together values into one compound type. Array types can be inferred
+or explicitly specified via the syntax `[<Type>; <Size>]`:
+
+```rust
+fn main(x : Field, y : Field) {
+    let my_arr = [x, y];
+    let your_arr: [Field; 2] = [x, y];
+}
+```
+
+Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements.
+
+Array elements can be accessed using indexing:
+
+```rust
+fn main() {
+    let a = [1, 2, 3, 4, 5];
+
+    let first = a[0];
+    let second = a[1];
+}
+```
+
+All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group
+a `Field` value and a `u8` value together for example.
+
+You can write mutable arrays, like:
+
+```rust
+fn main() {
+    let mut arr = [1, 2, 3, 4, 5];
+    assert(arr[0] == 1);
+
+    arr[0] = 42;
+    assert(arr[0] == 42);
+}
+```
+
+You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0.
+
+```rust
+let array: [Field; 32] = [0; 32];
+```
+
+Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array:
+
+```rust
+let array: [Field; 32] = [0; 32];
+let sl = array.as_slice();
+```
+
+You can define multidimensional arrays:
+
+```rust
+let array : [[Field; 2]; 2];
+let element = array[0][0];
+```
+However, multidimensional slices are not supported. For example, the following code will error at compile time:
+```rust
+let slice : [[Field]] = [];
+```
+
+## Types
+
+You can create arrays of primitive types or structs. There is not yet support for nested arrays
+(arrays of arrays) or arrays of structs that contain arrays.
+
+## Methods
+
+For convenience, the STD provides some ready-to-use, common methods for arrays.
+Each of these functions is located within the generic impl `impl<T, N> [T; N] {`.
+So anywhere `self` appears, it refers to the variable `self: [T; N]`. + +### len + +Returns the length of an array + +```rust +fn len(self) -> Field +``` + +example + +```rust +fn main() { + let array = [42, 42]; + assert(array.len() == 2); +} +``` + +### sort + +Returns a new sorted array. The original array remains untouched. Notice that this function will +only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting +logic it uses internally is optimized specifically for these values. If you need a sort function to +sort any type, you should use the function `sort_via` described below. + +```rust +fn sort(self) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32]; + let sorted = arr.sort(); + assert(sorted == [32, 42]); +} +``` + +### sort_via + +Sorts the array with a custom comparison function + +```rust +fn sort_via(self, ordering: fn(T, T) -> bool) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32] + let sorted_ascending = arr.sort_via(|a, b| a < b); + assert(sorted_ascending == [32, 42]); // verifies + + let sorted_descending = arr.sort_via(|a, b| a > b); + assert(sorted_descending == [32, 42]); // does not verify +} +``` + +### map + +Applies a function to each element of the array, returning a new array containing the mapped elements. + +```rust +fn map(self, f: fn(T) -> U) -> [U; N] +``` + +example + +```rust +let a = [1, 2, 3]; +let b = a.map(|a| a * 2); // b is now [2, 4, 6] +``` + +### fold + +Applies a function to each element of the array, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(self, mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the array, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = [1]; +let a2 = [1, 2]; +let a3 = [1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let arr = [2, 2, 2, 2, 2]; + let folded = arr.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as starting element. + +```rust +fn reduce(self, f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let reduced = arr.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let all = arr.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 5]; + let any = arr.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/booleans.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/booleans.md new file mode 100644 index 00000000000..69826fcd724 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/booleans.md @@ -0,0 +1,31 @@ +--- +title: Booleans +description: + Delve into the Boolean data type in Noir. 
Understand its methods, practical examples, and best practices for using Booleans in your Noir programs. +keywords: + [ + noir, + boolean type, + methods, + examples, + logical operations, + ] +sidebar_position: 2 +--- + + +The `bool` type in Noir has two possible values: `true` and `false`: + +```rust +fn main() { + let t = true; + let f: bool = false; +} +``` + +> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for +> `false` in _Verifier.toml_. + +The boolean type is most commonly used in conditionals like `if` expressions and `assert` +statements. More about conditionals is covered in the [Control Flow](../control_flow) and +[Assert Function](../assert) sections. diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/fields.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/fields.md new file mode 100644 index 00000000000..99b4aa63549 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/fields.md @@ -0,0 +1,192 @@ +--- +title: Fields +description: + Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. +keywords: + [ + noir, + field type, + methods, + examples, + best practices, + ] +sidebar_position: 0 +--- + +The field type corresponds to the native field type of the proving backend. + +The size of a Noir field depends on the elliptic curve's finite field for the proving backend +adopted. For example, a field would be a 254-bit integer when paired with the default backend that +spans the Grumpkin curve. + +Fields support integer arithmetic and are often used as the default numeric type in Noir: + +```rust +fn main(x : Field, y : Field) { + let z = x + y; +} +``` + +`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new +private value `z` constrained to be equal to `x + y`. + +If proving efficiency is of priority, fields should be used as a default for solving problems. +Smaller integer types (e.g. `u64`) incur extra range constraints. + +## Methods + +After declaring a Field, you can use these common methods on it: + +### to_le_bits + +Transforms the field into an array of bits, Little Endian. + +```rust +fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_le_bits(32); +} +``` + +### to_be_bits + +Transforms the field into an array of bits, Big Endian. 
+ +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### assert_max_bit_size + +Adds a constraint to specify that the field can be represented with `bit_size` number of bits + +```rust +fn assert_max_bit_size(self, bit_size: u32) +``` + +example: + +```rust +fn main() { + let field = 2 + field.assert_max_bit_size(32); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. + +```rust +fn sgn0(self) -> u1 +``` + + +### lt + +Returns true if the field is less than the other field + +```rust +pub fn lt(self, another: Field) -> bool +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/function_types.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/function_types.md new file mode 100644 index 00000000000..f6121af17e2 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/function_types.md @@ -0,0 +1,26 @@ +--- +title: Function types +sidebar_position: 10 +--- + +Noir supports higher-order functions. The syntax for a function type is as follows: + +```rust +fn(arg1_type, arg2_type, ...) -> return_type +``` + +Example: + +```rust +fn assert_returns_100(f: fn() -> Field) { // f takes no args and returns a Field + assert(f() == 100); +} + +fn main() { + assert_returns_100(|| 100); // ok + assert_returns_100(|| 150); // fails +} +``` + +A function type also has an optional capture environment - this is necessary to support closures. +See [Lambdas](../lambdas.md) for more details. diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/index.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/index.md new file mode 100644 index 00000000000..97b3b2cb094 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/index.md @@ -0,0 +1,110 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. 
+keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. + +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. + +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. + +> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different +> meaning than when applied to a function (e.g. `pub fn foo() {}`). +> +> The former is a visibility modifier for the Prover to interpret if a value should be made known to +> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a +> function should be made accessible to external Noir programs like in other languages. + +### pub Modifier + +All data types in Noir are private by default. Types are explicitly declared as public using the +`pub` modifier: + +```rust +fn main(x : Field, y : pub Field) -> pub Field { + x + y +} +``` + +In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note +that visibility is handled **per variable**, so it is perfectly valid to have one input that is +private and another that is public. + +> **Note:** Public types can only be declared through parameters on `main`. + +## Type Aliases + +A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: + +```rust +type Id = u8; + +fn main() { + let id: Id = 1; + let zero: u8 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can also be used with [generics](@site/docs/noir/concepts/generics.md): + +```rust +type Id = Size; + +fn main() { + let id: Id = 1; + let zero: u32 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can even refer to other aliases. An error will be issued if they form a cycle: + +```rust +// Ok! +type A = B; +type B = Field; + +type Bad1 = Bad2; + +// error: Dependency cycle found +type Bad2 = Bad1; +// ^^^^^^^^^^^ 'Bad2' recursively depends on itself: Bad2 -> Bad1 -> Bad2 +``` + +### BigInt + +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. 
diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/integers.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/integers.md
new file mode 100644
index 00000000000..4d58d96fed5
--- /dev/null
+++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/integers.md
@@ -0,0 +1,155 @@
+---
+title: Integers
+description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code.
+keywords: [noir, integer types, methods, examples, arithmetic]
+sidebar_position: 1
+---
+
+An integer type is a range-constrained field type. The Noir frontend supports both unsigned and signed integer types. The allowed sizes are 1, 8, 32 and 64 bits.
+
+:::info
+
+When an integer is defined in Noir without a specific type, it will default to `Field`.
+
+The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible.
+
+:::
+
+## Unsigned Integers
+
+An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g. `8`):
+
+```rust
+fn main() {
+    let x: u8 = 1;
+    let y: u8 = 1;
+    let z = x + y;
+    assert (z == 2);
+}
+```
+
+The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $2^{8}-1$).
+
+## Signed Integers
+
+A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`):
+
+```rust
+fn main() {
+    let x: i8 = -1;
+    let y: i8 = -1;
+    let z = x + y;
+    assert (z == -2);
+}
+```
+
+The bit size determines the maximum and minimum values the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $-2^{7}$ to $2^{7}-1$).
+
+## 128 bits Unsigned Integers
+
+The built-in structure `U128` allows you to use 128-bit unsigned integers almost like a native integer type. However, there are some differences to keep in mind:
+- You cannot cast between a native integer and `U128`
+- There is a higher performance cost when using `U128`, compared to a native type.
+
+Conversion between unsigned integer types and U128 is done through the use of the `from_integer` and `to_integer` functions.
+
+```rust
+fn main() {
+    let x = U128::from_integer(23);
+    let y = U128::from_hex("0x7");
+    let z = x + y;
+    assert(z.to_integer() == 30);
+}
+```
+
+`U128` is implemented with two 64-bit limbs, representing the low and high bits, which explains the performance cost. You should expect `U128` to be twice as costly for addition and four times as costly for multiplication.
+You can construct a U128 from its limbs:
+```rust
+fn main(x: u64, y: u64) {
+    let z = U128::from_u64s_be(x, y);
+    assert(z.hi == x as Field);
+    assert(z.lo == y as Field);
+}
+```
+
+Note that the limbs are stored as Field elements in order to avoid unnecessary conversions.
+Apart from this, most operations will work as usual:
+
+```rust
+fn main(x: U128, y: U128) {
+    // multiplication
+    let c = x * y;
+    // addition and subtraction
+    let c = c - x + y;
+    // division
+    let c = x / y;
+    // bit operations
+    let c = x & y | y;
+    // bit shift
+    let c = x << y;
+    // comparisons
+    let c = x < y;
+    let c = x == y;
+}
+```
+
+## Overflows
+
+Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers.
For example, attempting to prove: + +```rust +fn main(x: u8, y: u8) { + let z = x + y; +} +``` + +With: + +```toml +x = "255" +y = "1" +``` + +Would result in: + +``` +$ nargo prove +error: Assertion failed: 'attempt to add with overflow' +┌─ ~/src/main.nr:9:13 +│ +│ let z = x + y; +│ ----- +│ += Call stack: + ... +``` + +A similar error would happen with signed integers: + +```rust +fn main() { + let x: i8 = -118; + let y: i8 = -11; + let z = x + y; +} +``` + +### Wrapping methods + +Although integer overflow is expected to error, some use-cases rely on wrapping. For these use-cases, the standard library provides `wrapping` variants of certain common operations: + +```rust +fn wrapping_add(x: T, y: T) -> T; +fn wrapping_sub(x: T, y: T) -> T; +fn wrapping_mul(x: T, y: T) -> T; +``` + +Example of how it is used: + +```rust +use dep::std; + +fn main(x: u8, y: u8) -> pub u8 { + std::wrapping_add(x, y) +} +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/references.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/references.md new file mode 100644 index 00000000000..a5293d11cfb --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/references.md @@ -0,0 +1,23 @@ +--- +title: References +sidebar_position: 9 +--- + +Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it. + +Example: + +```rust +fn main() { + let mut x = 2; + + // you can reference x as &mut and pass it to multiplyBy2 + multiplyBy2(&mut x); +} + +// you can access &mut here +fn multiplyBy2(x: &mut Field) { + // and dereference it with * + *x = *x * 2; +} +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/slices.mdx b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/slices.mdx new file mode 100644 index 00000000000..4a6ee816aa2 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/slices.mdx @@ -0,0 +1,147 @@ +--- +title: Slices +description: Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs. +keywords: [noir, slice type, methods, examples, subarrays] +sidebar_position: 5 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. + +```rust +use dep::std::slice; + +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for slices: + +### push_back + +Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. 
+ +```rust +fn push_back(_self: [T], _elem: T) -> [T] +``` + +example: + +```rust +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +### push_front + +Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. + +```rust +fn push_front(_self: Self, _elem: T) -> Self +``` + +Example: + +```rust +let mut new_slice: [Field] = []; +new_slice = new_slice.push_front(20); +assert(new_slice[0] == 20); // returns true +``` + +View the corresponding test file [here][test-file]. + +### pop_front + +Returns a tuple of two items, the first element of the array and the rest of the array. + +```rust +fn pop_front(_self: Self) -> (T, Self) +``` + +Example: + +```rust +let (first_elem, rest_of_slice) = slice.pop_front(); +``` + +View the corresponding test file [here][test-file]. + +### pop_back + +Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. + +```rust +fn pop_back(_self: Self) -> (Self, T) +``` + +Example: + +```rust +let (popped_slice, last_elem) = slice.pop_back(); +``` + +View the corresponding test file [here][test-file]. + +### append + +Loops over a slice and adds it to the end of another. + +```rust +fn append(mut self, other: Self) -> Self +``` + +Example: + +```rust +let append = [1, 2].append([3, 4, 5]); +``` + +### insert + +Inserts an element at a specified index and shifts all following elements by 1. + +```rust +fn insert(_self: Self, _index: Field, _elem: T) -> Self +``` + +Example: + +```rust +new_slice = rest_of_slice.insert(2, 100); +assert(new_slice[2] == 100); +``` + +View the corresponding test file [here][test-file]. + +### remove + +Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. + +```rust +fn remove(_self: Self, _index: Field) -> (Self, T) +``` + +Example: + +```rust +let (remove_slice, removed_elem) = slice.remove(3); +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/strings.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/strings.md new file mode 100644 index 00000000000..311dfd64416 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/strings.md @@ -0,0 +1,80 @@ +--- +title: Strings +description: + Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. +keywords: + [ + noir, + string type, + methods, + examples, + concatenation, + ] +sidebar_position: 3 +--- + + +The string type is a fixed length value defined with `str`. + +You can use strings in `assert()` functions or print them with +`println()`. See more about [Logging](../../standard_library/logging). + +```rust +use dep::std; + +fn main(message : pub str<11>, hex_as_string : str<4>) { + println(message); + assert(message == "hello world"); + assert(hex_as_string == "0x41"); +} +``` + +You can convert a `str` to a byte array by calling `as_bytes()` +or a vector by calling `as_bytes_vec()`. 
+ +```rust +fn main() { + let message = "hello world"; + let message_bytes = message.as_bytes(); + let mut message_vec = message.as_bytes_vec(); + assert(message_bytes.len() == 11); + assert(message_bytes[0] == 104); + assert(message_bytes[0] == message_vec.get(0)); +} +``` + +## Escape characters + +You can use escape characters for your strings: + +| Escape Sequence | Description | +|-----------------|-----------------| +| `\r` | Carriage Return | +| `\n` | Newline | +| `\t` | Tab | +| `\0` | Null Character | +| `\"` | Double Quote | +| `\\` | Backslash | + +Example: + +```rust +let s = "Hello \"world" // prints "Hello "world" +let s = "hey \tyou"; // prints "hey you" +``` + +## Raw strings + +A raw string begins with the letter `r` and is optionally delimited by a number of hashes `#`. + +Escape characters are *not* processed within raw strings. All contents are interpreted literally. + +Example: + +```rust +let s = r"Hello world"; +let s = r#"Simon says "hello world""#; + +// Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes +let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####; +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/structs.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/structs.md new file mode 100644 index 00000000000..dbf68c99813 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/structs.md @@ -0,0 +1,70 @@ +--- +title: Structs +description: + Explore the Struct data type in Noir. Learn about its methods, see real-world examples, and grasp how to effectively define and use Structs in your Noir programs. +keywords: + [ + noir, + struct type, + methods, + examples, + data structures, + ] +sidebar_position: 8 +--- + +A struct also allows for grouping multiple values of different types. Unlike tuples, we can also +name each field. + +> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the +> field type of Noir. + +Defining a struct requires giving it a name and listing each field within as `: ` pairs: + +```rust +struct Animal { + hands: Field, + legs: Field, + eyes: u8, +} +``` + +An instance of a struct can then be created with actual values in `: ` pairs in any +order. Struct fields are accessible using their given names: + +```rust +fn main() { + let legs = 4; + + let dog = Animal { + eyes: 2, + hands: 0, + legs, + }; + + let zero = dog.hands; +} +``` + +Structs can also be destructured in a pattern, binding each field to a new variable: + +```rust +fn main() { + let Animal { hands, legs: feet, eyes } = get_octopus(); + + let ten = hands + feet + eyes as u8; +} + +fn get_octopus() -> Animal { + let octopus = Animal { + hands: 0, + legs: 8, + eyes: 2, + }; + + octopus +} +``` + +The new variables can be bound with names different from the original struct field names, as +showcased in the `legs --> feet` binding in the example above. diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/tuples.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/tuples.md new file mode 100644 index 00000000000..2ec5c9c4113 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/data_types/tuples.md @@ -0,0 +1,48 @@ +--- +title: Tuples +description: + Dive into the Tuple data type in Noir. Understand its methods, practical examples, and best practices for efficiently using Tuples in your Noir code. 
+keywords: + [ + noir, + tuple type, + methods, + examples, + multi-value containers, + ] +sidebar_position: 7 +--- + +A tuple collects multiple values like an array, but with the added ability to collect values of +different types: + +```rust +fn main() { + let tup: (u8, u64, Field) = (255, 500, 1000); +} +``` + +One way to access tuple elements is via destructuring using pattern matching: + +```rust +fn main() { + let tup = (1, 2); + + let (one, two) = tup; + + let three = one + two; +} +``` + +Another way to access tuple elements is via direct member access, using a period (`.`) followed by +the index of the element we want to access. Index `0` corresponds to the first tuple element, `1` to +the second and so on: + +```rust +fn main() { + let tup = (5, 6, 7, 8); + + let five = tup.0; + let eight = tup.3; +} +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/distinct.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/distinct.md new file mode 100644 index 00000000000..6c993b8b5e0 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/distinct.md @@ -0,0 +1,64 @@ +--- +title: Distinct Witnesses +sidebar_position: 11 +--- + +The `distinct` keyword prevents repetitions of witness indices in the program's ABI. This ensures +that the witnesses being returned as public inputs are all unique. + +The `distinct` keyword is only used for return values on program entry points (usually the `main()` +function). + +When using `distinct` and `pub` simultaneously, `distinct` comes first. See the example below. + +You can read more about the problem this solves +[here](https://github.com/noir-lang/noir/issues/1183). + +## Example + +Without the `distinct` keyword, the following program + +```rust +fn main(x : pub Field, y : pub Field) -> pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + "return_witnesses": [3, 2, 4, 4] + } +} +``` + +Whereas (with the `distinct` keyword) + +```rust +fn main(x : pub Field, y : pub Field) -> distinct pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + //... + "return_witnesses": [3, 4, 5, 6] + } +} +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/functions.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/functions.md new file mode 100644 index 00000000000..48aba9cd058 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/functions.md @@ -0,0 +1,226 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. +keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +sidebar_position: 1 +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. 
To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: + +```rust +fn main(x : Field) // this is fine: passing a Field +fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time +fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 +fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 + +fn main(x : Vec) // can't compile, has variable size +fn main(x : [Field]) // can't compile, has variable size +fn main(....// i think you got it by now +``` + +Keep in mind [tests](../../getting_started/tooling/testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: + +```rust +fn main(x : [Field]) { + assert(x[0] == 1); +} + +#[test] +fn test_one() { + main([1, 2]); +} +``` + +```bash +$ nargo test +[testing] Running 1 test functions +[testing] Testing test_one... ok +[testing] All tests passed + +$ nargo check +The application panicked (crashed). +Message: Cannot have variable sized arrays as a parameter to main +``` + +## Call Expressions + +Calling a function in Noir is executed by using the function name and passing in the necessary +arguments. + +Below we show how to call the `foo` function from the `main` function using a call expression: + +```rust +fn main(x : Field, y : Field) { + let z = foo(x); +} + +fn foo(x : Field) -> Field { + x + x +} +``` + +## Methods + +You can define methods in Noir on any struct type in scope. + +```rust +struct MyStruct { + foo: Field, + bar: Field, +} + +impl MyStruct { + fn new(foo: Field) -> MyStruct { + MyStruct { + foo, + bar: 2, + } + } + + fn sum(self) -> Field { + self.foo + self.bar + } +} + +fn main() { + let s = MyStruct::new(40); + assert(s.sum() == 42); +} +``` + +Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as +follows: + +```rust +assert(MyStruct::sum(s) == 42); +``` + +It is also possible to specialize which method is chosen depending on the [generic](./generics.md) type that is used. 
In this example, the `foo` function returns different values depending on its type:
+
+```rust
+struct Foo<T> {}
+
+impl Foo<Field> {
+    fn foo(self) -> Field { 1 }
+}
+
+impl Foo<u32> {
+    fn foo(self) -> Field { 2 }
+}
+
+fn main() {
+    let f1: Foo<Field> = Foo{};
+    let f2: Foo<u32> = Foo{};
+    assert(f1.foo() + f2.foo() == 3);
+}
+```
+
+Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo<Field>` and `Foo<u32>` like we do above, we cannot also define `foo` in an `impl<T> Foo<T>` since it would be ambiguous which version of `foo` to choose.
+
+```rust
+// Including this impl in the same project as the above snippet would
+// cause an overlapping impls error
+impl<T> Foo<T> {
+    fn foo(self) -> Field { 3 }
+}
+```
+
+## Lambdas
+
+Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`.
+
+```rust
+let add_50 = |val| val + 50;
+assert(add_50(100) == 150);
+```
+
+See [Lambdas](./lambdas.md) for more details.
+
+## Attributes
+
+Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`.
+
+Supported attributes include:
+
+- **builtin**: the function is implemented by the compiler, for efficiency purposes.
+- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function`
+- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details
+- **oracle**: mark the function as _oracle_, meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](../../reference/NoirJS/noir_js/index.md) for more details.
+- **test**: mark the function as a unit test. See [Tests](../../getting_started/tooling/testing.md) for more details
+
+### Field Attribute
+
+The field attribute defines which field the function is compatible with. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field.
+The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form.
+As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve.
+
+Example: we define the function `foo()` three times below. Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve.
+
+```rust
+#[field(bn254)]
+fn foo() -> u32 {
+    1
+}
+
+#[field(23)]
+fn foo() -> u32 {
+    2
+}
+
+// This commented code would not compile as foo would be defined twice because it is the same field as bn254
+// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)]
+// fn foo() -> u32 {
+//     2
+// }
+
+#[field(bls12_381)]
+fn foo() -> u32 {
+    3
+}
+```
+
+If the field name is not known to Noir, it will discard the function. Field names are case insensitive.
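+
+As a minimal sketch of how the other attributes listed above are applied (the function names here are just illustrative):
+
+```rust
+// Calling this function produces `warning: use of deprecated function`.
+#[deprecated]
+fn old_helper(x: Field) -> Field {
+    x + 1
+}
+
+// Run with `nargo test`; the test passes if no assertion fails.
+#[test]
+fn test_old_helper() {
+    assert(old_helper(1) == 2);
+}
+```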
diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/generics.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/generics.md
new file mode 100644
index 00000000000..ddd42bf1f9b
--- /dev/null
+++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/generics.md
@@ -0,0 +1,106 @@
+---
+title: Generics
+description: Learn how to use Generics in Noir
+keywords: [Noir, Rust, generics, functions, structs]
+sidebar_position: 7
+---
+
+Generics allow you to use the same functions with multiple different concrete data types. You can
+read more about the concept of generics in the Rust documentation
+[here](https://doc.rust-lang.org/book/ch10-01-syntax.html).
+
+Here is a trivial example showing the identity function that supports any type. In Rust, it is
+common to refer to the most general type as `T`. We follow the same convention in Noir.
+
+```rust
+fn id<T>(x: T) -> T {
+    x
+}
+```
+
+## In Structs
+
+Generics are useful for specifying types in structs. For example, we can specify that a field in a
+struct will be of a certain generic type. In this case `value` is of type `T`.
+
+```rust
+struct RepeatedValue<T> {
+    value: T,
+    count: Field,
+}
+
+impl<T> RepeatedValue<T> {
+    fn print(self) {
+        for _i in 0 .. self.count {
+            println(self.value);
+        }
+    }
+}
+
+fn main() {
+    let repeated = RepeatedValue { value: "Hello!", count: 2 };
+    repeated.print();
+}
+```
+
+The `print` function will print `Hello!` an arbitrary number of times, twice in this case.
+
+If we want to be generic over array lengths (which are type-level integers), we can use numeric
+generics. Using these looks just like using regular generics, but these generics can resolve to
+integers at compile-time, rather than resolving to types. Here's an example of a struct that is
+generic over the size of the array it contains internally:
+
+```rust
+struct BigInt<N> {
+    limbs: [u32; N],
+}
+
+impl<N> BigInt<N> {
+    // `N` is in scope of all methods in the impl
+    fn first(first: BigInt<N>, second: BigInt<N>) -> Self {
+        assert(first.limbs != second.limbs);
+        first
+    }
+
+    fn second(first: BigInt<N>, second: Self) -> Self {
+        assert(first.limbs != second.limbs);
+        second
+    }
+}
+```
+
+## Calling functions on generic parameters
+
+Since a generic type `T` can represent any type, how can we call functions on the underlying type?
+In other words, how can we go from "any type `T`" to "any type `T` that has certain methods available?"
+
+This is what [traits](../concepts/traits) are for in Noir. Here's an example of a function generic over
+any type `T` that implements the `Eq` trait for equality:
+
+```rust
+fn first_element_is_equal<T, N>(array1: [T; N], array2: [T; N]) -> bool
+    where T: Eq
+{
+    if (array1.len() == 0) | (array2.len() == 0) {
+        true
+    } else {
+        array1[0] == array2[0]
+    }
+}
+
+fn main() {
+    assert(first_element_is_equal([1, 2, 3], [1, 5, 6]));
+
+    // We can use first_element_is_equal for arrays of any type
+    // as long as we have an Eq impl for the types we pass in
+    let array = [MyStruct::new(), MyStruct::new()];
+    assert(first_element_is_equal(array, array));
+}
+
+impl Eq for MyStruct {
+    fn eq(self, other: MyStruct) -> bool {
+        self.foo == other.foo
+    }
+}
+```
+
+You can find more details on traits and trait implementations on the [traits page](../concepts/traits).
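+
+For completeness, here is a minimal usage sketch for the numeric-generic `BigInt` struct defined earlier (the concrete limb values are just illustrative); `N` is inferred from the length of the array literal:
+
+```rust
+fn main() {
+    // N is inferred as 3 from the three-element limb arrays.
+    let a = BigInt { limbs: [1, 2, 3] };
+    let b = BigInt { limbs: [4, 5, 6] };
+
+    // Methods in the impl can also be called as associated functions.
+    let first = BigInt::first(a, b);
+    assert(first.limbs == [1, 2, 3]);
+}
+```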
diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/globals.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/globals.md new file mode 100644 index 00000000000..063a3d89248 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/globals.md @@ -0,0 +1,72 @@ +--- +title: Global Variables +description: + Learn about global variables in Noir. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, globals, global variables, constants] +sidebar_position: 8 +--- + +## Globals + + +Noir supports global variables. The global's type can be inferred by the compiler entirely: + +```rust +global N = 5; // Same as `global N: Field = 5` + +global TUPLE = (3, 2); + +fn main() { + assert(N == 5); + assert(N == TUPLE.0 + TUPLE.1); +} +``` + +:::info + +Globals can be defined as any expression, so long as they don't depend on themselves - otherwise there would be a dependency cycle! For example: + +```rust +global T = foo(T); // dependency error +``` + +::: + + +If they are initialized to a literal integer, globals can be used to specify an array's length: + +```rust +global N: Field = 2; + +fn main(y : [Field; N]) { + assert(y[0] == y[1]) +} +``` + +A global from another module can be imported or referenced externally like any other name: + +```rust +global N = 20; + +fn main() { + assert(my_submodule::N != N); +} + +mod my_submodule { + global N: Field = 10; +} +``` + +When a global is used, Noir replaces the name with its definition on each occurrence. +This means globals defined using function calls will repeat the call each time they're used: + +```rust +global RESULT = foo(); + +fn foo() -> [Field; 100] { ... } +``` + +This is usually fine since Noir will generally optimize any function call that does not +refer to a program input into a constant. It should be kept in mind however, if the called +function performs side-effects like `println`, as these will still occur on each use. diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/lambdas.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/lambdas.md new file mode 100644 index 00000000000..be3c7e0b5ca --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/lambdas.md @@ -0,0 +1,81 @@ +--- +title: Lambdas +description: Learn how to use anonymous functions in Noir programming language. +keywords: [Noir programming language, lambda, closure, function, anonymous function] +sidebar_position: 9 +--- + +## Introduction + +Lambdas are anonymous functions. The syntax is `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +A block can be used as the body of a lambda, allowing you to declare local variables inside it: + +```rust +let cool = || { + let x = 100; + let y = 100; + x + y +} + +assert(cool() == 200); +``` + +## Closures + +Inside the body of a lambda, you can use variables defined in the enclosing function. Such lambdas are called **closures**. 
In this example `x` is defined inside `main` and is accessed from within the lambda: + +```rust +fn main() { + let x = 100; + let closure = || x + 150; + assert(closure() == 250); +} +``` + +## Passing closures to higher-order functions + +It may catch you by surprise that the following code fails to compile: + +```rust +fn foo(f: fn () -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // error :( +} +``` + +The reason is that the closure's capture environment affects its type - we have a closure that captures two Fields and `foo` +expects a regular function as an argument - those are incompatible. +:::note + +Variables contained within the `||` are the closure's parameters, and the expression that follows it is the closure's body. The capture environment is comprised of any variables used in the closure's body that are not parameters. + +E.g. in |x| x + y, y would be a captured variable, but x would not be, since it is a parameter of the closure. + +::: +The syntax for the type of a closure is `fn[env](args) -> ret_type`, where `env` is the capture environment of the closure - +in this example that's `(Field, Field)`. + +The best solution in our case is to make `foo` generic over the environment type of its parameter, so that it can be called +with closures with any environment, as well as with regular functions: + +```rust +fn foo(f: fn[Env]() -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // compiles fine + assert(foo(|| 60) == 60); // compiles fine +} +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/mutability.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/mutability.md new file mode 100644 index 00000000000..fdeef6a87c5 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/mutability.md @@ -0,0 +1,121 @@ +--- +title: Mutability +description: + Learn about mutable variables in Noir. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, mutability in noir, mutable variables] +sidebar_position: 8 +--- + +Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +let y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc. +let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. + +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: i32) { + x = 4; +} +``` + +## Non-local mutability + +Non-local mutability can be achieved through the mutable reference type `&mut T`: + +```rust +fn set_to_zero(x: &mut Field) { + *x = 0; +} + +fn main() { + let mut y = 42; + set_to_zero(&mut y); + assert(*y == 0); +} +``` + +When creating a mutable reference, the original variable being referred to (`y` in this +example) must also be mutable. Since mutable references are a reference type, they must +be explicitly dereferenced via `*` to retrieve the underlying value. 
Note that this yields +a copy of the value, so mutating this copy will not change the original value behind the +reference: + +```rust +fn main() { + let mut x = 1; + let x_ref = &mut x; + + let mut y = *x_ref; + let y_ref = &mut y; + + x = 2; + *x_ref = 3; + + y = 4; + *y_ref = 5; + + assert(x == 3); + assert(*x_ref == 3); + assert(y == 5); + assert(*y_ref == 5); +} +``` + +Note that types in Noir are actually deeply immutable so the copy that occurs when +dereferencing is only a conceptual copy - no additional constraints will occur. + +Mutable references can also be stored within structs. Note that there is also +no lifetime parameter on these unlike rust. This is because the allocated memory +always lasts the entire program - as if it were an array of one element. + +```rust +struct Foo { + x: &mut Field +} + +impl Foo { + fn incr(mut self) { + *self.x += 1; + } +} + +fn main() { + let foo = Foo { x: &mut 0 }; + foo.incr(); + assert(*foo.x == 1); +} +``` + +In general, you should avoid non-local & shared mutability unless it is needed. Sticking +to only local mutability will improve readability and potentially improve compiler optimizations as well. diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/ops.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/ops.md new file mode 100644 index 00000000000..60425cb8994 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/ops.md @@ -0,0 +1,98 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +sidebar_position: 3 +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | +| >> | Right shift an integer by another integer amount | Types must be integer | +| ! 
| Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. + +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/oracles.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/oracles.md new file mode 100644 index 00000000000..2e6a6818d48 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/oracles.md @@ -0,0 +1,23 @@ +--- +title: Oracles +description: Dive into how Noir supports Oracles via RPC calls, and learn how to declare an Oracle in Noir with our comprehensive guide. +keywords: + - Noir + - Oracles + - RPC Calls + - Unconstrained Functions + - Programming + - Blockchain +sidebar_position: 6 +--- + +Noir has support for Oracles via RPC calls. This means Noir will make an RPC call and use the return value for proof generation. + +Since Oracles are not resolved by Noir, they are [`unconstrained` functions](./unconstrained.md) + +You can declare an Oracle through the `#[oracle()]` flag. Example: + +```rust +#[oracle(get_number_sequence)] +unconstrained fn get_number_sequence(_size: Field) -> [Field] {} +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/shadowing.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/shadowing.md new file mode 100644 index 00000000000..5ce6130d201 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/shadowing.md @@ -0,0 +1,44 @@ +--- +title: Shadowing +sidebar_position: 12 +--- + +Noir allows for inheriting variables' values and re-declaring them with the same name similar to Rust, known as shadowing. 
+ +For example, the following function is valid in Noir: + +```rust +fn main() { + let x = 5; + + { + let x = x * 2; + assert (x == 10); + } + + assert (x == 5); +} +``` + +In this example, a variable x is first defined with the value 5. + +The local scope that follows shadows the original x, i.e. creates a local mutable x based on the value of the original x. It is given a value of 2 times the original x. + +When we return to the main scope, x once again refers to just the original x, which stays at the value of 5. + +## Temporal mutability + +One way that shadowing is useful, in addition to ergonomics across scopes, is for temporarily mutating variables. + +```rust +fn main() { + let age = 30; + // age = age + 5; // Would error as `age` is immutable by default. + + let mut age = age + 5; // Temporarily mutates `age` with a new value. + + let age = age; // Locks `age`'s mutability again. + + assert (age == 35); +} +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/traits.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/traits.md new file mode 100644 index 00000000000..ef1445a5907 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/traits.md @@ -0,0 +1,389 @@ +--- +title: Traits +description: + Traits in Noir can be used to abstract out a common interface for functions across + several data types. +keywords: [noir programming language, traits, interfaces, generic, protocol] +sidebar_position: 14 +--- + +## Overview + +Traits in Noir are a useful abstraction similar to interfaces or protocols in other languages. Each trait defines +the interface of several methods contained within the trait. Types can then implement this trait by providing +implementations for these methods. For example in the program: + +```rust +struct Rectangle { + width: Field, + height: Field, +} + +impl Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +fn log_area(r: Rectangle) { + println(r.area()); +} +``` + +We have a function `log_area` to log the area of a `Rectangle`. Now how should we change the program if we want this +function to work on `Triangle`s as well?: + +```rust +struct Triangle { + width: Field, + height: Field, +} + +impl Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Making `log_area` generic over all types `T` would be invalid since not all types have an `area` method. Instead, we can +introduce a new `Area` trait and make `log_area` generic over all types `T` that implement `Area`: + +```rust +trait Area { + fn area(self) -> Field; +} + +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +We also need to explicitly implement `Area` for `Rectangle` and `Triangle`. We can do that by changing their existing +impls slightly. Note that the parameter types and return type of each of our `area` methods must match those defined +by the `Area` trait. + +```rust +impl Area for Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +impl Area for Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Now we have a working program that is generic over any type of Shape that is used! Others can even use this program +as a library with their own types - such as `Circle` - as long as they also implement `Area` for these types. + +## Where Clauses + +As seen in `log_area` above, when we want to create a function or method that is generic over any type that implements +a trait, we can add a where clause to the generic function. 
+ +```rust +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +It is also possible to apply multiple trait constraints on the same variable at once by combining traits with the `+` +operator. Similarly, we can have multiple trait constraints by separating each with a comma: + +```rust +fn foo(elements: [T], thing: U) where + T: Default + Add + Eq, + U: Bar, +{ + let mut sum = T::default(); + + for element in elements { + sum += element; + } + + if sum == T::default() { + thing.bar(); + } +} +``` + +## Generic Implementations + +You can add generics to a trait implementation by adding the generic list after the `impl` keyword: + +```rust +trait Second { + fn second(self) -> Field; +} + +impl Second for (T, Field) { + fn second(self) -> Field { + self.1 + } +} +``` + +You can also implement a trait for every type this way: + +```rust +trait Debug { + fn debug(self); +} + +impl Debug for T { + fn debug(self) { + println(self); + } +} + +fn main() { + 1.debug(); +} +``` + +### Generic Trait Implementations With Where Clauses + +Where clauses can also be placed on trait implementations themselves to restrict generics in a similar way. +For example, while `impl Foo for T` implements the trait `Foo` for every type, `impl Foo for T where T: Bar` +will implement `Foo` only for types that also implement `Bar`. This is often used for implementing generic types. +For example, here is the implementation for array equality: + +```rust +impl Eq for [T; N] where T: Eq { + // Test if two arrays have the same elements. + // Because both arrays must have length N, we know their lengths already match. + fn eq(self, other: Self) -> bool { + let mut result = true; + + for i in 0 .. self.len() { + // The T: Eq constraint is needed to call == on the array elements here + result &= self[i] == other[i]; + } + + result + } +} +``` + +## Generic Traits + +Traits themselves can also be generic by placing the generic arguments after the trait name. These generics are in +scope of every item within the trait. + +```rust +trait Into { + // Convert `self` to type `T` + fn into(self) -> T; +} +``` + +When implementing generic traits the generic arguments of the trait must be specified. This is also true anytime +when referencing a generic trait (e.g. in a `where` clause). + +```rust +struct MyStruct { + array: [Field; 2], +} + +impl Into<[Field; 2]> for MyStruct { + fn into(self) -> [Field; 2] { + self.array + } +} + +fn as_array(x: T) -> [Field; 2] + where T: Into<[Field; 2]> +{ + x.into() +} + +fn main() { + let array = [1, 2]; + let my_struct = MyStruct { array }; + + assert_eq(as_array(my_struct), array); +} +``` + +## Trait Methods With No `self` + +A trait can contain any number of methods, each of which have access to the `Self` type which represents each type +that eventually implements the trait. Similarly, the `self` variable is available as well but is not required to be used. +For example, we can define a trait to create a default value for a type. This trait will need to return the `Self` type +but doesn't need to take any parameters: + +```rust +trait Default { + fn default() -> Self; +} +``` + +Implementing this trait can be done similarly to any other trait: + +```rust +impl Default for Field { + fn default() -> Field { + 0 + } +} + +struct MyType {} + +impl Default for MyType { + fn default() -> Field { + MyType {} + } +} +``` + +However, since there is no `self` parameter, we cannot call it via the method call syntax `object.method()`. 
+Instead, we'll need to refer to the function directly. This can be done either by referring to the +specific impl `MyType::default()` or referring to the trait itself `Default::default()`. In the later +case, type inference determines the impl that is selected. + +```rust +let my_struct = MyStruct::default(); + +let x: Field = Default::default(); +let result = x + Default::default(); +``` + +:::warning + +```rust +let _ = Default::default(); +``` + +If type inference cannot select which impl to use because of an ambiguous `Self` type, an impl will be +arbitrarily selected. This occurs most often when the result of a trait function call with no parameters +is unused. To avoid this, when calling a trait function with no `self` or `Self` parameters or return type, +always refer to it via the implementation type's namespace - e.g. `MyType::default()`. +This is set to change to an error in future Noir versions. + +::: + +## Default Method Implementations + +A trait can also have default implementations of its methods by giving a body to the desired functions. +Note that this body must be valid for all types that may implement the trait. As a result, the only +valid operations on `self` will be operations valid for any type or other operations on the trait itself. + +```rust +trait Numeric { + fn add(self, other: Self) -> Self; + + // Default implementation of double is (self + self) + fn double(self) -> Self { + self.add(self) + } +} +``` + +When implementing a trait with default functions, a type may choose to implement only the required functions: + +```rust +impl Numeric for Field { + fn add(self, other: Field) -> Field { + self + other + } +} +``` + +Or it may implement the optional methods as well: + +```rust +impl Numeric for u32 { + fn add(self, other: u32) -> u32 { + self + other + } + + fn double(self) -> u32 { + self * 2 + } +} +``` + +## Impl Specialization + +When implementing traits for a generic type it is possible to implement the trait for only a certain combination +of generics. This can be either as an optimization or because those specific generics are required to implement the trait. + +```rust +trait Sub { + fn sub(self, other: Self) -> Self; +} + +struct NonZero { + value: T, +} + +impl Sub for NonZero { + fn sub(self, other: Self) -> Self { + let value = self.value - other.value; + assert(value != 0); + NonZero { value } + } +} +``` + +## Overlapping Implementations + +Overlapping implementations are disallowed by Noir to ensure Noir's decision on which impl to select is never ambiguous. +This means if a trait `Foo` is already implemented +by a type `Bar` for all `T`, then we cannot also have a separate impl for `Bar` (or any other +type argument). Similarly, if there is an impl for all `T` such as `impl Debug for T`, we cannot create +any more impls to `Debug` for other types since it would be ambiguous which impl to choose for any given +method call. + +```rust +trait Trait {} + +// Previous impl defined here +impl Trait for (A, B) {} + +// error: Impl for type `(Field, Field)` overlaps with existing impl +impl Trait for (Field, Field) {} +``` + +## Trait Coherence + +Another restriction on trait implementations is coherence. This restriction ensures other crates cannot create +impls that may overlap with other impls, even if several unrelated crates are used as dependencies in the same +program. + +The coherence restriction is: to implement a trait, either the trait itself or the object type must be declared +in the crate the impl is in. 
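+
+In code, the rule looks roughly like the following sketch (not taken from the original docs; `dep::some_library::Foo` stands in for any type defined in another crate, and `Describe` is a made-up local trait):
+
+```rust
+// Allowed: `Describe` is declared in this crate, even though `Foo` is not.
+trait Describe {
+    fn describe(self) -> Field;
+}
+
+impl Describe for dep::some_library::Foo {
+    fn describe(self) -> Field {
+        1
+    }
+}
+
+// Not allowed: neither `Default` (declared in the standard library) nor
+// `Foo` (declared in `some_library`) lives in this crate.
+// impl Default for dep::some_library::Foo { ... }
+```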
+ +In practice this often comes up when using types provided by libraries. If a library provides a type `Foo` that does +not implement a trait in the standard library such as `Default`, you may not `impl Default for Foo` in your own crate. +While restrictive, this prevents later issues or silent changes in the program if the `Foo` library later added its +own impl for `Default`. If you are a user of the `Foo` library in this scenario and need a trait not implemented by the +library your choices are to either submit a patch to the library or use the newtype pattern. + +### The Newtype Pattern + +The newtype pattern gets around the coherence restriction by creating a new wrapper type around the library type +that we cannot create `impl`s for. Since the new wrapper type is defined in our current crate, we can create +impls for any trait we need on it. + +```rust +struct Wrapper { + foo: dep::some_library::Foo, +} + +impl Default for Wrapper { + fn default() -> Wrapper { + Wrapper { + foo: dep::some_library::Foo::new(), + } + } +} +``` + +Since we have an impl for our own type, the behavior of this code will not change even if `some_library` is updated +to provide its own `impl Default for Foo`. The downside of this pattern is that it requires extra wrapping and +unwrapping of values when converting to and from the `Wrapper` and `Foo` types. diff --git a/docs/versioned_docs/version-v0.25.0/noir/concepts/unconstrained.md b/docs/versioned_docs/version-v0.25.0/noir/concepts/unconstrained.md new file mode 100644 index 00000000000..89d12c1c971 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/concepts/unconstrained.md @@ -0,0 +1,95 @@ +--- +title: Unconstrained Functions +description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." + +keywords: [Noir programming language, unconstrained, open] +sidebar_position: 5 +--- + +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. + +## Why? + +Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. + +Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. + +Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. + +A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. + +## Example + +An in depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. + +Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. 
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8;
+    }
+
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91
+Backend circuit size: 3619
+```
+
+A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However, we can save a bunch of gates by casting to `u8` a bit earlier. This automatically truncates the bit-shifted value to fit in a `u8`, which allows us to remove the AND against `0xff`. This saves us ~480 gates in total.
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8))) as u8;
+    }
+
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75
+Backend circuit size: 3143
+```
+
+Those are some nice savings already, but we can do better. This code is fully constrained, so we're proving every step of calculating `out` from `num`, but we don't actually care about how we calculate it, just that it's correct. This is where brillig comes in.
+
+It turns out that truncating a `u72` into a `u8` is hard to do inside a snark: each time we write `as u8` we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate `num` from `out` than the other way around. All we need to do is multiply each element of `out` by a constant and add them all together, both relatively easy operations inside a snark.
+
+We can then run `u72_to_u8` as unconstrained brillig code in order to calculate `out`, then use that result in our constrained function and assert that reversing the calculation gives us back `num`. This looks a little like the below:
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    // Compute the bytes without constraints...
+    let out = u72_to_u8(num);
+
+    // ...then constrain only the cheap reverse direction: rebuilding `num` from `out`.
+    let mut reconstructed_num: u72 = 0;
+    for i in 0..8 {
+        reconstructed_num += (out[i] as u72 << (56 - (8 * i)));
+    }
+    assert(num == reconstructed_num);
+    out
+}
+
+unconstrained fn u72_to_u8(num: u72) -> [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8))) as u8;
+    }
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78
+Backend circuit size: 2902
+```
+
+This ends up taking another ~250 gates off our circuit! We've ended up with more ACIR opcodes than before, but they're easier for the backend to prove (resulting in fewer gates).
+
+Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate the square root of a number, it would be a much better idea to calculate this in brillig and then assert that squaring the result gives back your number.
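+
+To make that last point concrete, a sketch of the square-root idea might look like the following (not from the original docs: the `u32` types, the `0..65536` search bound and the assumption that `n` is a perfect square are all illustrative choices):
+
+```rust
+unconstrained fn compute_sqrt(n: u32) -> u32 {
+    // This brute-force search only ever runs as brillig, so it adds no constraints.
+    // The 0..65536 bound covers every u32 perfect square but is otherwise arbitrary.
+    let mut root: u32 = 0;
+    for i in 0..65536 {
+        if i * i == n {
+            root = i;
+        }
+    }
+    root
+}
+
+fn main(n: u32) -> pub u32 {
+    // Compute the root outside the circuit...
+    let root = compute_sqrt(n);
+    // ...then lay down the single cheap constraint that verifies it.
+    assert(root * root == n);
+    root
+}
+```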
diff --git a/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/_category_.json b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/_category_.json new file mode 100644 index 00000000000..1debcfe7675 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Modules, Packages and Crates", + "position": 2, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/crates_and_packages.md new file mode 100644 index 00000000000..95ee9f52ab2 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/crates_and_packages.md @@ -0,0 +1,43 @@ +--- +title: Crates and Packages +description: Learn how to use Crates and Packages in your Noir project +keywords: [Nargo, dependencies, package management, crates, package] +sidebar_position: 0 +--- + +## Crates + +A crate is the smallest amount of code that the Noir compiler considers at a time. +Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. + +### Crate Types + +A Noir crate can come in several forms: binaries, libraries or contracts. + +#### Binaries + +_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. + +#### Libraries + +_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. + +#### Contracts + +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/noir-projects/noir-contracts/contracts). + +### Crate Root + +Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. + +## Packages + +A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. + +A package _must_ contain either a library or a binary crate, but not both. + +### Differences from Cargo Packages + +One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. + +In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. 
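+
+For reference, a minimal Nargo.toml for a binary package might look like the following sketch (the field values are illustrative rather than taken from a real project):
+
+```toml
+[package]
+name = "my_program"
+type = "bin"        # "lib" for a library crate, "contract" for a contract crate
+authors = ["you"]
+compiler_version = ">=0.25.0"
+
+[dependencies]
+```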
diff --git a/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/dependencies.md new file mode 100644 index 00000000000..04c1703d929 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/dependencies.md @@ -0,0 +1,124 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. +keywords: [Nargo, dependencies, GitHub, package management, versioning] +sidebar_position: 1 +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag to a specific commit and the git url to the url containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag` , there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "noir-contracts/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine. + +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── lib_a + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +lib_a = { path = "../lib_a" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. For example, to import the +ecrecover-noir library and local lib_a referenced above: + +```rust +use dep::ecrecover; +use dep::lib_a; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use dep::std::hash::sha256; +use dep::std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. 
If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use dep::phy_vector; + +fn main(x : Field, y : pub Field) { + //... + let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/modules.md new file mode 100644 index 00000000000..ae822a1cff4 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/modules.md @@ -0,0 +1,105 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module system. Examples included. +keywords: [Noir, Rust, modules, organizing files, sub-modules] +sidebar_position: 2 +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. + +## Purpose of Modules + +Modules are used to organize files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. 
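+
+For instance, if `foo` defines a `from_foo` function as in the first example, `src/bar.nr` could import and call it like so (a sketch, since the contents of `bar` are not shown above):
+
+```rust
+// src/bar.nr
+use crate::foo::from_foo;
+
+fn from_bar() {
+    // `bar` reaches into `foo` through the `crate::` namespace imported above.
+    from_foo();
+}
+```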
+ +### Sub-modules + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +fn from_bar() {} +``` + +In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule +of `foo` hence we declare bar in `foo.nr` with `mod bar`. Since `foo` is not the crate root, the +compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` + +Visually the module hierarchy looks as follows: + +``` +crate + ├── main + │ + └── foo + ├── from_foo + └── bar + └── from_bar +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/workspaces.md b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/workspaces.md new file mode 100644 index 00000000000..67a1dafa372 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/modules_packages_crates/workspaces.md @@ -0,0 +1,40 @@ +--- +title: Workspaces +sidebar_position: 3 +--- + +Workspaces are a feature of nargo that allow you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations. + +Each Noir project (with it's own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`. + +For a project with the following structure: + +```tree +├── crates +│   ├── a +│   │   ├── Nargo.toml +│   │   └── src +│   │   └── main.nr +│   └── b +│   ├── Nargo.toml +│   └── src +│   └── main.nr +├── Nargo.toml +└── Prover.toml +``` + +You can define a workspace in Nargo.toml like so: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. + +`default-member` indicates which package various commands process by default. + +Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/_category_.json b/docs/versioned_docs/version-v0.25.0/noir/standard_library/_category_.json new file mode 100644 index 00000000000..af04c0933fd --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Standard Library", + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/black_box_fns.md new file mode 100644 index 00000000000..eae8744abf0 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/black_box_fns.md @@ -0,0 +1,31 @@ +--- +title: Black Box Functions +description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. +keywords: [noir, black box functions] +--- + +Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. 
This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. + +The ACVM spec defines a set of blackbox functions which backends will be expected to implement. This allows backends to use optimized implementations of these constraints if they have them, however they may also fallback to less efficient naive implementations if not. + +## Function list + +Here is a list of the current black box functions: + +- [SHA256](./cryptographic_primitives/hashes#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr) +- [Blake2s](./cryptographic_primitives/hashes#blake2s) +- [Blake3](./cryptographic_primitives/hashes#blake3) +- [Pedersen Hash](./cryptographic_primitives/hashes#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes#pedersen_commitment) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) +- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes#keccak256) +- [Recursive proof verification](./recursion) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/bn254.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/bn254.md new file mode 100644 index 00000000000..3294f005dbb --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/bn254.md @@ -0,0 +1,46 @@ +--- +title: Bn254 Field Library +--- + +Noir provides a module in standard library with some optimized functions for bn254 Fr in `std::field::bn254`. + +## decompose + +```rust +fn decompose(x: Field) -> (Field, Field) {} +``` + +Decomposes a single field into two fields, low and high. The low field contains the lower 16 bytes of the input field and the high field contains the upper 16 bytes of the input field. Both field results are range checked to 128 bits. + + +## assert_gt + +```rust +fn assert_gt(a: Field, b: Field) {} +``` + +Asserts that a > b. This will generate less constraints than using `assert(gt(a, b))`. + +## assert_lt + +```rust +fn assert_lt(a: Field, b: Field) {} +``` + +Asserts that a < b. This will generate less constraints than using `assert(lt(a, b))`. + +## gt + +```rust +fn gt(a: Field, b: Field) -> bool {} +``` + +Returns true if a > b. + +## lt + +```rust +fn lt(a: Field, b: Field) -> bool {} +``` + +Returns true if a < b. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/boundedvec.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/boundedvec.md new file mode 100644 index 00000000000..ce4529f6e57 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/boundedvec.md @@ -0,0 +1,326 @@ +--- +title: Bounded Vectors +keywords: [noir, vector, bounded vector, slice] +sidebar_position: 1 +--- + +A `BoundedVec` is a growable storage similar to a `Vec` except that it +is bounded with a maximum possible length. 
Unlike `Vec`, `BoundedVec` is not implemented +via slices and thus is not subject to the same restrictions slices are (notably, nested +slices - and thus nested vectors as well - are disallowed). + +Since a BoundedVec is backed by a normal array under the hood, growing the BoundedVec by +pushing an additional element is also more efficient - the length only needs to be increased +by one. + +For these reasons `BoundedVec` should generally be preferred over `Vec` when there +is a reasonable maximum bound that can be placed on the vector. + +Example: + +```rust +let mut vector: BoundedVec = BoundedVec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +assert(vector.max_len() == 10); +``` + +## Methods + +### new + +```rust +pub fn new() -> Self +``` + +Creates a new, empty vector of length zero. + +Since this container is backed by an array internally, it still needs an initial value +to give each element. To resolve this, each element is zeroed internally. This value +is guaranteed to be inaccessible unless `get_unchecked` is used. + +Example: + +```rust +let empty_vector: BoundedVec = BoundedVec::new(); +assert(empty_vector.len() == 0); +``` + +Note that whenever calling `new` the maximum length of the vector should always be specified +via a type signature: + +```rust title="new_example" showLineNumbers +fn foo() -> BoundedVec { + // Ok! MaxLen is specified with a type annotation + let v1: BoundedVec = BoundedVec::new(); + let v2 = BoundedVec::new(); + + // Ok! MaxLen is known from the type of foo's return value + v2 +} + +fn bad() { + let mut v3 = BoundedVec::new(); + + // Not Ok! We don't know if v3's MaxLen is at least 1, and the compiler often infers 0 by default. + v3.push(5); +} +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L11-L27 + + +This defaulting of `MaxLen` (and numeric generics in general) to zero may change in future noir versions +but for now make sure to use type annotations when using bounded vectors. Otherwise, you will receive a constraint failure at runtime when the vec is pushed to. + +### get + +```rust +pub fn get(mut self: Self, index: u64) -> T { +``` + +Retrieves an element from the vector at the given index, starting from zero. + +If the given index is equal to or greater than the length of the vector, this +will issue a constraint failure. + +Example: + +```rust +fn foo(v: BoundedVec) { + let first = v.get(0); + let last = v.get(v.len() - 1); + assert(first != last); +} +``` + +### get_unchecked + +```rust +pub fn get_unchecked(mut self: Self, index: u64) -> T { +``` + +Retrieves an element from the vector at the given index, starting from zero, without +performing a bounds check. + +Since this function does not perform a bounds check on length before accessing the element, +it is unsafe! Use at your own risk! + +Example: + +```rust title="get_unchecked_example" showLineNumbers +fn sum_of_first_three(v: BoundedVec) -> u32 { + // Always ensure the length is larger than the largest + // index passed to get_unchecked + assert(v.len() > 2); + let first = v.get_unchecked(0); + let second = v.get_unchecked(1); + let third = v.get_unchecked(2); + first + second + third +} +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L54-L64 + + + +### push + +```rust +pub fn push(&mut self, elem: T) { +``` + +Pushes an element to the end of the vector. This increases the length +of the vector by one. + +Panics if the new length of the vector will be greater than the max length. 
+ +Example: + +```rust title="bounded-vec-push-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + + v.push(1); + v.push(2); + + // Panics with failed assertion "push out of bounds" + v.push(3); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L68-L76 + + +### pop + +```rust +pub fn pop(&mut self) -> T +``` + +Pops the element at the end of the vector. This will decrease the length +of the vector by one. + +Panics if the vector is empty. + +Example: + +```rust title="bounded-vec-pop-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + v.push(1); + v.push(2); + + let two = v.pop(); + let one = v.pop(); + + assert(two == 2); + assert(one == 1); + // error: cannot pop from an empty vector + // let _ = v.pop(); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L81-L93 + + +### len + +```rust +pub fn len(self) -> u64 { +``` + +Returns the current length of this vector + +Example: + +```rust title="bounded-vec-len-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + assert(v.len() == 0); + + v.push(100); + assert(v.len() == 1); + + v.push(200); + v.push(300); + v.push(400); + assert(v.len() == 4); + + let _ = v.pop(); + let _ = v.pop(); + assert(v.len() == 2); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L98-L113 + + +### max_len + +```rust +pub fn max_len(_self: BoundedVec) -> u64 { +``` + +Returns the maximum length of this vector. This is always +equal to the `MaxLen` parameter this vector was initialized with. + +Example: + +```rust title="bounded-vec-max-len-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + + assert(v.max_len() == 5); + v.push(10); + assert(v.max_len() == 5); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L118-L124 + + +### storage + +```rust +pub fn storage(self) -> [T; MaxLen] { +``` + +Returns the internal array within this vector. +Since arrays in Noir are immutable, mutating the returned storage array will not mutate +the storage held internally by this vector. + +Note that uninitialized elements may be zeroed out! + +Example: + +```rust title="bounded-vec-storage-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + + assert(v.storage() == [0, 0, 0, 0, 0]); + + v.push(57); + assert(v.storage() == [57, 0, 0, 0, 0]); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L129-L136 + + +### extend_from_array + +```rust +pub fn extend_from_array(&mut self, array: [T; Len]) +``` + +Pushes each element from the given array to this vector. + +Panics if pushing each element would cause the length of this vector +to exceed the maximum length. + +Example: + +```rust title="bounded-vec-extend-from-array-example" showLineNumbers +let mut vec: BoundedVec = BoundedVec::new(); + vec.extend_from_array([2, 4]); + + assert(vec.len == 2); + assert(vec.get(0) == 2); + assert(vec.get(1) == 4); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L141-L148 + + +### extend_from_bounded_vec + +```rust +pub fn extend_from_bounded_vec(&mut self, vec: BoundedVec) +``` + +Pushes each element from the other vector to this vector. The length of +the other vector is left unchanged. + +Panics if pushing each element would cause the length of this vector +to exceed the maximum length. 
+ +Example: + +```rust title="bounded-vec-extend-from-bounded-vec-example" showLineNumbers +let mut v1: BoundedVec = BoundedVec::new(); + let mut v2: BoundedVec = BoundedVec::new(); + + v2.extend_from_array([1, 2, 3]); + v1.extend_from_bounded_vec(v2); + + assert(v1.storage() == [1, 2, 3, 0, 0]); + assert(v2.storage() == [1, 2, 3, 0, 0, 0, 0]); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L153-L162 + + +### any + +```rust +pub fn any(self, predicate: fn[Env](T) -> bool) -> bool +``` + +Returns true if the given predicate returns true for any element +in this vector. + +Example: + +```rust title="bounded-vec-any-example" showLineNumbers +let mut v: BoundedVec = BoundedVec::new(); + v.extend_from_array([2, 4, 6]); + + let all_even = !v.any(|elem: u32| elem % 2 != 0); + assert(all_even); +``` +> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L229-L235 + diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/hashmap.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/hashmap.md new file mode 100644 index 00000000000..91604af765d --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/hashmap.md @@ -0,0 +1,569 @@ +--- +title: HashMap +keywords: [noir, map, hash, hashmap] +sidebar_position: 1 +--- + +`HashMap` is used to efficiently store and look up key-value pairs. + +`HashMap` is a bounded type which can store anywhere from zero to `MaxLen` total elements. +Note that due to hash collisions, the actual maximum number of elements stored by any particular +hashmap is likely lower than `MaxLen`. This is true even with cryptographic hash functions since +every hash value will be performed modulo `MaxLen`. + +When creating `HashMap`s, the `MaxLen` generic should always be specified if it is not already +known. Otherwise, the compiler may infer a different value for `MaxLen` (such as zero), which +will likely change the result of the program. This behavior is set to become an error in future +versions instead. + +Example: + +```rust +// Create a mapping from Fields to u32s with a maximum length of 12 +// using a pedersen hash +let mut map: HashMap> = HashMap::default(); + +map.insert(1, 2); +map.insert(3, 4); + +let two = map.get(1).unwrap(); +``` + +## Methods + +### default + +```rust title="default" showLineNumbers +impl Default for HashMap +where + B: BuildHasher + Default, + H: Hasher + Default +{ + fn default() -> Self { +``` +> Source code: noir_stdlib/src/collections/map.nr#L462-L469 + + +Creates a fresh, empty HashMap. + +When using this function, always make sure to specify the maximum size of the hash map. + +This is the same `default` from the `Default` implementation given further below. It is +repeated here for convenience since it is the recommended way to create a hashmap. 
+ +Example: + +```rust title="default_example" showLineNumbers +let hashmap: HashMap> = HashMap::default(); + assert(hashmap.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L202-L205 + + +Because `HashMap` has so many generic arguments that are likely to be the same throughout +your program, it may be helpful to create a type alias: + +```rust title="type_alias" showLineNumbers +type MyMap = HashMap>; +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L196-L198 + + +### with_hasher + +```rust title="with_hasher" showLineNumbers +pub fn with_hasher(_build_hasher: B) -> Self + where + B: BuildHasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L82-L86 + + +Creates a hashmap with an existing `BuildHasher`. This can be used to ensure multiple +hashmaps are created with the same hasher instance. + +Example: + +```rust title="with_hasher_example" showLineNumbers +let my_hasher: BuildHasherDefault = Default::default(); + let hashmap: HashMap> = HashMap::with_hasher(my_hasher); + assert(hashmap.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L207-L211 + + +### get + +```rust title="get" showLineNumbers +pub fn get( + self, + key: K + ) -> Option + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L278-L287 + + +Retrieves a value from the hashmap, returning `Option::none()` if it was not found. + +Example: + +```rust title="get_example" showLineNumbers +fn get_example(map: HashMap>) { + let x = map.get(12); + + if x.is_some() { + assert(x.unwrap() == 42); + } +} +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L299-L307 + + +### insert + +```rust title="insert" showLineNumbers +pub fn insert( + &mut self, + key: K, + value: V + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L313-L323 + + +Inserts a new key-value pair into the map. If the key was already in the map, its +previous value will be overridden with the newly provided one. + +Example: + +```rust title="insert_example" showLineNumbers +let mut map: HashMap> = HashMap::default(); + map.insert(12, 42); + assert(map.len() == 1); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L213-L217 + + +### remove + +```rust title="remove" showLineNumbers +pub fn remove( + &mut self, + key: K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L356-L365 + + +Removes the given key-value pair from the map. If the key was not already present +in the map, this does nothing. + +Example: + +```rust title="remove_example" showLineNumbers +map.remove(12); + assert(map.is_empty()); + + // If a key was not present in the map, remove does nothing + map.remove(12); + assert(map.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L221-L228 + + +### is_empty + +```rust title="is_empty" showLineNumbers +pub fn is_empty(self) -> bool { +``` +> Source code: noir_stdlib/src/collections/map.nr#L115-L117 + + +True if the length of the hash map is empty. 
+ +Example: + +```rust title="is_empty_example" showLineNumbers +assert(map.is_empty()); + + map.insert(1, 2); + assert(!map.is_empty()); + + map.remove(1); + assert(map.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L230-L238 + + +### len + +```rust title="len" showLineNumbers +pub fn len(self) -> u64 { +``` +> Source code: noir_stdlib/src/collections/map.nr#L264-L266 + + +Returns the current length of this hash map. + +Example: + +```rust title="len_example" showLineNumbers +// This is equivalent to checking map.is_empty() + assert(map.len() == 0); + + map.insert(1, 2); + map.insert(3, 4); + map.insert(5, 6); + assert(map.len() == 3); + + // 3 was already present as a key in the hash map, so the length is unchanged + map.insert(3, 7); + assert(map.len() == 3); + + map.remove(1); + assert(map.len() == 2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L240-L255 + + +### capacity + +```rust title="capacity" showLineNumbers +pub fn capacity(_self: Self) -> u64 { +``` +> Source code: noir_stdlib/src/collections/map.nr#L271-L273 + + +Returns the maximum capacity of this hashmap. This is always equal to the capacity +specified in the hashmap's type. + +Unlike hashmaps in general purpose programming languages, hashmaps in Noir have a +static capacity that does not increase as the map grows larger. Thus, this capacity +is also the maximum possible element count that can be inserted into the hashmap. +Due to hash collisions (modulo the hashmap length), it is likely the actual maximum +element count will be lower than the full capacity. + +Example: + +```rust title="capacity_example" showLineNumbers +let empty_map: HashMap> = HashMap::default(); + assert(empty_map.len() == 0); + assert(empty_map.capacity() == 42); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L257-L261 + + +### clear + +```rust title="clear" showLineNumbers +pub fn clear(&mut self) { +``` +> Source code: noir_stdlib/src/collections/map.nr#L93-L95 + + +Clears the hashmap, removing all key-value pairs from it. + +Example: + +```rust title="clear_example" showLineNumbers +assert(!map.is_empty()); + map.clear(); + assert(map.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L263-L267 + + +### contains_key + +```rust title="contains_key" showLineNumbers +pub fn contains_key( + self, + key: K + ) -> bool + where + K: Hash + Eq, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L101-L110 + + +True if the hashmap contains the given key. Unlike `get`, this will not also return +the value associated with the key. + +Example: + +```rust title="contains_key_example" showLineNumbers +if map.contains_key(7) { + let value = map.get(7); + assert(value.is_some()); + } else { + println("No value for key 7!"); + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L269-L276 + + +### entries + +```rust title="entries" showLineNumbers +pub fn entries(self) -> BoundedVec<(K, V), N> { +``` +> Source code: noir_stdlib/src/collections/map.nr#L123-L125 + + +Returns a vector of each key-value pair present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. 
+ +Example: + +```rust title="entries_example" showLineNumbers +let entries = map.entries(); + + // The length of a hashmap may not be compile-time known, so we + // need to loop over its capacity instead + for i in 0..map.capacity() { + if i < entries.len() { + let (key, value) = entries.get(i); + println(f"{key} -> {value}"); + } + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L310-L321 + + +### keys + +```rust title="keys" showLineNumbers +pub fn keys(self) -> BoundedVec { +``` +> Source code: noir_stdlib/src/collections/map.nr#L144-L146 + + +Returns a vector of each key present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. + +Example: + +```rust title="keys_example" showLineNumbers +let keys = map.keys(); + + for i in 0..keys.max_len() { + if i < keys.len() { + let key = keys.get_unchecked(i); + let value = map.get(key).unwrap_unchecked(); + println(f"{key} -> {value}"); + } + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L323-L333 + + +### values + +```rust title="values" showLineNumbers +pub fn values(self) -> BoundedVec { +``` +> Source code: noir_stdlib/src/collections/map.nr#L164-L166 + + +Returns a vector of each value present in the hashmap. + +The length of the returned vector is always equal to the length of the hashmap. + +Example: + +```rust title="values_example" showLineNumbers +let values = map.values(); + + for i in 0..values.max_len() { + if i < values.len() { + let value = values.get_unchecked(i); + println(f"Found value {value}"); + } + } +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L335-L344 + + +### iter_mut + +```rust title="iter_mut" showLineNumbers +pub fn iter_mut( + &mut self, + f: fn(K, V) -> (K, V) + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L183-L192 + + +Iterates through each key-value pair of the HashMap, setting each key-value pair to the +result returned from the given function. + +Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated +through. If this is not desired, use `iter_values_mut` if only values need to be mutated, +or `entries` if neither keys nor values need to be mutated. + +The iteration order is left unspecified. As a result, if two keys are mutated to become +equal, which of the two values that will be present for the key in the resulting map is also unspecified. + +Example: + +```rust title="iter_mut_example" showLineNumbers +// Add 1 to each key in the map, and double the value associated with that key. + map.iter_mut(|k, v| (k + 1, v * 2)); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L348-L351 + + +### iter_keys_mut + +```rust title="iter_keys_mut" showLineNumbers +pub fn iter_keys_mut( + &mut self, + f: fn(K) -> K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { +``` +> Source code: noir_stdlib/src/collections/map.nr#L208-L217 + + +Iterates through the HashMap, mutating each key to the result returned from +the given function. + +Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated +through. If only iteration is desired and the keys are not intended to be mutated, +prefer using `entries` instead. + +The iteration order is left unspecified. As a result, if two keys are mutated to become +equal, which of the two values that will be present for the key in the resulting map is also unspecified. 
+ +Example: + +```rust title="iter_keys_mut_example" showLineNumbers +// Double each key, leaving the value associated with that key untouched + map.iter_keys_mut(|k| k * 2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L353-L356 + + +### iter_values_mut + +```rust title="iter_values_mut" showLineNumbers +pub fn iter_values_mut(&mut self, f: fn(V) -> V) { +``` +> Source code: noir_stdlib/src/collections/map.nr#L233-L235 + + +Iterates through the HashMap, applying the given function to each value and mutating the +value to equal the result. This function is more efficient than `iter_mut` and `iter_keys_mut` +because the keys are untouched and the underlying hashmap thus does not need to be reordered. + +Example: + +```rust title="iter_values_mut_example" showLineNumbers +// Halve each value + map.iter_values_mut(|v| v / 2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L358-L361 + + +### retain + +```rust title="retain" showLineNumbers +pub fn retain(&mut self, f: fn(K, V) -> bool) { +``` +> Source code: noir_stdlib/src/collections/map.nr#L247-L249 + + +Retains only the key-value pairs for which the given function returns true. +Any key-value pairs for which the function returns false will be removed from the map. + +Example: + +```rust title="retain_example" showLineNumbers +map.retain(|k, v| (k != 0) & (v != 0)); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L281-L283 + + +## Trait Implementations + +### default + +```rust title="default" showLineNumbers +impl Default for HashMap +where + B: BuildHasher + Default, + H: Hasher + Default +{ + fn default() -> Self { +``` +> Source code: noir_stdlib/src/collections/map.nr#L462-L469 + + +Constructs an empty HashMap. + +Example: + +```rust title="default_example" showLineNumbers +let hashmap: HashMap> = HashMap::default(); + assert(hashmap.is_empty()); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L202-L205 + + +### eq + +```rust title="eq" showLineNumbers +impl Eq for HashMap +where + K: Eq + Hash, + V: Eq, + B: BuildHasher, + H: Hasher +{ + fn eq(self, other: HashMap) -> bool { +``` +> Source code: noir_stdlib/src/collections/map.nr#L426-L435 + + +Checks if two HashMaps are equal. + +Example: + +```rust title="eq_example" showLineNumbers +let mut map1: HashMap> = HashMap::default(); + let mut map2: HashMap> = HashMap::default(); + + map1.insert(1, 2); + map1.insert(3, 4); + + map2.insert(3, 4); + map2.insert(1, 2); + + assert(map1 == map2); +``` +> Source code: test_programs/execution_success/hashmap/src/main.nr#L285-L296 + diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/index.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/index.md new file mode 100644 index 00000000000..ea84c6d5c21 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/index.md @@ -0,0 +1,5 @@ +--- +title: Containers +description: Container types provided by Noir's standard library for storing and retrieving data +keywords: [containers, data types, vec, hashmap] +--- diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/vec.mdx b/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/vec.mdx new file mode 100644 index 00000000000..1954f05bc76 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/containers/vec.mdx @@ -0,0 +1,151 @@ +--- +title: Vectors +description: Delve into the Vec data type in Noir. 
Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. +keywords: [noir, vector type, methods, examples, dynamic arrays] +sidebar_position: 6 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A vector is a collection type similar to Rust's `Vec` type. In Noir, it is a convenient way to use slices as mutable arrays. + +Example: + +```rust +let mut vector: Vec = Vec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +``` + +## Methods + +### new + +Creates a new, empty vector. + +```rust +pub fn new() -> Self +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### from_slice + +Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. + +```rust +pub fn from_slice(slice: [T]) -> Self +``` + +Example: + +```rust +let arr: [Field] = [1, 2, 3]; +let vector_from_slice = Vec::from_slice(arr); +assert(vector_from_slice.len() == 3); +``` + +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### get + +Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. + +```rust +pub fn get(self, index: Field) -> T +``` + +Example: + +```rust +let vector: Vec = Vec::from_slice([10, 20, 30]); +assert(vector.get(1) == 20); +``` + +### push + +Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. + +```rust +pub fn push(&mut self, elem: T) +``` + +Example: + +```rust +let mut vector: Vec = Vec::new(); +vector.push(10); +assert(vector.len() == 1); +``` + +### pop + +Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. + +```rust +pub fn pop(&mut self) -> T +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. 
+ +```rust +pub fn remove(&mut self, index: Field) -> T +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/_category_.json b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/ec_primitives.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/ec_primitives.md new file mode 100644 index 00000000000..d2b42d67b7c --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -0,0 +1,102 @@ +--- +title: Elliptic Curve Primitives +keywords: [cryptographic primitives, Noir project] +sidebar_position: 4 +--- + +Data structures and methods on them that allow you to carry out computations involving elliptic +curves over the (mathematical) field corresponding to `Field`. For the field currently at our +disposal, applications would involve a curve embedded in BN254, e.g. the +[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). + +## Data structures + +### Elliptic curve configurations + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. the specific elliptic +curve you want to use, which would be specified using any one of the methods +`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the +defining equation together with a generator point as parameters. You can find more detail in the +comments in +[`noir_stdlib/src/ec.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr), but +the gist of it is that the elliptic curves of interest are usually expressed in one of the standard +forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, +you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly +together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates +requiring more coordinates but allowing for more efficient implementations of elliptic curve +operations). Conversions between all of these forms are provided, and under the hood these +conversions are done whenever an operation is more efficient in a different representation (or a +mixed coordinate representation is employed). + +### Points + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the +elliptic curve. For a curve configuration `c` and a point `p`, it may be checked that `p` +does indeed lie on `c` by calling `c.contains(p1)`. + +## Methods + +(given a choice of curve representation, e.g. use `std::ec::tecurve::affine::Curve` and use +`std::ec::tecurve::affine::Point`) + +- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is + zero by calling `p.is_zero()`. +- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling + `p1.eq(p2)`. 
+- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two
+  points is accomplished by calling `c.add(p1,p2)`.
+- **Negation**: For a point `p: Point`, `p.negate()` is its negation.
+- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by
+  calling `c.subtract(p1,p2)`.
+- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`,
+  scalar multiplication is given by `c.mul(n,p)`. If instead `n: [u1; N]`, i.e. `n` is a bit
+  array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)`.
+- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`,
+  multi-scalar multiplication is given by `c.msm(n,p)`.
+- **Coordinate representation conversions**: The `into_group` method converts a point or curve
+  configuration in the affine representation to one in the CurveGroup representation, and
+  `into_affine` goes in the other direction.
+- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent
+  and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their
+  configurations or points. `swcurve` is more general: a curve `c` of one of the other two types
+  may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying
+  on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling
+  `c.map_into_swcurve(p)`.
+- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a
+  `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. For all of
+  the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where
+  `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to
+  satisfy are specified in the comments
+  [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr)).
+
+## Examples
+
+The
+[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr)
+illustrates all of the above primitives on various forms of the Baby Jubjub curve. Two more
+interesting examples in Noir are:
+
+Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key
+from the private key. This is a matter of using scalar multiplication. In the case of Baby Jubjub,
+for example, this code would do:
+
+```rust
+use dep::std::ec::tecurve::affine::{Curve, Point};
+
+fn bjj_pub_key(priv_key: Field) -> Point {
+    let bjj = Curve::new(
+        168700,
+        168696,
+        Point::new(995203441582195749578291179787384436505546430278305826713579947235728471134, 5472060717959818805561601436314318772137091100104008585924551046643952123905)
+    );
+
+    let base_pt = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203);
+
+    bjj.mul(priv_key, base_pt)
+}
+```
+
+This would come in handy, for example, in a Merkle membership proof.
+
+- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash
+  function. See
+  [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for
+  the case of Baby Jubjub and the Poseidon hash function.
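+
+For a more hands-on feel of the API, here is a minimal sketch reusing the Baby Jubjub parameters from the example above. It only uses methods listed on this page, and the assertions are illustrative rather than part of any protocol:
+
+```rust
+use dep::std::ec::tecurve::affine::{Curve, Point};
+
+fn main(priv_key: Field) {
+    // Baby Jubjub in Twisted Edwards form, with the same parameters as above.
+    let bjj = Curve::new(
+        168700,
+        168696,
+        Point::new(995203441582195749578291179787384436505546430278305826713579947235728471134, 5472060717959818805561601436314318772137091100104008585924551046643952123905)
+    );
+    let g = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203);
+
+    // The base point lies on the curve, and adding the zero element leaves it unchanged.
+    assert(bjj.contains(g));
+    assert(bjj.add(g, Point::zero()).eq(g));
+
+    // Scalar multiplication, e.g. deriving a public key from a private key.
+    let pub_key = bjj.mul(priv_key, g);
+    assert(bjj.contains(pub_key));
+}
+```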
diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx new file mode 100644 index 00000000000..4bf09cef178 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx @@ -0,0 +1,60 @@ +--- +title: ECDSA Signature Verification +description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves +keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] +sidebar_position: 3 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. + +## ecdsa_secp256k1::verify_signature + +Verifier for ECDSA Secp256k1 signatures + +```rust title="ecdsa_secp256k1" showLineNumbers +pub fn verify_signature( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8; N] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256k1.nr#L2-L9 + + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures + +```rust title="ecdsa_secp256r1" showLineNumbers +pub fn verify_signature( + public_key_x: [u8; 32], + public_key_y: [u8; 32], + signature: [u8; 64], + message_hash: [u8; N] +) -> bool +``` +> Source code: noir_stdlib/src/ecdsa_secp256r1.nr#L2-L9 + + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/eddsa.mdx b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/eddsa.mdx new file mode 100644 index 00000000000..c2c0624dfad --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/eddsa.mdx @@ -0,0 +1,37 @@ +--- +title: EdDSA Verification +description: Learn about the cryptographic primitives regarding EdDSA +keywords: [cryptographic primitives, Noir project, eddsa, signatures] +sidebar_position: 5 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## eddsa::eddsa_poseidon_verify + +Verifier for EdDSA signatures + +```rust +fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool +``` + +It is also possible to specify the hash algorithm used for the signature by using the `eddsa_verify_with_hasher` function with a parameter implementing the Hasher trait. For instance, if you want to use Poseidon2 instead, you can do the following: +```rust +use dep::std::hash::poseidon2::Poseidon2Hasher; + +let mut hasher = Poseidon2Hasher::default(); +eddsa_verify_with_hasher(pub_key_a.x, pub_key_a.y, s_a, r8_a.x, r8_a.y, msg, &mut hasher); +``` + + + +## eddsa::eddsa_to_pub + +Private to public key conversion. 
+ +Returns `(pub_key_x, pub_key_y)` + +```rust +fn eddsa_to_pub(secret : Field) -> (Field, Field) +``` + diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/hashes.mdx b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/hashes.mdx new file mode 100644 index 00000000000..119d8ccc70e --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -0,0 +1,251 @@ +--- +title: Hash methods +description: + Learn about the cryptographic primitives ready to use for any Noir project, including sha256, + blake2s, pedersen, mimc_bn254 and mimc +keywords: + [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] +sidebar_position: 0 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## sha256 + +Given an array of bytes, returns the resulting sha256 hash. + +```rust title="sha256" showLineNumbers +pub fn sha256(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L9-L11 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::sha256(x); +} +``` + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust title="blake2s" showLineNumbers +pub fn blake2s(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L15-L17 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## blake3 + +Given an array of bytes, returns an array with the Blake3 hash + +```rust title="blake3" showLineNumbers +pub fn blake3(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L21-L23 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake3(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. + +```rust title="pedersen_hash" showLineNumbers +pub fn pedersen_hash(input: [Field; N]) -> Field +``` +> Source code: noir_stdlib/src/hash.nr#L46-L48 + + +example: + +```rust title="pedersen-hash" showLineNumbers +use dep::std; + +fn main(x: Field, y: Field, expected_hash: Field) { + let hash = std::hash::pedersen_hash([x, y]); + assert_eq(hash, expected_hash); +} +``` +> Source code: test_programs/execution_success/pedersen_hash/src/main.nr#L1-L8 + + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. + +```rust title="pedersen_commitment" showLineNumbers +struct PedersenPoint { + x : Field, + y : Field, +} + +pub fn pedersen_commitment(input: [Field; N]) -> PedersenPoint +``` +> Source code: noir_stdlib/src/hash.nr#L26-L33 + + +example: + +```rust title="pedersen-commitment" showLineNumbers +use dep::std; + +fn main(x: Field, y: Field, expected_commitment: std::hash::PedersenPoint) { + let commitment = std::hash::pedersen_commitment([x, y]); + assert_eq(commitment.x, expected_commitment.x); + assert_eq(commitment.y, expected_commitment.y); +} +``` +> Source code: test_programs/execution_success/pedersen_commitment/src/main.nr#L1-L9 + + + + +## keccak256 + +Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes +(`[u8; 32]`). Specify a message_size to hash only the first `message_size` bytes +of the input. 
+ +```rust title="keccak256" showLineNumbers +pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L71-L73 + + +example: + +```rust title="keccak256" showLineNumbers +use dep::std; + +fn main(x: Field, result: [u8; 32]) { + // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field + // The padding is taken care of by the program + let digest = std::hash::keccak256([x as u8], 1); + assert(digest == result); + + //#1399: variable message size + let message_size = 4; + let hash_a = std::hash::keccak256([1, 2, 3, 4], message_size); + let hash_b = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size); + + assert(hash_a == hash_b); + + let message_size_big = 8; + let hash_c = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size_big); + + assert(hash_a != hash_c); +} +``` +> Source code: test_programs/execution_success/keccak256/src/main.nr#L1-L22 + + + + +## poseidon + +Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify +how many inputs are there to your Poseidon function. + +```rust +// example for hash_1, hash_2 accepts an array of length 2, etc +fn hash_1(input: [Field; 1]) -> Field +``` + +example: + +```rust title="poseidon" showLineNumbers +use dep::std::hash::poseidon; +use dep::std::hash::poseidon2; + +fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field, x3: [Field; 4], y3: Field) { + let hash1 = poseidon::bn254::hash_2(x1); + assert(hash1 == y1); + + let hash2 = poseidon::bn254::hash_4(x2); + assert(hash2 == y2); + + let hash3 = poseidon2::Poseidon2::hash(x3, x3.len()); + assert(hash3 == y3); +} +``` +> Source code: test_programs/execution_success/poseidon_bn254_hash/src/main.nr#L1-L15 + + +## poseidon 2 + +Given an array of Fields, returns a new Field with the Poseidon2 Hash. Contrary to the Poseidon +function, there is only one hash and you can specify a message_size to hash only the first +`message_size` bytes of the input, + +```rust +// example for hashing the first three elements of the input +Poseidon2::hash(input, 3); +``` + +The above example for Poseidon also includes Poseidon2. + +## mimc_bn254 and mimc + +`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by +providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if +you're willing to input your own constants: + +```rust +fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field +``` + +otherwise, use the `mimc_bn254` method: + +```rust +fn mimc_bn254(array: [Field; N]) -> Field +``` + +example: + +```rust + +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::mimc::mimc_bn254(x); +} +``` + +## hash_to_field + +```rust +fn hash_to_field(_input : [Field; N]) -> Field {} +``` + +Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return +a value which can be represented as a `Field`. 
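+
+A minimal usage sketch, assuming the `[Field; N]` signature shown above (the two-element input is arbitrary):
+
+```rust
+fn main(x: Field, y: Field) {
+    // A single Field: the blake2s digest of the inputs, reduced modulo the field modulus.
+    let h = std::hash::hash_to_field([x, y]);
+    println(h);
+}
+```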
+ diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/index.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/index.md new file mode 100644 index 00000000000..650f30165d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/index.md @@ -0,0 +1,14 @@ +--- +title: Cryptographic Primitives +description: + Learn about the cryptographic primitives ready to use for any Noir project +keywords: + [ + cryptographic primitives, + Noir project, + ] +--- + +The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news or if you would be interested in adding more of these calculations in Noir. + +Some methods are available thanks to the Aztec backend, not being performed using Noir. When using other backends, these methods may or may not be supplied. diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/scalar.mdx b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/scalar.mdx new file mode 100644 index 00000000000..df411ca5443 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/scalar.mdx @@ -0,0 +1,33 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplications over a fixed base in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +sidebar_position: 1 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## scalar_mul::fixed_base_embedded_curve + +Performs scalar multiplication over the embedded curve whose coordinates are defined by the +configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. + +```rust title="fixed_base_embedded_curve" showLineNumbers +pub fn fixed_base_embedded_curve( + low: Field, + high: Field +) -> [Field; 2] +``` +> Source code: noir_stdlib/src/scalar_mul.nr#L27-L32 + + +example + +```rust +fn main(x : Field) { + let scal = std::scalar_mul::fixed_base_embedded_curve(x); + println(scal); +} +``` + + diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/schnorr.mdx b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/schnorr.mdx new file mode 100644 index 00000000000..ae12e6c12dc --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/cryptographic_primitives/schnorr.mdx @@ -0,0 +1,45 @@ +--- +title: Schnorr Signatures +description: Learn how you can verify Schnorr signatures using Noir +keywords: [cryptographic primitives, Noir project, schnorr, signatures] +sidebar_position: 2 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## schnorr::verify_signature + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). + +```rust title="schnorr_verify" showLineNumbers +pub fn verify_signature( + public_key_x: Field, + public_key_y: Field, + signature: [u8; 64], + message: [u8; N] +) -> bool +``` +> Source code: noir_stdlib/src/schnorr.nr#L2-L9 + + +where `_signature` can be generated like so using the npm package +[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) + +```js +const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); +const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); + +... 
+ +const barretenberg = await BarretenbergWasm.new(); +const schnorr = new Schnorr(barretenberg); +const pubKey = schnorr.computePublicKey(privateKey); +const message = ... +const signature = Array.from( + schnorr.constructSignature(hash, privateKey).toBuffer() +); + +... +``` + + diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/logging.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/logging.md new file mode 100644 index 00000000000..db75ef9f86f --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/logging.md @@ -0,0 +1,78 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + print statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides two familiar statements you can use: `println` and `print`. Despite being a limited implementation of rust's `println!` and `print!` macros, these constructs can be useful for debugging. + +You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. + +Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. For example: + +```rust +struct Person { + age: Field, + height: Field, +} + +fn main(age: Field, height: Field) { + let person = Person { + age: age, + height: height, + }; + println(person); + println(age + height); + println("Hello world!"); +} +``` + +You can print different types in the same statement (including strings) with a type called `fmtstr`. 
It can be specified in the same way as a normal string, just prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + println(fmt_str); + + let s = myStruct { y: x, x: y }; + println(s); + + println(f"i: {i}, s: {s}"); + + println(x); + println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + println(f"s: {s}, foo: {foo}"); + + println(15); // prints 0x0f, implicit Field + println(-1 as u8); // prints 255 + println(-1 as i8); // prints -1 +``` + +Examples shown above are interchangeable between the two `print` statements: + +```rust +let person = Person { age : age, height : height }; + +println(person); +print(person); + +println("Hello world!"); // Prints with a newline at the end of the input +print("Hello world!"); // Prints the input and keeps cursor on the same line +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/merkle_trees.md new file mode 100644 index 00000000000..fa488677884 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/merkle_trees.md @@ -0,0 +1,58 @@ +--- +title: Merkle Trees +description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. +keywords: + [ + Merkle trees in Noir, + Noir programming language, + check membership, + computing root from leaf, + Noir Merkle tree implementation, + Merkle tree tutorial, + Merkle tree code examples, + Noir libraries, + pedersen hash., + ] +--- + +## compute_merkle_root + +Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](./cryptographic_primitives/hashes.mdx#pedersen_hash). + +```rust +fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field +``` + +example: + +```rust +/** + // these values are for this example only + index = "0" + priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" + secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" + note_hash_path = [ + "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", + "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", + "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" + ] + */ +fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { + + let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key); + let pubkey_x = pubkey[0]; + let pubkey_y = pubkey[1]; + let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); + + let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); + println(root); +} +``` + +To check merkle tree membership: + +1. Include a merkle root as a program input. +2. Compute the merkle root of a given leaf, index and hash path. +3. Assert the merkle roots are equal. + +For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/options.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/options.md new file mode 100644 index 00000000000..a1bd4e1de5f --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/options.md @@ -0,0 +1,101 @@ +--- +title: Option Type +--- + +The `Option` type is a way to express that a value might be present (`Some(T))` or absent (`None`). 
It's a safer way to handle potential absence of values, compared to using nulls in many other languages. + +```rust +struct Option { + None, + Some(T), +} +``` + +The `Option` type, already imported into your Noir program, can be used directly: + +```rust +fn main() { + let none = Option::none(); + let some = Option::some(3); +} +``` + +See [this test](https://github.com/noir-lang/noir/blob/5cbfb9c4a06c8865c98ff2b594464b037d821a5c/crates/nargo_cli/tests/test_data/option/src/main.nr) for a more comprehensive set of examples of each of the methods described below. + +## Methods + +### none + +Constructs a none value. + +### some + +Constructs a some wrapper around a given value. + +### is_none + +Returns true if the Option is None. + +### is_some + +Returns true of the Option is Some. + +### unwrap + +Asserts `self.is_some()` and returns the wrapped value. + +### unwrap_unchecked + +Returns the inner value without asserting `self.is_some()`. This method can be useful within an if condition when we already know that `option.is_some()`. If the option is None, there is no guarantee what value will be returned, only that it will be of type T for an `Option`. + +### unwrap_or + +Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. + +### unwrap_or_else + +Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value. + +### expect + +Asserts `self.is_some()` with a provided custom message and returns the contained `Some` value. The custom message is expected to be a format string. + +### map + +If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`. + +### map_or + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value. + +### map_or_else + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`. + +### and + +Returns None if self is None. Otherwise, this returns `other`. + +### and_then + +If self is None, this returns None. Otherwise, this calls the given function with the Some value contained within self, and returns the result of that call. In some languages this function is called `flat_map` or `bind`. + +### or + +If self is Some, return self. Otherwise, return `other`. + +### or_else + +If self is Some, return self. Otherwise, return `default()`. + +### xor + +If only one of the two Options is Some, return that option. Otherwise, if both options are Some or both are None, None is returned. + +### filter + +Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. Otherwise, this returns `None`. + +### flatten + +Flattens an `Option>` into a `Option`. This returns `None` if the outer Option is None. Otherwise, this returns the inner Option. diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/recursion.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/recursion.md new file mode 100644 index 00000000000..9337499dac8 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/recursion.md @@ -0,0 +1,88 @@ +--- +title: Recursive Proofs +description: Learn about how to write recursive proofs in Noir. +keywords: [recursion, recursive proofs, verification_key, verify_proof] +--- + +Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. 
+ +Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) + +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. + +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. + +## Verifying Recursive Proofs + +```rust +#[foreign(verify_proof)] +fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field) {} +``` + +:::info + +This is a black box function. Read [this section](./black_box_fns) to learn more about black box functions in Noir. + +::: + +## Example usage + +```rust +use dep::std; + +fn main( + verification_key : [Field; 114], + proof : [Field; 93], + public_inputs : [Field; 1], + key_hash : Field, + proof_b : [Field; 93], +) { + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash + ); + + std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash + ); +} +``` + +You can see a full example of recursive proofs in [this example recursion demo repo](https://github.com/noir-lang/noir-examples/tree/master/recursion). + +## Parameters + +### `verification_key` + +The verification key for the zk program that is being verified. + +### `proof` + +The proof for the zk program that is being verified. + +### `public_inputs` + +These represent the public inputs of the proof we are verifying. + +### `key_hash` + +A key hash is used to check the validity of the verification key. The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/traits.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/traits.md new file mode 100644 index 00000000000..ba9fa2ee841 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/traits.md @@ -0,0 +1,399 @@ +--- +title: Traits +description: Noir's stdlib provides a few commonly used traits. +keywords: [traits, trait, interface, protocol, default, add, eq] +--- + +## `std::default` + +### `std::default::Default` + +```rust title="default-trait" showLineNumbers +trait Default { + fn default() -> Self; +} +``` +> Source code: noir_stdlib/src/default.nr#L1-L5 + + +Constructs a default value of a type. + +Implementations: +```rust +impl Default for Field { .. } + +impl Default for i8 { .. } +impl Default for i16 { .. } +impl Default for i32 { .. } +impl Default for i64 { .. 
} + +impl Default for u8 { .. } +impl Default for u16 { .. } +impl Default for u32 { .. } +impl Default for u64 { .. } + +impl Default for () { .. } +impl Default for bool { .. } + +impl Default for [T; N] + where T: Default { .. } + +impl Default for (A, B) + where A: Default, B: Default { .. } + +impl Default for (A, B, C) + where A: Default, B: Default, C: Default { .. } + +impl Default for (A, B, C, D) + where A: Default, B: Default, C: Default, D: Default { .. } + +impl Default for (A, B, C, D, E) + where A: Default, B: Default, C: Default, D: Default, E: Default { .. } +``` + +For primitive integer types, the return value of `default` is `0`. Container +types such as arrays are filled with default values of their element type. + + +## `std::convert` + +### `std::convert::From` + +```rust title="from-trait" showLineNumbers +trait From { + fn from(input: T) -> Self; +} +``` +> Source code: noir_stdlib/src/convert.nr#L1-L5 + + +The `From` trait defines how to convert from a given type `T` to the type on which the trait is implemented. + +The Noir standard library provides a number of implementations of `From` between primitive types. +```rust title="from-impls" showLineNumbers +// Unsigned integers + +impl From for u32 { fn from(value: u8) -> u32 { value as u32 } } + +impl From for u64 { fn from(value: u8) -> u64 { value as u64 } } +impl From for u64 { fn from(value: u32) -> u64 { value as u64 } } + +impl From for Field { fn from(value: u8) -> Field { value as Field } } +impl From for Field { fn from(value: u32) -> Field { value as Field } } +impl From for Field { fn from(value: u64) -> Field { value as Field } } + +// Signed integers + +impl From for i32 { fn from(value: i8) -> i32 { value as i32 } } + +impl From for i64 { fn from(value: i8) -> i64 { value as i64 } } +impl From for i64 { fn from(value: i32) -> i64 { value as i64 } } + +// Booleans +impl From for u8 { fn from(value: bool) -> u8 { value as u8 } } +impl From for u32 { fn from(value: bool) -> u32 { value as u32 } } +impl From for u64 { fn from(value: bool) -> u64 { value as u64 } } +impl From for i8 { fn from(value: bool) -> i8 { value as i8 } } +impl From for i32 { fn from(value: bool) -> i32 { value as i32 } } +impl From for i64 { fn from(value: bool) -> i64 { value as i64 } } +impl From for Field { fn from(value: bool) -> Field { value as Field } } +``` +> Source code: noir_stdlib/src/convert.nr#L25-L52 + + +#### When to implement `From` + +As a general rule of thumb, `From` may be implemented in the [situations where it would be suitable in Rust](https://doc.rust-lang.org/std/convert/trait.From.html#when-to-implement-from): + +- The conversion is *infallible*: Noir does not provide an equivalent to Rust's `TryFrom`, if the conversion can fail then provide a named method instead. +- The conversion is *lossless*: semantically, it should not lose or discard information. For example, `u32: From` can losslessly convert any `u16` into a valid `u32` such that the original `u16` can be recovered. On the other hand, `u16: From` should not be implemented as `2**16` is a `u32` which cannot be losslessly converted into a `u16`. +- The conversion is *value-preserving*: the conceptual kind and meaning of the resulting value is the same, even though the Noir type and technical representation might be different. While it's possible to infallibly and losslessly convert a `u8` into a `str<2>` hex representation, `4u8` and `"04"` are too different for `str<2>: From` to be implemented. 
+- The conversion is *obvious*: it's the only reasonable conversion between the two types. If there's ambiguity on how to convert between them such that the same input could potentially map to two different values then a named method should be used. For instance rather than implementing `U128: From<[u8; 16]>`, the methods `U128::from_le_bytes` and `U128::from_be_bytes` are used as otherwise the endianness of the array would be ambiguous, resulting in two potential values of `U128` from the same byte array. + +One additional recommendation specific to Noir is: +- The conversion is *efficient*: it's relatively cheap to convert between the two types. Due to being a ZK DSL, it's more important to avoid unnecessary computation compared to Rust. If the implementation of `From` would encourage users to perform unnecessary conversion, resulting in additional proving time, then it may be preferable to expose functionality such that this conversion may be avoided. + +### `std::convert::Into` + +The `Into` trait is defined as the reciprocal of `From`. It should be easy to convince yourself that if we can convert to type `A` from type `B`, then it's possible to convert type `B` into type `A`. + +For this reason, implementing `From` on a type will automatically generate a matching `Into` implementation. One should always prefer implementing `From` over `Into` as implementing `Into` will not generate a matching `From` implementation. + +```rust title="into-trait" showLineNumbers +trait Into { + fn into(input: Self) -> T; +} + +impl Into for U where T: From { + fn into(input: U) -> T { + T::from(input) + } +} +``` +> Source code: noir_stdlib/src/convert.nr#L13-L23 + + +`Into` is most useful when passing function arguments where the types don't quite match up with what the function expects. In this case, the compiler has enough type information to perform the necessary conversion by just appending `.into()` onto the arguments in question. + + +## `std::cmp` + +### `std::cmp::Eq` + +```rust title="eq-trait" showLineNumbers +trait Eq { + fn eq(self, other: Self) -> bool; +} +``` +> Source code: noir_stdlib/src/cmp.nr#L1-L5 + + +Returns `true` if `self` is equal to `other`. Implementing this trait on a type +allows the type to be used with `==` and `!=`. + +Implementations: +```rust +impl Eq for Field { .. } + +impl Eq for i8 { .. } +impl Eq for i16 { .. } +impl Eq for i32 { .. } +impl Eq for i64 { .. } + +impl Eq for u8 { .. } +impl Eq for u16 { .. } +impl Eq for u32 { .. } +impl Eq for u64 { .. } + +impl Eq for () { .. } +impl Eq for bool { .. } + +impl Eq for [T; N] + where T: Eq { .. } + +impl Eq for (A, B) + where A: Eq, B: Eq { .. } + +impl Eq for (A, B, C) + where A: Eq, B: Eq, C: Eq { .. } + +impl Eq for (A, B, C, D) + where A: Eq, B: Eq, C: Eq, D: Eq { .. } + +impl Eq for (A, B, C, D, E) + where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. } +``` + +### `std::cmp::Ord` + +```rust title="ord-trait" showLineNumbers +trait Ord { + fn cmp(self, other: Self) -> Ordering; +} +``` +> Source code: noir_stdlib/src/cmp.nr#L92-L96 + + +`a.cmp(b)` compares two values returning `Ordering::less()` if `a < b`, +`Ordering::equal()` if `a == b`, or `Ordering::greater()` if `a > b`. +Implementing this trait on a type allows `<`, `<=`, `>`, and `>=` to be +used on values of the type. + +Implementations: + +```rust +impl Ord for u8 { .. } +impl Ord for u16 { .. } +impl Ord for u32 { .. } +impl Ord for u64 { .. } + +impl Ord for i8 { .. } +impl Ord for i16 { .. } +impl Ord for i32 { .. } + +impl Ord for i64 { .. 
} + +impl Ord for () { .. } +impl Ord for bool { .. } + +impl Ord for [T; N] + where T: Ord { .. } + +impl Ord for (A, B) + where A: Ord, B: Ord { .. } + +impl Ord for (A, B, C) + where A: Ord, B: Ord, C: Ord { .. } + +impl Ord for (A, B, C, D) + where A: Ord, B: Ord, C: Ord, D: Ord { .. } + +impl Ord for (A, B, C, D, E) + where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { .. } +``` + +## `std::ops` + +### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div` + +These traits abstract over addition, subtraction, multiplication, and division respectively. +Implementing these traits for a given type will also allow that type to be used with the corresponding operator +for that trait (`+` for Add, etc) in addition to the normal method names. + +```rust title="add-trait" showLineNumbers +trait Add { + fn add(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L1-L5 + +```rust title="sub-trait" showLineNumbers +trait Sub { + fn sub(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L17-L21 + +```rust title="mul-trait" showLineNumbers +trait Mul { + fn mul(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L33-L37 + +```rust title="div-trait" showLineNumbers +trait Div { + fn div(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L49-L53 + + +The implementations block below is given for the `Add` trait, but the same types that implement +`Add` also implement `Sub`, `Mul`, and `Div`. + +Implementations: +```rust +impl Add for Field { .. } + +impl Add for i8 { .. } +impl Add for i16 { .. } +impl Add for i32 { .. } +impl Add for i64 { .. } + +impl Add for u8 { .. } +impl Add for u16 { .. } +impl Add for u32 { .. } +impl Add for u64 { .. } +``` + +### `std::ops::Rem` + +```rust title="rem-trait" showLineNumbers +trait Rem{ + fn rem(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L65-L69 + + +`Rem::rem(a, b)` is the remainder function returning the result of what is +left after dividing `a` and `b`. Implementing `Rem` allows the `%` operator +to be used with the implementation type. + +Unlike other numeric traits, `Rem` is not implemented for `Field`. + +Implementations: +```rust +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } +``` + +### `std::ops::{ BitOr, BitAnd, BitXor }` + +```rust title="bitor-trait" showLineNumbers +trait BitOr { + fn bitor(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L79-L83 + +```rust title="bitand-trait" showLineNumbers +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L95-L99 + +```rust title="bitxor-trait" showLineNumbers +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L111-L115 + + +Traits for the bitwise operations `|`, `&`, and `^`. + +Implementing `BitOr`, `BitAnd` or `BitXor` for a type allows the `|`, `&`, or `^` operator respectively +to be used with the type. 
+ +The implementations block below is given for the `BitOr` trait, but the same types that implement +`BitOr` also implement `BitAnd` and `BitXor`. + +Implementations: +```rust +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } +``` + +### `std::ops::{ Shl, Shr }` + +```rust title="shl-trait" showLineNumbers +trait Shl { + fn shl(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L127-L131 + +```rust title="shr-trait" showLineNumbers +trait Shr { + fn shr(self, other: Self) -> Self; +} +``` +> Source code: noir_stdlib/src/ops.nr#L142-L146 + + +Traits for a bit shift left and bit shift right. + +Implementing `Shl` for a type allows the left shift operator (`<<`) to be used with the implementation type. +Similarly, implementing `Shr` allows the right shift operator (`>>`) to be used with the type. + +Note that bit shifting is not currently implemented for signed types. + +The implementations block below is given for the `Shl` trait, but the same types that implement +`Shl` also implement `Shr`. + +Implementations: +```rust +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } +``` diff --git a/docs/versioned_docs/version-v0.25.0/noir/standard_library/zeroed.md b/docs/versioned_docs/version-v0.25.0/noir/standard_library/zeroed.md new file mode 100644 index 00000000000..97dab02dac2 --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/noir/standard_library/zeroed.md @@ -0,0 +1,25 @@ +--- +title: Zeroed Function +description: + The zeroed function returns a zeroed value of any type. +keywords: + [ + zeroed + ] +--- + +Implements `fn zeroed() -> T` to return a zeroed value of any type. This function is generally unsafe to use as the zeroed bit pattern is not guaranteed to be valid for all types. It can however, be useful in cases when the value is guaranteed not to be used such as in a BoundedVec library implementing a growable vector, up to a certain length, backed by an array. The array can be initialized with zeroed values which are guaranteed to be inaccessible until the vector is pushed to. Similarly, enumerations in noir can be implemented using this method by providing zeroed values for the unused variants. + +You can access the function at `std::unsafe::zeroed`. + +This function currently supports the following types: + +- Field +- Bool +- Uint +- Array +- String +- Tuple +- Function + +Using it on other types could result in unexpected behavior. 
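+
+As an illustration of the pattern described above, here is a minimal sketch. The `MaybeValue` struct is hypothetical and only for demonstration; the zeroed placeholder is never read while `is_some` is false:
+
+```rust
+use dep::std;
+
+struct MaybeValue {
+    is_some: bool,
+    value: Field,
+}
+
+fn none_value() -> MaybeValue {
+    // Safe only because callers check `is_some` before ever reading `value`.
+    MaybeValue { is_some: false, value: std::unsafe::zeroed() }
+}
+
+fn main() {
+    let x = none_value();
+    assert(!x.is_some);
+}
+```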
diff --git a/docs/versioned_docs/version-v0.25.0/reference/_category_.json b/docs/versioned_docs/version-v0.25.0/reference/_category_.json new file mode 100644 index 00000000000..5b6a20a609a --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/reference/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 4, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.25.0/reference/nargo_commands.md b/docs/versioned_docs/version-v0.25.0/reference/nargo_commands.md new file mode 100644 index 00000000000..8a309ef4e7e --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/reference/nargo_commands.md @@ -0,0 +1,380 @@ +--- +title: Nargo +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +sidebar_position: 0 +--- + +# Command-Line Help for `nargo` + +This document contains the help content for the `nargo` command-line program. + +**Command Overview:** + +* [`nargo`↴](#nargo) +* [`nargo backend`↴](#nargo-backend) +* [`nargo backend current`↴](#nargo-backend-current) +* [`nargo backend ls`↴](#nargo-backend-ls) +* [`nargo backend use`↴](#nargo-backend-use) +* [`nargo backend install`↴](#nargo-backend-install) +* [`nargo backend uninstall`↴](#nargo-backend-uninstall) +* [`nargo check`↴](#nargo-check) +* [`nargo fmt`↴](#nargo-fmt) +* [`nargo codegen-verifier`↴](#nargo-codegen-verifier) +* [`nargo compile`↴](#nargo-compile) +* [`nargo new`↴](#nargo-new) +* [`nargo init`↴](#nargo-init) +* [`nargo execute`↴](#nargo-execute) +* [`nargo prove`↴](#nargo-prove) +* [`nargo verify`↴](#nargo-verify) +* [`nargo test`↴](#nargo-test) +* [`nargo info`↴](#nargo-info) +* [`nargo lsp`↴](#nargo-lsp) + +## `nargo` + +Noir's package manager + +**Usage:** `nargo ` + +###### **Subcommands:** + +* `backend` — Install and select custom backends used to generate and verify proofs +* `check` — Checks the constraint system for errors +* `fmt` — Format the Noir files in a workspace +* `codegen-verifier` — Generates a Solidity verifier smart contract for the program +* `compile` — Compile the program and its secret execution trace into ACIR format +* `new` — Create a Noir project in a new directory +* `init` — Create a Noir project in the current directory +* `execute` — Executes a circuit to calculate its return value +* `prove` — Create proof for this program. 
The proof is returned as a hex encoded string +* `verify` — Given a proof and a program, verify whether the proof is valid +* `test` — Run the tests for this program +* `info` — Provides detailed information on a circuit +* `lsp` — Starts the Noir LSP server + +###### **Options:** + + + + +## `nargo backend` + +Install and select custom backends used to generate and verify proofs + +**Usage:** `nargo backend ` + +###### **Subcommands:** + +* `current` — Prints the name of the currently active backend +* `ls` — Prints the list of currently installed backends +* `use` — Select the backend to use +* `install` — Install a new backend from a URL +* `uninstall` — Uninstalls a backend + + + +## `nargo backend current` + +Prints the name of the currently active backend + +**Usage:** `nargo backend current` + + + +## `nargo backend ls` + +Prints the list of currently installed backends + +**Usage:** `nargo backend ls` + + + +## `nargo backend use` + +Select the backend to use + +**Usage:** `nargo backend use ` + +###### **Arguments:** + +* `` + + + +## `nargo backend install` + +Install a new backend from a URL + +**Usage:** `nargo backend install ` + +###### **Arguments:** + +* `` — The name of the backend to install +* `` — The URL from which to download the backend + + + +## `nargo backend uninstall` + +Uninstalls a backend + +**Usage:** `nargo backend uninstall ` + +###### **Arguments:** + +* `` — The name of the backend to uninstall + + + +## `nargo check` + +Checks the constraint system for errors + +**Usage:** `nargo check [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to check +* `--workspace` — Check all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo fmt` + +Format the Noir files in a workspace + +**Usage:** `nargo fmt [OPTIONS]` + +###### **Options:** + +* `--check` — Run noirfmt in check mode + + + +## `nargo codegen-verifier` + +Generates a Solidity verifier smart contract for the program + +**Usage:** `nargo codegen-verifier [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to codegen +* `--workspace` — Codegen all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo compile` + +Compile the program and its secret execution trace into ACIR format + +**Usage:** `nargo compile [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to compile +* `--workspace` — Compile all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo new` + +Create a Noir project in a new directory + +**Usage:** `nargo new [OPTIONS] ` + +###### **Arguments:** + +* `` — The path to save the new project + +###### **Options:** + +* `--name ` — Name of the package [default: package directory name] +* `--lib` — Use a library template +* `--bin` 
— Use a binary template [default] +* `--contract` — Use a contract template + + + +## `nargo init` + +Create a Noir project in the current directory + +**Usage:** `nargo init [OPTIONS]` + +###### **Options:** + +* `--name ` — Name of the package [default: current directory name] +* `--lib` — Use a library template +* `--bin` — Use a binary template [default] +* `--contract` — Use a contract template + + + +## `nargo execute` + +Executes a circuit to calculate its return value + +**Usage:** `nargo execute [OPTIONS] [WITNESS_NAME]` + +###### **Arguments:** + +* `` — Write the execution witness to named file + +###### **Options:** + +* `-p`, `--prover-name ` — The name of the toml file which contains the inputs for the prover + + Default value: `Prover` +* `--package ` — The name of the package to execute +* `--workspace` — Execute all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo prove` + +Create proof for this program. The proof is returned as a hex encoded string + +**Usage:** `nargo prove [OPTIONS]` + +###### **Options:** + +* `-p`, `--prover-name ` — The name of the toml file which contains the inputs for the prover + + Default value: `Prover` +* `-v`, `--verifier-name ` — The name of the toml file which contains the inputs for the verifier + + Default value: `Verifier` +* `--verify` — Verify proof after proving +* `--package ` — The name of the package to prove +* `--workspace` — Prove all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo verify` + +Given a proof and a program, verify whether the proof is valid + +**Usage:** `nargo verify [OPTIONS]` + +###### **Options:** + +* `-v`, `--verifier-name ` — The name of the toml file which contains the inputs for the verifier + + Default value: `Verifier` +* `--package ` — The name of the package verify +* `--workspace` — Verify all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo test` + +Run the tests for this program + +**Usage:** `nargo test [OPTIONS] [TEST_NAME]` + +###### **Arguments:** + +* `` — If given, only tests with names containing this string will be run + +###### **Options:** + +* `--show-output` — Display output of `println` statements +* `--exact` — Only run tests that match exactly +* `--package ` — The name of the package to test +* `--workspace` — Test all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--oracle-resolver ` — JSON RPC url to 
solve oracle calls + + + +## `nargo info` + +Provides detailed information on a circuit + +Current information provided: 1. The number of ACIR opcodes 2. Counts the final number gates in the circuit used by a backend + +**Usage:** `nargo info [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to detail +* `--workspace` — Detail all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo lsp` + +Starts the Noir LSP server + +Starts an LSP server which allows IDEs such as VS Code to display diagnostics in Noir source. + +VS Code Noir Language Support: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir + +**Usage:** `nargo lsp` + + + +
+ + + This document was generated automatically by + clap-markdown. + + diff --git a/docs/versioned_docs/version-v0.25.0/tutorials/noirjs_app.md b/docs/versioned_docs/version-v0.25.0/tutorials/noirjs_app.md new file mode 100644 index 00000000000..ad76dd255cc --- /dev/null +++ b/docs/versioned_docs/version-v0.25.0/tutorials/noirjs_app.md @@ -0,0 +1,279 @@ +--- +title: Building a web app with NoirJS +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment. +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs, app] +sidebar_position: 0 +pagination_next: noir/concepts/data_types/index +--- + +NoirJS is a set of packages meant to work both in a browser and a server environment. In this tutorial, we will build a simple web app using them. From here, you should get an idea on how to proceed with your own Noir projects! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Setup + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.19.x matches `noir_js@0.19.x`, etc. + +In this guide, we will be pinned to 0.19.4. + +::: + +Before we start, we want to make sure we have Node and Nargo installed. + +We start by opening a terminal and executing `node --version`. If we don't get an output like `v20.10.0`, that means node is not installed. Let's do that by following the handy [nvm guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script). + +As for `Nargo`, we can follow the the [Nargo guide](../getting_started/installation/index.md) to install it. If you're lazy, just paste this on a terminal and run `noirup`: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Easy enough. Onwards! + +## Our project + +ZK is a powerful technology. An app that doesn't reveal one of the inputs to *anyone* is almost unbelievable, yet Noir makes it as easy as a single line of code. + +In fact, it's so simple that it comes nicely packaged in `nargo`. Let's do that! + +### Nargo + +Run: + +```nargo new circuit``` + +And... That's about it. Your program is ready to be compiled and run. + +To compile, let's `cd` into the `circuit` folder to enter our project, and call: + +```nargo compile``` + +This compiles our circuit into `json` format and add it to a new `target` folder. + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit <---- our working directory + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +::: + +### Node and Vite + +If you want to explore Nargo, feel free to go on a side-quest now and follow the steps in the +[getting started](../getting_started/hello_noir/index.md) guide. However, we want our app to run on the browser, so we need Vite. + +Vite is a powerful tool to generate static websites. While it provides all kinds of features, let's just go barebones with some good old vanilla JS. + +To do this this, go back to the previous folder (`cd ..`) and create a new vite project by running `npm create vite` and choosing "Vanilla" and "Javascript". + +You should see `vite-project` appear in your root folder. 
This seems like a good time to `cd` into it and install our NoirJS packages: + +```bash +npm i @noir-lang/backend_barretenberg@0.19.4 @noir-lang/noir_js@0.19.4 +``` + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit + └── ...etc... +└── vite-project <---- our working directory + └── ...etc... +``` + +::: + +#### Some cleanup + +`npm create vite` is amazing but it creates a bunch of files we don't really need for our simple example. Actually, let's just delete everything except for `index.html`, `main.js` and `package.json`. I feel lighter already. + +![my heart is ready for you, noir.js](@site/static/img/memes/titanic.jpeg) + +## HTML + +Our app won't run like this, of course. We need some working HTML, at least. Let's open our broken-hearted `index.html` and replace everything with this code snippet: + +```html
+<!DOCTYPE html>
+<html>
+  <head>
+    <title>Noir app</title>
+  </head>
+  <body>
+    <script type="module" src="/main.js"></script>
+
+    <h1>Noir app</h1>
+
+    <div>
+      <input id="guessInput" type="number" placeholder="Enter your guess" />
+      <button id="submitGuess">Submit guess</button>
+    </div>
+
+    <div>
+      <div id="logs"><h2>Logs</h2></div>
+      <div id="results"><h2>Proof</h2></div>
+    </div>
+  </body>
+</html>
+``` + +It *could* be a beautiful UI... Depending on which universe you live in. + +## Some good old vanilla Javascript + +Our love for Noir needs undivided attention, so let's just open `main.js` and delete everything (this is where the romantic scenery becomes a bit creepy). + +Start by pasting in this boilerplate code: + +```js +const setup = async () => { + await Promise.all([ + import("@noir-lang/noirc_abi").then(module => + module.default(new URL("@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm", import.meta.url).toString()) + ), + import("@noir-lang/acvm_js").then(module => + module.default(new URL("@noir-lang/acvm_js/web/acvm_js_bg.wasm", import.meta.url).toString()) + ) + ]); +} + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} + +document.getElementById('submitGuess').addEventListener('click', async () => { + try { + // here's where love happens + } catch(err) { + display("logs", "Oh 💔 Wrong guess") + } +}); + +``` + +The display function doesn't do much. We're simply manipulating our website to see stuff happening. For example, if the proof fails, it will simply log a broken heart 😢 + +As for the `setup` function, it's just a sad reminder that dealing with `wasm` in the browser is not as easy as it should be. Just copy, paste, and forget. + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit + └── ...same as above +└── vite-project + ├── main.js + ├── package.json + └── index.html +``` + +You'll see other files and folders showing up (like `package-lock.json`, `node_modules`) but you shouldn't have to care about those. + +::: + +## Some NoirJS + +We're starting with the good stuff now. If you've compiled the circuit as described above, you should have a `json` file we want to import at the very top of our `main.js` file: + +```ts +import circuit from '../circuit/target/circuit.json'; +``` + +[Noir is backend-agnostic](../index.mdx#whats-new-about-noir). We write Noir, but we also need a proving backend. That's why we need to import and instantiate the two dependencies we installed above: `BarretenbergBackend` and `Noir`. Let's import them right below: + +```js +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +And instantiate them inside our try-catch block: + +```ts +// try { +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +// } +``` + +:::note + +For the remainder of the tutorial, everything will be happening inside the `try` block. + +::: + +## Our app + +Now for the app itself. We're capturing whatever is in the input when people press the submit button. Just add this: + +```js +const x = parseInt(document.getElementById('guessInput').value); +const input = { x, y: 2 }; +``` + +Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: + +```js +await setup(); // let's squeeze our wasm inits here + +display('logs', 'Generating proof... ⌛'); +const proof = await noir.generateFinalProof(input); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm run dev`. If it doesn't open a browser for you, just visit `localhost:5173`. You should now see the worst UI ever, with an ugly input. 
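+
+Before you start guessing, it is worth recalling what we are actually proving here. The circuit is just the default program that `nargo new` scaffolded at the start of this guide; assuming the template has not changed since then, `src/main.nr` should look roughly like this:
+
+```rust
+// Default Nargo template: `y` is a public input, `x` stays private.
+fn main(x: Field, y: pub Field) {
+    assert(x != y);
+}
+```
+
+That single `assert` is the whole ZK program: the proof convinces a verifier that you know an `x` satisfying it, without revealing `x`.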
+ +![Getting Started 0](@site/static/img/noir_getting_started_1.png) + +Now, our circuit says `fn main(x: Field, y: pub Field)`. This means only the `y` value is public, and it's hardcoded above: `input = { x, y: 2 }`. In other words, you won't need to send your secret `x` to the verifier! + +By inputting any number other than 2 in the input box and clicking "submit", you should get a valid proof. Otherwise the proof won't even generate correctly. By the way, if you're human, you shouldn't be able to understand anything in the "proof" box. That's OK. We like you, human ❤️. + +## Verifying + +Time to celebrate, yes! But we shouldn't trust machines so blindly. Let's add these lines to see our proof being verified: + +```js +display('logs', 'Verifying proof... ⌛'); +const verification = await noir.verifyFinalProof(proof); +if (verification) display('logs', 'Verifying proof... ✅'); +``` + +You have successfully generated a client-side Noir web app! + +![coded app without math knowledge](@site/static/img/memes/flextape.jpeg) + +## Further Reading + +You can see how NoirJS is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from NoirJS. + +You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_sidebars/version-v0.25.0-sidebars.json b/docs/versioned_sidebars/version-v0.25.0-sidebars.json new file mode 100644 index 00000000000..b16f79cc176 --- /dev/null +++ b/docs/versioned_sidebars/version-v0.25.0-sidebars.json @@ -0,0 +1,83 @@ +{ + "sidebar": [ + { + "type": "doc", + "id": "index" + }, + { + "type": "category", + "label": "Getting Started", + "items": [ + { + "type": "autogenerated", + "dirName": "getting_started" + } + ] + }, + { + "type": "category", + "label": "The Noir Language", + "items": [ + { + "type": "autogenerated", + "dirName": "noir" + } + ] + }, + { + "type": "html", + "value": "<hr/>
", + "defaultStyle": true + }, + { + "type": "category", + "label": "How To Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "how_to" + } + ] + }, + { + "type": "category", + "label": "Explainers", + "items": [ + { + "type": "autogenerated", + "dirName": "explainers" + } + ] + }, + { + "type": "category", + "label": "Tutorials", + "items": [ + { + "type": "autogenerated", + "dirName": "tutorials" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "autogenerated", + "dirName": "reference" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/flake.nix b/flake.nix index 5125dad06be..785d5aead56 100644 --- a/flake.nix +++ b/flake.nix @@ -73,7 +73,7 @@ # Configuration shared between builds config = { # x-release-please-start-version - version = "0.24.0"; + version = "0.25.0"; # x-release-please-end src = pkgs.lib.cleanSourceWith { diff --git a/noir_stdlib/src/bigint.nr b/noir_stdlib/src/bigint.nr index 98237a54779..e7fc28ba39b 100644 --- a/noir_stdlib/src/bigint.nr +++ b/noir_stdlib/src/bigint.nr @@ -1,6 +1,7 @@ use crate::ops::{Add, Sub, Mul, Div}; use crate::cmp::Eq; +// docs:start:curve_order_base global bn254_fq = [0x47, 0xFD, 0x7C, 0xD8, 0x16, 0x8C, 0x20, 0x3C, 0x8d, 0xca, 0x71, 0x68, 0x91, 0x6a, 0x81, 0x97, 0x5d, 0x58, 0x81, 0x81, 0xb6, 0x45, 0x50, 0xb8, 0x29, 0xa0, 0x31, 0xe1, 0x72, 0x4e, 0x64, 0x30]; global bn254_fr = [0x01, 0x00, 0x00, 0x00, 0x3F, 0x59, 0x1F, 0x43, 0x09, 0x97, 0xB9, 0x79, 0x48, 0xE8, 0x33, 0x28, @@ -13,6 +14,7 @@ global secpr1_fq = [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF]; global secpr1_fr = [0x51, 0x25, 0x63, 0xFC, 0xC2, 0xCA, 0xB9, 0xF3, 0x84, 0x9E, 0x17, 0xA7, 0xAD, 0xFA, 0xE6, 0xBC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,0xFF, 0xFF, 0xFF, 0xFF]; +// docs:end:curve_order_base struct BigInt { pointer: u32, diff --git a/noir_stdlib/src/collections/map.nr b/noir_stdlib/src/collections/map.nr index 056299b4238..2d76acf1f3a 100644 --- a/noir_stdlib/src/collections/map.nr +++ b/noir_stdlib/src/collections/map.nr @@ -3,6 +3,7 @@ use crate::collections::vec::Vec; use crate::option::Option; use crate::default::Default; use crate::hash::{Hash, Hasher, BuildHasher}; +use crate::collections::bounded_vec::BoundedVec; // We use load factor α_max = 0.75. // Upon exceeding it, assert will fail in order to inform the user @@ -78,21 +79,26 @@ impl Slot { // it is very unlikely to be after - performance will be heavily degraded. impl HashMap { // Creates a new instance of HashMap with specified BuildHasher. + // docs:start:with_hasher pub fn with_hasher(_build_hasher: B) -> Self where B: BuildHasher { + // docs:end:with_hasher let _table = [Slot::default(); N]; let _len = 0; Self { _table, _len, _build_hasher } } // Clears the map, removing all key-value entries. + // docs:start:clear pub fn clear(&mut self) { + // docs:end:clear self._table = [Slot::default(); N]; self._len = 0; } // Returns true if the map contains a value for the specified key. + // docs:start:contains_key pub fn contains_key( self, key: K @@ -101,89 +107,80 @@ impl HashMap { K: Hash + Eq, B: BuildHasher, H: Hasher { + // docs:end:contains_key self.get(key).is_some() } // Returns true if the map contains no elements. + // docs:start:is_empty pub fn is_empty(self) -> bool { + // docs:end:is_empty self._len == 0 } - // Get the Option<(K, V) array of valid entries - // with a length of map capacity. First len() elements - // are safe to unwrap_unchecked(), whilst remaining - // are guaranteed to be Option::none(). - // - // This design is reasoned by compile-time limitations and - // temporary nested slices ban. - pub fn entries(self) -> [Option<(K, V)>; N] { - let mut entries = [Option::none(); N]; - let mut valid_amount = 0; + // Returns a BoundedVec of all valid entries in this HashMap. + // The length of the returned vector will always match the length of this HashMap. 
+ // docs:start:entries + pub fn entries(self) -> BoundedVec<(K, V), N> { + // docs:end:entries + let mut entries = BoundedVec::new(); for slot in self._table { if slot.is_valid() { - entries[valid_amount] = slot.key_value(); - valid_amount += 1; + // SAFETY: slot.is_valid() should ensure there is a valid key-value pairing here + let key_value = slot.key_value().unwrap_unchecked(); + entries.push(key_value); } } - let msg = f"Amount of valid elements should have been {self._len} times, but got {valid_amount}."; - assert(valid_amount == self._len, msg); + let msg = f"Amount of valid elements should have been {self._len} times, but got {entries.len()}."; + assert(entries.len() == self._len, msg); entries } - // Get the Option array of valid keys - // with a length of map capacity. First len() elements - // are safe to unwrap_unchecked(), whilst remaining - // are guaranteed to be Option::none(). - // - // This design is reasoned by compile-time limitations and - // temporary nested slices ban. - pub fn keys(self) -> [Option; N] { - let mut keys = [Option::none(); N]; - let mut valid_amount = 0; + // Returns a BoundedVec containing all the keys within this HashMap. + // The length of the returned vector will always match the length of this HashMap. + // docs:start:keys + pub fn keys(self) -> BoundedVec { + // docs:end:keys + let mut keys = BoundedVec::new(); for slot in self._table { if slot.is_valid() { let (key, _) = slot.key_value_unchecked(); - keys[valid_amount] = Option::some(key); - valid_amount += 1; + keys.push(key); } } - let msg = f"Amount of valid elements should have been {self._len} times, but got {valid_amount}."; - assert(valid_amount == self._len, msg); + let msg = f"Amount of valid elements should have been {self._len} times, but got {keys.len()}."; + assert(keys.len() == self._len, msg); keys } - // Get the Option array of valid values - // with a length of map capacity. First len() elements - // are safe to unwrap_unchecked(), whilst remaining - // are guaranteed to be Option::none(). - // - // This design is reasoned by compile-time limitations and - // temporary nested slices ban. - pub fn values(self) -> [Option; N] { - let mut values = [Option::none(); N]; - let mut valid_amount = 0; + // Returns a BoundedVec containing all the values within this HashMap. + // The length of the returned vector will always match the length of this HashMap. + // docs:start:values + pub fn values(self) -> BoundedVec { + // docs:end:values + let mut values = BoundedVec::new(); for slot in self._table { if slot.is_valid() { let (_, value) = slot.key_value_unchecked(); - values[valid_amount] = Option::some(value); - valid_amount += 1; + values.push(value); } } - let msg = f"Amount of valid elements should have been {self._len} times, but got {valid_amount}."; - assert(valid_amount == self._len, msg); + let msg = f"Amount of valid elements should have been {self._len} times, but got {values.len()}."; + assert(values.len() == self._len, msg); values } // For each key-value entry applies mutator function. 
+ // docs:start:iter_mut pub fn iter_mut( &mut self, f: fn(K, V) -> (K, V) @@ -192,12 +189,13 @@ impl HashMap { K: Eq + Hash, B: BuildHasher, H: Hasher { + // docs:end:iter_mut let mut entries = self.entries(); let mut new_map = HashMap::with_hasher(self._build_hasher); for i in 0..N { if i < self._len { - let entry = entries[i].unwrap_unchecked(); + let entry = entries.get_unchecked(i); let (key, value) = f(entry.0, entry.1); new_map.insert(key, value); } @@ -207,6 +205,7 @@ impl HashMap { } // For each key applies mutator function. + // docs:start:iter_keys_mut pub fn iter_keys_mut( &mut self, f: fn(K) -> K @@ -215,12 +214,13 @@ impl HashMap { K: Eq + Hash, B: BuildHasher, H: Hasher { + // docs:end:iter_keys_mut let mut entries = self.entries(); let mut new_map = HashMap::with_hasher(self._build_hasher); for i in 0..N { if i < self._len { - let entry = entries[i].unwrap_unchecked(); + let entry = entries.get_unchecked(i); let (key, value) = (f(entry.0), entry.1); new_map.insert(key, value); } @@ -230,7 +230,9 @@ impl HashMap { } // For each value applies mutator function. + // docs:start:iter_values_mut pub fn iter_values_mut(&mut self, f: fn(V) -> V) { + // docs:end:iter_values_mut for i in 0..N { let mut slot = self._table[i]; if slot.is_valid() { @@ -242,7 +244,9 @@ impl HashMap { } // Retains only the elements specified by the predicate. + // docs:start:retain pub fn retain(&mut self, f: fn(K, V) -> bool) { + // docs:end:retain for index in 0..N { let mut slot = self._table[index]; if slot.is_valid() { @@ -257,16 +261,21 @@ impl HashMap { } // Amount of active key-value entries. + // docs:start:len pub fn len(self) -> u64 { + // docs:end:len self._len } // Get the compile-time map capacity. + // docs:start:capacity pub fn capacity(_self: Self) -> u64 { + // docs:end:capacity N } // Get the value by key. If it does not exist, returns none(). + // docs:start:get pub fn get( self, key: K @@ -275,6 +284,7 @@ impl HashMap { K: Eq + Hash, B: BuildHasher, H: Hasher { + // docs:end:get let mut result = Option::none(); let hash = self.hash(key); @@ -300,6 +310,7 @@ impl HashMap { } // Insert key-value entry. In case key was already present, value is overridden. + // docs:start:insert pub fn insert( &mut self, key: K, @@ -309,6 +320,7 @@ impl HashMap { K: Eq + Hash, B: BuildHasher, H: Hasher { + // docs:end:insert self.assert_load_factor(); let hash = self.hash(key); @@ -340,7 +352,8 @@ impl HashMap { } } - // Remove key-value entry. If key is not present, HashMap remains unchanged. + // Removes a key-value entry. If key is not present, HashMap remains unchanged. + // docs:start:remove pub fn remove( &mut self, key: K @@ -349,6 +362,7 @@ impl HashMap { K: Eq + Hash, B: BuildHasher, H: Hasher { + // docs:end:remove let hash = self.hash(key); let mut break = false; @@ -409,6 +423,7 @@ impl HashMap { // Equality class on HashMap has to test that they have // equal sets of key-value entries, // thus one is a subset of the other and vice versa. 
+// docs:start:eq impl Eq for HashMap where K: Eq + Hash, @@ -416,7 +431,8 @@ where B: BuildHasher, H: Hasher { - fn eq(self, other: HashMap) -> bool{ + fn eq(self, other: HashMap) -> bool { +// docs:end:eq let mut equal = false; if self.len() == other.len(){ @@ -443,12 +459,14 @@ where } } +// docs:start:default impl Default for HashMap where B: BuildHasher + Default, H: Hasher + Default { - fn default() -> Self{ + fn default() -> Self { +// docs:end:default let _build_hasher = B::default(); let map: HashMap = HashMap::with_hasher(_build_hasher); map diff --git a/noir_stdlib/src/eddsa.nr b/noir_stdlib/src/eddsa.nr index 966bc1da2a1..3aff6043ffd 100644 --- a/noir_stdlib/src/eddsa.nr +++ b/noir_stdlib/src/eddsa.nr @@ -1,6 +1,8 @@ use crate::hash::poseidon; use crate::ec::consts::te::baby_jubjub; use crate::ec::tecurve::affine::Point as TEPoint; +use crate::hash::{Hash, Hasher, BuildHasher, BuildHasherDefault}; +use crate::hash::poseidon::PoseidonHasher; // Returns true if signature is valid pub fn eddsa_poseidon_verify( @@ -11,6 +13,28 @@ pub fn eddsa_poseidon_verify( signature_r8_y: Field, message: Field ) -> bool { + let mut hasher = PoseidonHasher::default(); + eddsa_verify_with_hasher( + pub_key_x, + pub_key_y, + signature_s, + signature_r8_x, + signature_r8_y, + message, + &mut hasher + ) +} + +pub fn eddsa_verify_with_hasher( + pub_key_x: Field, + pub_key_y: Field, + signature_s: Field, + signature_r8_x: Field, + signature_r8_y: Field, + message: Field, + hasher: &mut H +) -> bool +where H: Hasher { // Verifies by testing: // S * B8 = R8 + H(R8, A, m) * A8 let bjj = baby_jubjub(); @@ -23,7 +47,12 @@ pub fn eddsa_poseidon_verify( // Ensure S < Subgroup Order assert(signature_s.lt(bjj.suborder)); // Calculate the h = H(R, A, msg) - let hash: Field = poseidon::bn254::hash_5([signature_r8_x, signature_r8_y, pub_key_x, pub_key_y, message]); + signature_r8_x.hash(hasher); + signature_r8_y.hash(hasher); + pub_key_x.hash(hasher); + pub_key_y.hash(hasher); + message.hash(hasher); + let hash: Field = (*hasher).finish(); // Calculate second part of the right side: right2 = h*8*A // Multiply by 8 by doubling 3 times. This also ensures that the result is in the subgroup. let pub_key_mul_2 = bjj.curve.add(pub_key, pub_key); diff --git a/noir_stdlib/src/hash/mimc.nr b/noir_stdlib/src/hash/mimc.nr index 10c0a48917c..db8a32d7909 100644 --- a/noir_stdlib/src/hash/mimc.nr +++ b/noir_stdlib/src/hash/mimc.nr @@ -1,3 +1,6 @@ +use crate::hash::Hasher; +use crate::default::Default; + // mimc-p/p implementation // constants are (publicly generated) random numbers, for instance using keccak as a ROM. // You must use constants generated for the native field @@ -16,13 +19,8 @@ fn mimc(x: Field, k: Field, constants: [Field; N], exp: Field) -> Field { } global MIMC_BN254_ROUNDS = 91; -//mimc implementation with hardcoded parameters for BN254 curve. 
-#[field(bn254)] -pub fn mimc_bn254(array: [Field; N]) -> Field { - //mimc parameters - let exponent = 7; - //generated from seed "mimc" using keccak256 - let constants: [Field; MIMC_BN254_ROUNDS] = [ +//generated from seed "mimc" using keccak256 +global MIMC_BN254_CONSTANTS: [Field; MIMC_BN254_ROUNDS] = [ 0, 20888961410941983456478427210666206549300505294776164667214940546594746570981, 15265126113435022738560151911929040668591755459209400716467504685752745317193, @@ -116,10 +114,46 @@ pub fn mimc_bn254(array: [Field; N]) -> Field { 13602139229813231349386885113156901793661719180900395818909719758150455500533 ]; +//mimc implementation with hardcoded parameters for BN254 curve. +#[field(bn254)] +pub fn mimc_bn254(array: [Field; N]) -> Field { + let exponent = 7; let mut r = 0; for elem in array { - let h = mimc(elem, r, constants, exponent); + let h = mimc(elem, r, MIMC_BN254_CONSTANTS, exponent); r = r + elem + h; } r } + +struct MimcHasher{ + _state: [Field], + _len: u64, +} + +impl Hasher for MimcHasher { + #[field(bn254)] + fn finish(self) -> Field { + let exponent = 7; + let mut r = 0; + for i in 0..self._len { + let h = mimc(self._state[i], r, MIMC_BN254_CONSTANTS, exponent); + r = r + self._state[i] + h; + } + r + } + + fn write(&mut self, input: [Field]){ + self._state = self._state.append(input); + self._len += input.len(); + } +} + +impl Default for MimcHasher{ + fn default() -> Self{ + MimcHasher{ + _state: [], + _len: 0, + } + } +} diff --git a/noir_stdlib/src/hash/poseidon.nr b/noir_stdlib/src/hash/poseidon.nr index b1a7c4a2367..7f99ad36316 100644 --- a/noir_stdlib/src/hash/poseidon.nr +++ b/noir_stdlib/src/hash/poseidon.nr @@ -1,5 +1,7 @@ mod bn254; // Instantiations of Poseidon for prime field of the same order as BN254 use crate::field::modulus_num_bits; +use crate::hash::Hasher; +use crate::default::Default; struct PoseidonConfig { t: Field, // Width, i.e. 
state size @@ -100,3 +102,77 @@ fn apply_matrix(a: [Field; M], x: [Field; N]) -> [Field; N] { y } + +struct PoseidonHasher{ + _state: [Field], + _len: u64, +} + +impl Hasher for PoseidonHasher { + #[field(bn254)] + fn finish(self) -> Field { + let mut result = 0; + assert(self._len < 16); + if self._len == 1 { + result = bn254::hash_1([self._state[0]]); + } + if self._len == 2 { + result = bn254::hash_2([self._state[0],self._state[1]]); + } + if self._len == 3 { + result = bn254::hash_3([self._state[0],self._state[1],self._state[2]]); + } + if self._len == 4 { + result = bn254::hash_4([self._state[0],self._state[1],self._state[2],self._state[3]]); + } + if self._len == 5 { + result = bn254::hash_5([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4]]); + } + if self._len == 6 { + result = bn254::hash_6([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5]]); + } + if self._len == 7 { + result = bn254::hash_7([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5], self._state[6]]); + } + if self._len == 8 { + result = bn254::hash_8([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5], self._state[6], self._state[7]]); + } + if self._len == 9 { + result = bn254::hash_9([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5], self._state[6], self._state[7], self._state[8]]); + } + if self._len == 10 { + result = bn254::hash_10([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5], self._state[6], self._state[7], self._state[8], self._state[9]]); + } + if self._len == 11 { + result = bn254::hash_11([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5], self._state[6], self._state[7], self._state[8], self._state[9], self._state[10]]); + } + if self._len == 12 { + result = bn254::hash_12([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5], self._state[6], self._state[7], self._state[8], self._state[9], self._state[10], self._state[11]]); + } + if self._len == 13 { + result = bn254::hash_13([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5], self._state[6], self._state[7], self._state[8], self._state[9], self._state[10], self._state[11], self._state[12]]); + } + if self._len == 14 { + result = bn254::hash_14([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5], self._state[6], self._state[7], self._state[8], self._state[9], self._state[10], self._state[11], self._state[12], self._state[13]]); + } + if self._len == 15 { + result = bn254::hash_15([self._state[0],self._state[1],self._state[2],self._state[3],self._state[4], self._state[5], self._state[6], self._state[7], self._state[8], self._state[9], self._state[10], self._state[11], self._state[12], self._state[13], self._state[14]]); + } + + result + } + + fn write(&mut self, input: [Field]){ + self._state = self._state.append(input); + self._len += input.len(); + } +} + +impl Default for PoseidonHasher{ + fn default() -> Self{ + PoseidonHasher{ + _state: [], + _len: 0, + } + } +} diff --git a/noir_stdlib/src/hash/poseidon2.nr b/noir_stdlib/src/hash/poseidon2.nr index 40eea029e82..52229f18dbd 100644 --- a/noir_stdlib/src/hash/poseidon2.nr +++ b/noir_stdlib/src/hash/poseidon2.nr @@ -1,3 +1,6 @@ +use crate::hash::Hasher; +use crate::default::Default; + global RATE = 3; struct Poseidon2 
{ @@ -9,7 +12,7 @@ struct Poseidon2 { impl Poseidon2 { - pub fn hash(input: [Field; N], message_size: u32) -> Field { + pub fn hash(input: [Field; N], message_size: u64) -> Field { if message_size == N { Poseidon2::hash_internal(input, N, false) } else { @@ -92,12 +95,12 @@ impl Poseidon2 { result } - fn hash_internal(input: [Field; N], in_len: u32, is_variable_length: bool) -> Field { + fn hash_internal(input: [Field; N], in_len: u64, is_variable_length: bool) -> Field { let two_pow_64 = 18446744073709551616; let iv : Field = (in_len as Field) * two_pow_64; let mut sponge = Poseidon2::new(iv); for i in 0..input.len() { - if i as u32 < in_len { + if i < in_len { sponge.absorb(input[i]); } } @@ -111,3 +114,33 @@ impl Poseidon2 { sponge.squeeze() } } + +struct Poseidon2Hasher{ + _state: [Field], + _len: u64, +} + +impl Hasher for Poseidon2Hasher { + fn finish(self) -> Field { + let iv : Field = (self._state.len() as Field)*18446744073709551616; // iv = (self._state.len() << 64) + let mut sponge = Poseidon2::new(iv); + for i in 0..self._len { + sponge.absorb(self._state[i]); + } + sponge.squeeze() + } + + fn write(&mut self, input: [Field]){ + self._state = self._state.append(input); + self._len += input.len(); + } +} + +impl Default for Poseidon2Hasher{ + fn default() -> Self{ + Poseidon2Hasher{ + _state: [], + _len: 0, + } + } +} diff --git a/noir_stdlib/src/internal.nr b/noir_stdlib/src/internal.nr new file mode 100644 index 00000000000..8d5c01dda7f --- /dev/null +++ b/noir_stdlib/src/internal.nr @@ -0,0 +1,12 @@ +// This file contains functions which should only be used in calls injected by the Noir compiler. +// These functions should not be called manually in user code. +// +// Changes to this file will not be considered breaking. + +#[oracle(assert_message)] +unconstrained fn assert_message_oracle(_input: T) {} +unconstrained pub fn resolve_assert_message(input: T, condition: bool) { + if !condition { + assert_message_oracle(input); + } +} diff --git a/noir_stdlib/src/lib.nr b/noir_stdlib/src/lib.nr index ebde4b88858..90c04472066 100644 --- a/noir_stdlib/src/lib.nr +++ b/noir_stdlib/src/lib.nr @@ -26,6 +26,7 @@ mod default; mod prelude; mod uint128; mod bigint; +mod internal; // Oracle calls are required to be wrapped in an unconstrained function // Thus, the only argument to the `println` oracle is expected to always be an ident diff --git a/noirc_macros/Cargo.toml b/noirc_macros/Cargo.toml deleted file mode 100644 index 699e6b01cae..00000000000 --- a/noirc_macros/Cargo.toml +++ /dev/null @@ -1,14 +0,0 @@ -[package] -name = "noirc_macros" -version.workspace = true -authors.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -repository.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -noirc_frontend.workspace = true -iter-extended.workspace = true \ No newline at end of file diff --git a/noirc_macros/src/lib.rs b/noirc_macros/src/lib.rs deleted file mode 100644 index 9a916843200..00000000000 --- a/noirc_macros/src/lib.rs +++ /dev/null @@ -1,73 +0,0 @@ -use noirc_frontend::hir::def_collector::dc_crate::UnresolvedFunctions; -use noirc_frontend::hir::def_collector::dc_crate::UnresolvedTraitImpl; -use noirc_frontend::macros_api::parse_program; -use noirc_frontend::macros_api::HirContext; -use noirc_frontend::macros_api::SortedModule; -use noirc_frontend::macros_api::{CrateId, FileId}; -use noirc_frontend::macros_api::{MacroError, MacroProcessor}; - -pub 
struct AssertMessageMacro; - -impl MacroProcessor for AssertMessageMacro { - fn process_untyped_ast( - &self, - ast: SortedModule, - crate_id: &CrateId, - _context: &HirContext, - ) -> Result { - transform(ast, crate_id) - } - - fn process_unresolved_traits_impls( - &self, - _crate_id: &CrateId, - _context: &mut HirContext, - _unresolved_traits_impls: &[UnresolvedTraitImpl], - _collected_functions: &mut Vec, - ) -> Result<(), (MacroError, FileId)> { - Ok(()) - } - - // This macro does not need to process any information after name resolution - fn process_typed_ast( - &self, - _crate_id: &CrateId, - _context: &mut HirContext, - ) -> Result<(), (MacroError, FileId)> { - Ok(()) - } -} - -fn transform(ast: SortedModule, crate_id: &CrateId) -> Result { - let ast = add_resolve_assert_message_funcs(ast, crate_id)?; - - Ok(ast) -} - -fn add_resolve_assert_message_funcs( - mut ast: SortedModule, - crate_id: &CrateId, -) -> Result { - if !crate_id.is_stdlib() { - return Ok(ast); - } - let assert_message_oracles = " - #[oracle(assert_message)] - unconstrained fn assert_message_oracle(_input: T) {} - unconstrained pub fn resolve_assert_message(input: T, condition: bool) { - if !condition { - assert_message_oracle(input); - } - }"; - - let (assert_msg_funcs_ast, errors) = parse_program(assert_message_oracles); - assert_eq!(errors.len(), 0, "Failed to parse Noir macro code. This is either a bug in the compiler or the Noir macro code"); - - let assert_msg_funcs_ast = assert_msg_funcs_ast.into_sorted(); - - for func in assert_msg_funcs_ast.functions { - ast.functions.push(func) - } - - Ok(ast) -} diff --git a/test_programs/compile_failure/assert_msg_runtime/src/main.nr b/test_programs/compile_failure/assert_msg_runtime/src/main.nr index bec3082550a..fa21442e816 100644 --- a/test_programs/compile_failure/assert_msg_runtime/src/main.nr +++ b/test_programs/compile_failure/assert_msg_runtime/src/main.nr @@ -4,4 +4,4 @@ fn main(x: Field, y: pub Field) { let z = x + y; assert(z != y, f"Expected z != y, but got both equal {z}"); assert_eq(x, y, f"Expected x == y, but x is {x} and y is {y}"); -} \ No newline at end of file +} diff --git a/test_programs/compile_failure/brillig_assert_msg_runtime/src/main.nr b/test_programs/compile_failure/brillig_assert_msg_runtime/src/main.nr index 428b2006363..bd77551e304 100644 --- a/test_programs/compile_failure/brillig_assert_msg_runtime/src/main.nr +++ b/test_programs/compile_failure/brillig_assert_msg_runtime/src/main.nr @@ -7,4 +7,4 @@ unconstrained fn conditional(x: Field) -> Field { assert_eq(z, 25, f"Expected 25 but got {z}"); assert(x == 10, f"Expected x to equal 10, but got {x}"); 1 -} \ No newline at end of file +} diff --git a/test_programs/compile_failure/brillig_mut_ref_from_acir/src/main.nr b/test_programs/compile_failure/brillig_mut_ref_from_acir/src/main.nr index cf3279cac0d..473ad8e8d6a 100644 --- a/test_programs/compile_failure/brillig_mut_ref_from_acir/src/main.nr +++ b/test_programs/compile_failure/brillig_mut_ref_from_acir/src/main.nr @@ -5,4 +5,4 @@ unconstrained fn mut_ref_identity(value: &mut Field) -> Field { fn main(mut x: Field, y: pub Field) { let returned_x = mut_ref_identity(&mut x); assert(returned_x == x); -} \ No newline at end of file +} diff --git a/test_programs/compile_success_contract/contract_with_impl/src/main.nr b/test_programs/compile_success_contract/contract_with_impl/src/main.nr index ddcb5d54d78..1c6b6c217c4 100644 --- a/test_programs/compile_success_contract/contract_with_impl/src/main.nr +++ 
b/test_programs/compile_success_contract/contract_with_impl/src/main.nr @@ -2,6 +2,6 @@ contract Foo { struct T { x: [Field] } impl T { - fn t(self){} + fn t(self) {} } } diff --git a/test_programs/compile_success_contract/simple_contract/src/main.nr b/test_programs/compile_success_contract/simple_contract/src/main.nr index ed90ac8bd1d..7412e1386bf 100644 --- a/test_programs/compile_success_contract/simple_contract/src/main.nr +++ b/test_programs/compile_success_contract/simple_contract/src/main.nr @@ -8,12 +8,9 @@ contract Foo { fn quadruple(x: Field) -> pub Field { x * 4 } - open fn skibbidy(x: Field) -> pub Field { - x * 5 - } // Regression for issue #3344 #[contract_library_method] - fn foo(x : u8) -> u8 { + fn foo(x: u8) -> u8 { x } } diff --git a/test_programs/execution_success/bigint/src/main.nr b/test_programs/execution_success/bigint/src/main.nr index b93fec370e5..b6c23276c15 100644 --- a/test_programs/execution_success/bigint/src/main.nr +++ b/test_programs/execution_success/bigint/src/main.nr @@ -1,4 +1,5 @@ use dep::std::bigint; +use dep::std::{bigint::Secpk1Fq, println}; fn main(mut x: [u8; 5], y: [u8; 5]) { let a = bigint::Secpk1Fq::from_le_bytes([x[0], x[1], x[2], x[3], x[4]]); @@ -13,4 +14,15 @@ fn main(mut x: [u8; 5], y: [u8; 5]) { let d = a * b - b; let d1 = bigint::Secpk1Fq::from_le_bytes(597243850900842442924.to_le_bytes(10)); assert(d1 == d); + // big_int_example(x[0], x[1]); } + +// docs:start:big_int_example +fn big_int_example(x: u8, y: u8) { + let a = Secpk1Fq::from_le_bytes([x, y, 0, 45, 2]); + let b = Secpk1Fq::from_le_bytes([y, x, 9]); + let c = (a + b) * b / a; + let d = c.to_le_bytes(); + println(d[0]); +} +// docs:end:big_int_example diff --git a/test_programs/execution_success/brillig_cow_assign/Nargo.toml b/test_programs/execution_success/brillig_cow_assign/Nargo.toml new file mode 100644 index 00000000000..a878566a372 --- /dev/null +++ b/test_programs/execution_success/brillig_cow_assign/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "brillig_cow_assign" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] diff --git a/test_programs/execution_success/brillig_cow_assign/Prover.toml b/test_programs/execution_success/brillig_cow_assign/Prover.toml new file mode 100644 index 00000000000..882c73b83f8 --- /dev/null +++ b/test_programs/execution_success/brillig_cow_assign/Prover.toml @@ -0,0 +1,2 @@ +items_to_update = 10 +index = 6 diff --git a/test_programs/execution_success/brillig_cow_assign/src/main.nr b/test_programs/execution_success/brillig_cow_assign/src/main.nr new file mode 100644 index 00000000000..e5c3e2bd2f5 --- /dev/null +++ b/test_programs/execution_success/brillig_cow_assign/src/main.nr @@ -0,0 +1,23 @@ +global N = 10; + +unconstrained fn main() { + let mut arr = [0; N]; + let mut mid_change = arr; + + for i in 0..N { + if i == N / 2 { + mid_change = arr; + } + arr[i] = 27; + } + + // Expect: + // arr = [27, 27, 27, 27, 27, 27, 27, 27, 27, 27] + // mid_change = [27, 27, 27, 27, 27, 0, 0, 0, 0, 0] + + let modified_i = N / 2 + 1; + assert_eq(arr[modified_i], 27); + + // Fail here! 
+ assert(mid_change[modified_i] != 27); +} diff --git a/test_programs/execution_success/brillig_cow_regression/src/main.nr b/test_programs/execution_success/brillig_cow_regression/src/main.nr index ba51548d9dd..1cae9b1ba41 100644 --- a/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -8,9 +8,9 @@ global MAX_NEW_CONTRACTS_PER_TX: u64 = 1; global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1; global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1; global NUM_FIELDS_PER_SHA256 = 2; -global CALLDATA_HASH_INPUT_SIZE = 169; -global CALL_DATA_HASH_LOG_FIELDS = 4; -global CALL_DATA_HASH_FULL_FIELDS = 165; +global TX_EFFECT_HASH_INPUT_SIZE = 169; +global TX_EFFECT_HASH_LOG_FIELDS = 4; +global TX_EFFECT_HASH_FULL_FIELDS = 165; struct PublicDataUpdateRequest { leaf_slot : Field, @@ -99,7 +99,7 @@ impl U256 { } unconstrained fn main(kernel_data: DataToHash) -> pub [Field; NUM_FIELDS_PER_SHA256] { - let mut calldata_hash_inputs = [0; CALLDATA_HASH_INPUT_SIZE]; + let mut tx_effects_hash_inputs = [0; TX_EFFECT_HASH_INPUT_SIZE]; let new_note_hashes = kernel_data.new_note_hashes; let new_nullifiers = kernel_data.new_nullifiers; @@ -111,65 +111,65 @@ unconstrained fn main(kernel_data: DataToHash) -> pub [Field; NUM_FIELDS_PER_SHA let mut offset = 0; for j in 0..MAX_NEW_NOTE_HASHES_PER_TX { - calldata_hash_inputs[offset + j] = new_note_hashes[j]; + tx_effects_hash_inputs[offset + j] = new_note_hashes[j]; } offset += MAX_NEW_NOTE_HASHES_PER_TX ; for j in 0..MAX_NEW_NULLIFIERS_PER_TX { - calldata_hash_inputs[offset + j] = new_nullifiers[j]; + tx_effects_hash_inputs[offset + j] = new_nullifiers[j]; } offset += MAX_NEW_NULLIFIERS_PER_TX ; for j in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX { - calldata_hash_inputs[offset + j * 2] = + tx_effects_hash_inputs[offset + j * 2] = public_data_update_requests[j].leaf_slot; - calldata_hash_inputs[offset + j * 2 + 1] = + tx_effects_hash_inputs[offset + j * 2 + 1] = public_data_update_requests[j].new_value; } offset += MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * 2; for j in 0..MAX_NEW_L2_TO_L1_MSGS_PER_TX { - calldata_hash_inputs[offset + j] = newL2ToL1msgs[j]; + tx_effects_hash_inputs[offset + j] = newL2ToL1msgs[j]; } offset += MAX_NEW_L2_TO_L1_MSGS_PER_TX; let contract_leaf = kernel_data.new_contracts[0]; - calldata_hash_inputs[offset] = contract_leaf.hash(); + tx_effects_hash_inputs[offset] = contract_leaf.hash(); offset += MAX_NEW_CONTRACTS_PER_TX; let new_contracts = kernel_data.new_contracts; - calldata_hash_inputs[offset] = new_contracts[0].contract_address; + tx_effects_hash_inputs[offset] = new_contracts[0].contract_address; - calldata_hash_inputs[offset + 1] = new_contracts[0].portal_contract_address; + tx_effects_hash_inputs[offset + 1] = new_contracts[0].portal_contract_address; offset += MAX_NEW_CONTRACTS_PER_TX * 2; for j in 0..NUM_FIELDS_PER_SHA256 { - calldata_hash_inputs[offset + j] = encryptedLogsHash[j]; + tx_effects_hash_inputs[offset + j] = encryptedLogsHash[j]; } offset += NUM_ENCRYPTED_LOGS_HASHES_PER_TX * NUM_FIELDS_PER_SHA256; for j in 0..NUM_FIELDS_PER_SHA256 { - calldata_hash_inputs[offset + j] = unencryptedLogsHash[j]; + tx_effects_hash_inputs[offset + j] = unencryptedLogsHash[j]; } offset += NUM_UNENCRYPTED_LOGS_HASHES_PER_TX * NUM_FIELDS_PER_SHA256; - assert_eq(offset, CALLDATA_HASH_INPUT_SIZE); // Sanity check + assert_eq(offset, TX_EFFECT_HASH_INPUT_SIZE); // Sanity check - let mut hash_input_flattened = [0; CALL_DATA_HASH_FULL_FIELDS * 32 + 
CALL_DATA_HASH_LOG_FIELDS * 16]; - for offset in 0..CALL_DATA_HASH_FULL_FIELDS { - let input_as_bytes = calldata_hash_inputs[offset].to_be_bytes(32); + let mut hash_input_flattened = [0; TX_EFFECT_HASH_FULL_FIELDS * 32 + TX_EFFECT_HASH_LOG_FIELDS * 16]; + for offset in 0..TX_EFFECT_HASH_FULL_FIELDS { + let input_as_bytes = tx_effects_hash_inputs[offset].to_be_bytes(32); for byte_index in 0..32 { hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index]; } } - for log_field_index in 0..CALL_DATA_HASH_LOG_FIELDS { - let input_as_bytes = calldata_hash_inputs[CALL_DATA_HASH_FULL_FIELDS + log_field_index].to_be_bytes(16); + for log_field_index in 0..TX_EFFECT_HASH_LOG_FIELDS { + let input_as_bytes = tx_effects_hash_inputs[TX_EFFECT_HASH_FULL_FIELDS + log_field_index].to_be_bytes(16); for byte_index in 0..16 { - hash_input_flattened[CALL_DATA_HASH_FULL_FIELDS * 32 + log_field_index * 16 + byte_index] = input_as_bytes[byte_index]; + hash_input_flattened[TX_EFFECT_HASH_FULL_FIELDS * 32 + log_field_index * 16 + byte_index] = input_as_bytes[byte_index]; } } diff --git a/test_programs/execution_success/double_verify_proof/Nargo.toml b/test_programs/execution_success/double_verify_proof/Nargo.toml index a4edd2e4288..c5954f54bdb 100644 --- a/test_programs/execution_success/double_verify_proof/Nargo.toml +++ b/test_programs/execution_success/double_verify_proof/Nargo.toml @@ -2,4 +2,6 @@ name = "double_verify_proof" type = "bin" authors = [""] -[dependencies] +compiler_version = ">=0.24.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/double_verify_proof/src/main.nr b/test_programs/execution_success/double_verify_proof/src/main.nr index e4c6926efbc..4edf5c5af9f 100644 --- a/test_programs/execution_success/double_verify_proof/src/main.nr +++ b/test_programs/execution_success/double_verify_proof/src/main.nr @@ -1,6 +1,5 @@ use dep::std; -#[recursive] fn main( verification_key: [Field; 114], // This is the proof without public inputs attached. 
diff --git a/test_programs/execution_success/double_verify_proof_recursive/Nargo.toml b/test_programs/execution_success/double_verify_proof_recursive/Nargo.toml new file mode 100644 index 00000000000..b7688b33bfd --- /dev/null +++ b/test_programs/execution_success/double_verify_proof_recursive/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "double_verify_proof_recursive" +type = "bin" +authors = [""] +[dependencies] diff --git a/test_programs/execution_success/double_verify_proof_recursive/Prover.toml b/test_programs/execution_success/double_verify_proof_recursive/Prover.toml new file mode 100644 index 00000000000..dff48212e50 --- /dev/null +++ b/test_programs/execution_success/double_verify_proof_recursive/Prover.toml @@ -0,0 +1,5 @@ +key_hash = "0x096129b1c6e108252fc5c829c4cc9b7e8f0d1fd9f29c2532b563d6396645e08f" +proof = ["0x000000000000000000000000000000d62b795bec274279129a71195796825fcc","0x00000000000000000000000000000000000793ab763140f20a68a6bd2721fd74","0x00000000000000000000000000000053141d06d3307b36153f321511199e579c","0x00000000000000000000000000000000000a4b55d6c21f98a9c434911dcb5c67","0x0000000000000000000000000000005f9d324c0abd22cec92d99dbec438e9491","0x0000000000000000000000000000000000240dfafe1b53dc27147cbab14ea893","0x000000000000000000000000000000044a61d3aac32c6931247cf334a19d9611","0x000000000000000000000000000000000003f0f8cf4207bfa85c23ec9f8d0c88","0x00000000000000000000000000000002168a470e39ba2ac266f6b474de12045f","0x000000000000000000000000000000000025791e7d3feab542345c00ec5a30df","0x000000000000000000000000000000dcafd76d4c3640969c80e017b951ef6397","0x00000000000000000000000000000000001d27f75a1256771e88e0c86fc42dbc","0x0000000000000000000000000000007347ae7d2d9d7fc2b8f0baa014ee1fed9f","0x000000000000000000000000000000000018bd927f42bf7caf9555f56f09000d","0x000000000000000000000000000000041f765f83cbe5904c8f453f70a4531d10","0x00000000000000000000000000000000001858aabeeb5331a221419f4fed1c19","0x000000000000000000000000000000d254a54caaedf8287b9af951b2f2611121","0x000000000000000000000000000000000005ab493623c9563cf2e55ba5f18200","0x00000000000000000000000000000014f24cddc1a02440dc63637df8032c8074","0x000000000000000000000000000000000011950c16cef98471b1d78b935195a4","0x000000000000000000000000000000b0340b459e6bd5cc8f031c8654a502897f","0x00000000000000000000000000000000000e1cf3968dac4545a76a2ae58e512c","0x0000000000000000000000000000002adf7218aa06ca0d2c2e600dcc39193a2d","0x00000000000000000000000000000000001302e7e4b0f14749bd885ca25588b6","0x00000000000000000000000000000092009ce4056e79ab815d8cdfd4491138ae","0x000000000000000000000000000000000018af11e853c6cf2f0f6274b0da8133","0x000000000000000000000000000000dd3dc6f49232141718527b3a0e4b26e21d","0x00000000000000000000000000000000001a877853348a8b695c4f9a9aa4ce68","0x000000000000000000000000000000aecfc56ba07155450b368140d6324023b5","0x000000000000000000000000000000000029c11052798c57ece614617d33fcc2","0x000000000000000000000000000000eb106ffc816d16fb84e84b0b61157b2603","0x000000000000000000000000000000000026c3cac16206899a21cb5126841446","0x000000000000000000000000000000a782ed54805fe845068b362b58e2fa34ec","0x00000000000000000000000000000000000cf046a1bfcc666b7f28b572676073","0x000000000000000000000000000000b931c8dda60bb4aca4cc817f5540f1209f","0x000000000000000000000000000000000024ad50c3936fafc3d190e6a4874223","0x000000000000000000000000000000cce90cfbaf5671c8c8652db28a3a9566f7","0x000000000000000000000000000000000003574db9d0f84380c9635660f86354","0x0000000000000000000000000000003eb3e1dc31846a90f721e7a08c6d6dc4f7","0x00000000000000000000000000
0000000028999a700cd1abae1a288eebb9a91c","0x000000000000000000000000000000c1be4d385b11387e14eb9817050d772f78","0x000000000000000000000000000000000003c56b5bad8b4484c66ac921f1f102","0x000000000000000000000000000000ace245cabf0f00dc7fd253dd8af0377a14","0x0000000000000000000000000000000000107f1731fcf34b364c813599fa1df7","0x035b937d404932b542b706eb810ef4a7dca4566d4dde1ad6a8717f46167ead7e","0x17608cef3dc7960f41cb1295706df663727d45ee598a61e05e989d111449fb65","0x054712a950ad67da3aa860e49e6891f99b586b7f37caff94eb013fdb374b61ee","0x04b755083086c769b7f593e0e48d68dc54be808203351380ca5566a48149d8bb","0x17d7670b0915235f626fdc1d7e1134d2be906ef138d7843384b3ebc23b1d630f","0x064cf544ab5f4e3dab47960502cccc83321fb275068dfbdd3a2fcbc6dddcaa65","0x083338262712e2b66769ea40d9f412b18caa1bc81a51ff5a50b6c41f8c4b3d23","0x0cdd38958cab97defde00f4a5961b6fd676e29d9f2c352f6bb2c68b91f83f8af","0x02c8bdd005c2f43a0a8cbb2744916ce5c322dfa5b23367a829c12699f4036d32","0x25bac73c7e7b659fbea3135b7a0decf9db8dc3045bd2837dae337c64cc722546","0x19eb361aa419d37bce3d2e8b2b7692a02a9559e83d7f3d8fe9169970fbbc2cba","0x2494bd5106d00e05c7ea60e632e9fe03773b7f2c5b662aa37ec512a01f4a0775","0x18c52c2f2c6e7be1d7847c15e452a3a9c64316103d12e4b5b9a82fac4e940ee9","0x0e0342810456ef78f498c1bfa085a5f3cbc06db1f32fabd0ea9ad27dccac1680","0x024c13d6ef56af33ed7164ea8e47ddecc8a487b000d8b1b45edcd3895a503ba2","0x26e0d127f626bd39b55bc5d0c131dbf03fe006dc5c3edc57dda1e629799a4317","0x1b1140061bc52b15c4f5e100729a81968ee79dc03deb966a18850335a8e44a8b","0x1bb76f945199e71d531a89288912087a02dd0e83020e65d671485bf2e5e86e1a","0x29269900859c6d86e404185b415bf3b279cd100f38cfdb0077e8d6a299c4fd35","0x22b5e94bae2f6f0cdb424a3b12c4bf82cec3fb228e012c1974ed457827bbe012","0x18d3543a93249778e7a57936170dae85ffc47c2567f2d0076a32c0bb86fcf10a","0x03721dc2670206cde42a175fd56bcce32cf6cb8801450a8e8e4b3d4e07785973","0x2806db136dd214d3ac1478460855cae6a4324ab45cab35320d104fee26c260e8","0x1c3749f1937082afbbae9375b9be708cf339e1983e57ef4447f36cfa560c685c","0x1067b8cfb90ef08bcb48aea56b2716334241787c2004a95682d68a0685566fd0","0x0f41aee4416398f1d48ffc302403273cddef34a41f98507c53682041d82e51ff","0x10d854c9f0bfbdff7ca91a68f4978e9a79e7b14243d92f465f17bdf88d9f64f8","0x00000000000000000000000000000000018938b11099e0cdc05ddab84a153a97","0x0000000000000000000000000000000001d7dda1471f0dc3b3a3d3438c197982","0x00000000000000000000000000000000022682917da43ab9a6e9cbcece1db86d","0x2453913e6b0f36eab883ac4b0e0604d56aaeb9c55e641135173e63c342f1a660","0x05216c1b58dc43a49d01aaba3113b0e86be450fc17d28016e648e7162a1b67fb","0x152b34845a0222a2b41354c0d395a250d8363dc18748647d85acd89d6934ec56","0x1dfc6e971ce82b7dcda1f7f282713c6e22a8c79258a61209bda69719806da544","0x2968dd8b3af8e3953f1fbbd72f4c49b8270597bb27d4037adc157ac6083bee60","0x1b9425b88a4c7d39b3d75afe66917a9aa1d2055724392bc01fb918d84ff1410e","0x04ab571f236d8e750904dc307dd274003d9130f1a7110e4c1521cfb408877c73","0x2ad84f26fdc5831545272d02b806bb0e6dae44e71f73552c4eb9ff06030748c7","0x020e632b99d325db774b8630fb50b9a4e74d35b7f27d9fc02c65087ee747e42c","0x09a8c5a3171268cb61c02515c01c109889200ed13f415ae54df2078bbb887f92","0x1143281a9451abbb4c34c3fa84e7678c2af2e7ea8c05160a6f7f06988fc91af8","0x000000000000000000000000000000cbda736ca5cf6bc75413c2cc9e28ab0a68","0x00000000000000000000000000000000001ee78c9cc56aa5991062ae2e338587","0x000000000000000000000000000000bc9bfcdebb486f4cb314e681d2cc5f8df6","0x00000000000000000000000000000000000ad538431d04771bca7f633cb659ff","0x000000000000000000000000000000d45b317afcefa466a59bba9e171f1af70c","0x0000000000000000000000000000000000133c50180ea17932e4881124e7a7
c6","0x000000000000000000000000000000fc9ed37f543775849f3e84eaa06f77f992","0x00000000000000000000000000000000001372873c9c051d1baff99248b8f70e"] +public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] +verification_key = ["0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x00000000000000000000000000000092139c61bae1a44f0fc7689507414be688","0x00000000000000000000000000000000000160ce4e279582f91bde4f03f5e9a2","0x0000000000000000000000000000005dc2d37f658c3b2d60f24740eb13b65d79","0x000000000000000000000000000000000007e3e8a5d98a1177ec85bf88f163a5","0x000000000000000000000000000000dc3035fbd7ff16412a8fd7da587a935298","0x000000000000000000000000000000000023d08e2817ac16990004ed11d8fc66","0x000000000000000000000000000000356a5ad59c646c746a8d09f5d154e47c4f","0x00000000000000000000000000000000000708529196af3c8e16ffa580c26182","0x0000000000000000000000000000002ddfe70eb7a1280596e8e4a804f118a6dd","0x000000000000000000000000000000000013757e15a0905f298303784a161b21","0x000000000000000000000000000000a23a729df796935c7824e3a26be794829b","0x000000000000000000000000000000000005775b6c146c4a59856e869fe5a70e","0x000000000000000000000000000000eef0c9e088fd2d45aa40311082d1f2809b","0x00000000000000000000000000000000001d539ccbfc556d0ad59307a218de65","0x000000000000000000000000000000a2c848beceb6ab7806fd3b88037b8410fc","0x0000000000000000000000000000000000177004deeb1f9d401fd7b1af1a5ac8","0x0000000000000000000000000000002508eb63672a733f20de1a97644be4f540","0x00000000000000000000000000000000000d82d51f2f75d806285fd248c819b8","0x000000000000000000000000000000d002f9100cbba8a29f13b11513c53c59d0","0x000000000000000000000000000000000006cd3b0e3460533b9e5ea2cdc0fcbb","0x000000000000000000000000000000f45ea38a93b2f810c5633ddb54927c1c96","0x000000000000000000000000000000000021791de65f9a28ec7024b1a87ab4f3","0x000000000000000000000000000000926511a0439502c86885a8c6f0327aa7ad","0x000000000000000000000000000000000029fa14a969c5d81ed3abbbfb11220a","0x000000000000000000000000000000b84c3258e8206f560e5b5b18cbeafef87e","0x00000000000000000000000000000000002a910445cd8fc895e5d235c
d8ea185","0x000000000000000000000000000000887e67f15e84bcb8507a5064a363f6043b","0x000000000000000000000000000000000014dc6643d801c3ef27c2066b6e2bb4","0x000000000000000000000000000000e38e900b42c314ba803088e8fbf125203f","0x000000000000000000000000000000000020690fd4869db418306046b38161dc","0x0000000000000000000000000000001e2fa856bf7951b8292b1e88185993629c","0x0000000000000000000000000000000000048a85e0bbac7c60ad3d78f601f63c","0x0000000000000000000000000000006f457719495073d3666d77a625aeab0c51","0x00000000000000000000000000000000002623ad892dc62b1fa7d0a650f0d470","0x000000000000000000000000000000dbfcc8a467e021c03b13f74a9f79c3a10c","0x0000000000000000000000000000000000295f6f10976c37bd9c6f96bb7187d5","0x000000000000000000000000000000c13ef9a937cc12420fb38d9ab8e848e85e","0x000000000000000000000000000000000003560a3b334e887532f605c9cb7628","0x0000000000000000000000000000009bcebf08a4599cdda0fb96312d4dc0c7a9","0x000000000000000000000000000000000015adc8bb1e01c835f48959d1237bd6","0x00000000000000000000000000000047762ab839e4ff63c77605a9f383da37c2","0x000000000000000000000000000000000016a8c3c53d89660cf271522cd301fb","0x000000000000000000000000000000f0c8539a0b5f94420a513f9c305b932bfe","0x00000000000000000000000000000000002957ba01d9de5638f808f88a692533","0x000000000000000000000000000000ab17c6189d67d3bf5dd2f3885de0151b6f","0x0000000000000000000000000000000000060d8aa43fdc434d1942263f364d95","0x0000000000000000000000000000005d292333b3adb497f00b4bc32d45229060","0x00000000000000000000000000000000001a1018a66221883639f2898a66f345","0x00000000000000000000000000000006555a806b1993291deba0dc44e2abf431","0x00000000000000000000000000000000000cacff7099a9d5e35a21f4a00b2dc3","0x000000000000000000000000000000f50c11ba95d349c36d143eefd12e494950","0x00000000000000000000000000000000001022e8c5f02d639bc9dd8bc4407f99","0x000000000000000000000000000000c76828795098eda73d50b4b585c60afc60","0x00000000000000000000000000000000002bf09c0ec7011e93888962f2406630","0x00000000000000000000000000000049e5c83a8978d832fb8e144548e3ca1adb","0x00000000000000000000000000000000000e0ec242c2e160a984f61ca5adf5f5","0x0000000000000000000000000000009c5d6e08a6605ab4513748ac0fa017dd1c","0x00000000000000000000000000000000001f54baa07558e5fb055bd9ba49c067","0x0000000000000000000000000000001e1ee7ee29bbb5e4b080c6091c1433ce62","0x000000000000000000000000000000000024aec62a9d9763499267dc98c33428","0x0000000000000000000000000000001a96755946ff16f0d6632365f0eb0ab4d4","0x000000000000000000000000000000000028cf3e22bcd53782ebc3e0490e27e5","0x00000000000000000000000000000043148d7d8c9ba43f2133fab4201435a364","0x0000000000000000000000000000000000234ce541f1f5117dd404cfaf01a229","0x000000000000000000000000000000a7fb95ffb461d9514a1070e2d2403982ef","0x00000000000000000000000000000000003016955028b6390f446c3fd0c5b424","0x00000000000000000000000000000008863c3b7cd7cddc20ba79ce915051c56e","0x000000000000000000000000000000000013ef666111b0be56a235983d397d2a","0x000000000000000000000000000000e3993f465fc9f56e93ac769e597b752c1c","0x0000000000000000000000000000000000217f7c4235161e9a3c16c45b6ca499","0x0000000000000000000000000000008ffa4cd96bc67b0b7df5678271e1114075","0x0000000000000000000000000000000000256467bfcb63d9fdcb5dde397757ad","0x00000000000000000000000000000054e5eb270bb64bde6e6ececadfd8c3236c","0x00000000000000000000000000000000000e52d1bd75812c33c6f3d79ee4b94c","0x000000000000000000000000000000484a2c641dce55bc2dd64ef0cd790a7fea","0x00000000000000000000000000000000000ff417d256be43e73c8b1aa85bdda3","0x0000000000000000000000000000000b72e7b7713ab5da44e0f864182e748a23","0x000000000000000000000000
00000000001a221055f1625ad833a44705f5f74e","0x00000000000000000000000000000067a99a34e9b81a17ad001db02e29bcb82a","0x000000000000000000000000000000000018a6c02e398389827568fa960e86e2","0x000000000000000000000000000000bb29f26f9890d6cc6401f4921d5884edca","0x00000000000000000000000000000000000868357b28039385c5a5058b6d358e","0x00000000000000000000000000000036fb6e229dde8edf7ec858b12d7e8be485","0x00000000000000000000000000000000001060afe929554ca473103f5e68193c","0x00000000000000000000000000000015226e07e207744c0857074dcab883af4a","0x00000000000000000000000000000000000b1c02619282755533457230b19b4a","0x0000000000000000000000000000001f2a0277e4807e6e1cbabca21dde5eb5e1","0x00000000000000000000000000000000000d928deafed363659688ed4ccdef52","0x000000000000000000000000000000363f0c994e91cecad25835338edee2294f","0x00000000000000000000000000000000002eea648c8732596b1314fe2a4d2f05","0x000000000000000000000000000000b2671d2ae51d31c1210433c3972bb64578","0x00000000000000000000000000000000000ab49886c2b94bd0bd3f6ed1dbbe2c"] +proof_b = ["0x000000000000000000000000000000f05c69448ca29bdf52076f9b073bb30fed","0x000000000000000000000000000000000028c86bb3e27b4aaaaef126f7df5349","0x00000000000000000000000000000026ae031fc93594375dfc7f3bbe027f97d5","0x000000000000000000000000000000000000dd12c7290fe7f775796a233b8590","0x000000000000000000000000000000c1ee6631704de424d010c5c4ac8293ac49","0x00000000000000000000000000000000002f41818c9aa83f5c8d9bdd128015b9","0x000000000000000000000000000000b50a5801482f7e3a5de8ab3cce0f10b0d3","0x000000000000000000000000000000000022a0bc69c293dbf293b25bc9eef7f8","0x0000000000000000000000000000003b02abf1967ef394154dc15d763135e903","0x00000000000000000000000000000000000d8a2ee46acc6d1ed8d517b56d47c8","0x00000000000000000000000000000039bf0d1b3d8cf9de898f101c626e978d78","0x0000000000000000000000000000000000008faa7df2451a24d291a9b584f1a5","0x000000000000000000000000000000c1dae329ed7adf63a2d89a5f16fb98b6d8","0x00000000000000000000000000000000001ff0bc16fc0bd4aa2d6255690453c2","0x000000000000000000000000000000d12d7589f853a9b472613efa56689beaf1","0x00000000000000000000000000000000002d6fbc798f4403751df6aeee8bedd3","0x0000000000000000000000000000007c1fa069cb17194fecf88db9dd54a4ee36","0x0000000000000000000000000000000000268e026f9814822a42b2d59eec5d24","0x000000000000000000000000000000c3fb56beab774218cd63498fc050a5fd9b","0x00000000000000000000000000000000000071c014d7b5063f005a0bc2ee1af4","0x000000000000000000000000000000ae12b25371c6af42bbe0a85cddd2eaebc7","0x000000000000000000000000000000000026d270e1ffc9c7c344c694dfadda83","0x00000000000000000000000000000080280858c6be461716921caa3c26f3f6f3","0x000000000000000000000000000000000001dcdd3f39e27d0ce6aa5d14dff4c1","0x000000000000000000000000000000080e1d2c913c834ebcf7e0600c076c08fd","0x00000000000000000000000000000000002df3d142217694e65fb7c355d62764","0x000000000000000000000000000000e5e336f3f59d77e500f49771bfbeb12e83","0x000000000000000000000000000000000028fffe08bdc4c0690643d2e1a1275f","0x000000000000000000000000000000db5618b32afc13e18f21b39f3fbede9d11","0x00000000000000000000000000000000001d244818370d43fb7e8bc67e03787b","0x0000000000000000000000000000006bcc1fd3f9f78449ad1df1bc11bc379edd","0x000000000000000000000000000000000009ac9cbb285edbf5b3a973f3f5f1cb","0x000000000000000000000000000000fd885905b6c0fc95bb4dd0b11f6797d4b3","0x000000000000000000000000000000000021f07995cdd835145e19c38127c562","0x000000000000000000000000000000bbbf2b975c2c97ae4b45c4a52059e53ee3","0x000000000000000000000000000000000024158163788841cf4590bbc1e89a90","0x0000000000000000000000000000009aca93d2b1386ea41
2d4b36ea5bb9894a8","0x00000000000000000000000000000000002532d1d210e8ed4c2f5c00cbaaa475","0x000000000000000000000000000000634a88caa1d77cb6b5fe77cac31458fc31","0x00000000000000000000000000000000000bdf18bae92fce7cfddab5520cac6e","0x000000000000000000000000000000622e9626255170ccec77602c755aa193e1","0x000000000000000000000000000000000001d4edba370e04436a988bad05dada","0x000000000000000000000000000000b52934323a0aec8f803cdaafee2ab7bfb2","0x0000000000000000000000000000000000155312af5e0e25ca9fd61aef9e58ed","0x06270b517855f6f6a608e432883d1d1030a12a1e33022dc142b7728691421da2","0x2af7c794d7b720b25eb1df0afd8c8e3c15b6e518194c3caea7966a5f8210ff04","0x073fe573aeb27d81a5713be93e1365390dcbc3c8e7439ff1d36a84cc014f5642","0x11351b961147431e54535248b58b35cf5cddb9b13827899167617d7a96794d64","0x297c9421c9c3db286770787c35b86bc41583386491b4ae55e5fa81aefa21efc4","0x0f4eeca3ff4a3495f859898937688652d33f9b4dd3e003e12adf15278e0997c3","0x133e3d8b82721d40d919f2326810ba6f07eff3f7d20d86b2bde692a811522019","0x2c502f53c9698b73bb8c8f9b9cf2d705d16a64a7040348b4b39c637a2064316c","0x0cbc1971e1c566cde9d9125c91cdc88e817db182692f836c1a5170a6246eaf73","0x12c47793e7db706c637cd4b4d96d227f569850176b852b1fe8ad522ddb38ef0e","0x0cd7b300e9309a135285be1aeb02b152f97931a7357ab6d609a2cb1970aab877","0x2a7789dfe286c9d0a7592f1c9316e730cb14c9d843aefc4764d76e7f8571c96a","0x248ac54ce3dbf37796621882a4ac76046df5ab680da487fd85cce76b1ae392d3","0x149d1d07cebe320f77b03533e34912545cedeae62bd9778d37724728762b5710","0x00fe29daebdaed61309790e70e2dcefa3f3af4c6c965ce424b8dbcf09b8e4b49","0x2b75b3bace61b731d7f0c003a144b62b0a4fbe9f0d14ca89b0652b70210014b3","0x2588ef27cfb6e0d8c6f9a969b2da44fead30a02ed70a563fd15aa45bb671de1c","0x2b74d7674b55642697b4a1e226eddb0e4918b2d57aa5b99093dc46cadcdea000","0x244c626845d3a5040f08f01e9611f968ad675ca857789149b13a0cfa83a2e064","0x2cb8d02f90cae33fd7bcfb80af4aff067c4f5fc4b3f9228d5b8f768bc8f6c971","0x1372f3d1f04e0c39a50e823d5da03d70bebe19a1b8e28f8c2ff601cc0bfc0095","0x19af6601d2613426a50b7c35d60562a5f2f2634e6af56dac13459632e15570ee","0x13c2a16ed3b65dcd9414659be79af17995d344de34eaf962343b0f1e76c73a57","0x0dd5dcdbd50b8774831d4f01f930804d38b4266dfee085185530880a0c3903c0","0x07e91848d660b11b722638680ac60f20db9507fdc8d610ce762600f5a1aacd29","0x1f9c2a94d10c0a7fb60292cfc46fd3d2501181bea0ffe1f5f2501d474be3a785","0x14edb9c5bd389eae08a5ea2a7a1662894e1e878c142084d966a625bef68cf7c3","0x00000000000000000000000000000000cecd01810814d175f0a533f0067618c4","0x00000000000000000000000000000000f82935013ce5c82720c63e533af41db8","0x000000000000000000000000000000012185688171b6bed850e748b66f7222ac","0x2dd7f5ff2150155c2ac86ebe28d9ecbca2eea812b0021ab2bceae111cfea8325","0x04ea6c2daf2b9e827d2213c3d03953410dcf1ed67ba34a3c00e772be92606a8b","0x163f2bd18dcde52f99b9867c944780fd718d1612927053b139b280fc55013d1b","0x05e388fd160ccac30a8f7b18a4bd042f705e92b5937e8c0e9478e2ff623907c6","0x00ba3f6f527d6ed3ff17a63b1d5be3c42bdfae88fdf63311fc7b871157939309","0x16187d9daa8c2e5a1a9ab15be7ca6a8feebfb31bea76f9a3ca69381881c70561","0x0f64522e4904edb7377b14a7b9dad848829167324ef5c016346b3ad8251191ee","0x273bbe6000a4001dce369e5a36cc0b0ca3fd351665b688238aa8c556a6ca6b8e","0x022d2232efb2faa8307846c9a4c697aabad1b7f1336b35ad72fa8922975b49d9","0x0d82d478bff3955c4b0a34ef94427ca5f9da23147ad953c89f2e428277ec2825","0x18d886be90343010659c231583be61a138e28e37c24771e3cb61fbe2587d0671","0x000000000000000000000000000000196ba6a58dbeb7c34cb1d6287e23d434de","0x00000000000000000000000000000000001df8ae8a1589590f8863c1fefd8dfd","0x000000000000000000000000000000f30e11b2c5fbefa166cbb9f58c5f8e1a4c","0x00000000000000
0000000000000000000026420ade7666bc0ab1cf1fd9d0c534","0x0000000000000000000000000000000feb5b7d8260d25a1ee1ce76ff461673fc","0x00000000000000000000000000000000002bd2ac6223a80671b777bf5dca70a4","0x000000000000000000000000000000690f757006d2fa1ddb0114c9f268783537","0x000000000000000000000000000000000023ad36feadd91e50118f32e97a0204"] \ No newline at end of file diff --git a/test_programs/execution_success/double_verify_proof_recursive/src/main.nr b/test_programs/execution_success/double_verify_proof_recursive/src/main.nr new file mode 100644 index 00000000000..e4c6926efbc --- /dev/null +++ b/test_programs/execution_success/double_verify_proof_recursive/src/main.nr @@ -0,0 +1,29 @@ +use dep::std; + +#[recursive] +fn main( + verification_key: [Field; 114], + // This is the proof without public inputs attached. + // + // This means: the size of this does not change with the number of public inputs. + proof: [Field; 93], + public_inputs: pub [Field; 1], + // This is currently not public. It is fine given that the vk is a part of the circuit definition. + // I believe we want to eventually make it public too though. + key_hash: Field, + proof_b: [Field; 93] +) { + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash + ); + + std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash + ); +} diff --git a/test_programs/execution_success/eddsa/src/main.nr b/test_programs/execution_success/eddsa/src/main.nr index 4404ffe75f7..fd1a95ee5fb 100644 --- a/test_programs/execution_success/eddsa/src/main.nr +++ b/test_programs/execution_success/eddsa/src/main.nr @@ -2,7 +2,9 @@ use dep::std::compat; use dep::std::ec::consts::te::baby_jubjub; use dep::std::ec::tecurve::affine::Point as TEPoint; use dep::std::hash; -use dep::std::eddsa::{eddsa_to_pub, eddsa_poseidon_verify}; +use dep::std::eddsa::{eddsa_to_pub, eddsa_poseidon_verify, eddsa_verify_with_hasher}; +use dep::std::hash::poseidon2::Poseidon2Hasher; +use dep::std::hash::pedersen::PedersenHasher; fn main(msg: pub Field, _priv_key_a: Field, _priv_key_b: Field) { // Skip this test for non-bn254 backends @@ -48,5 +50,11 @@ fn main(msg: pub Field, _priv_key_a: Field, _priv_key_b: Field) { assert(!eddsa_poseidon_verify(pub_key_a.x, pub_key_a.y, s_b, r8_b.x, r8_b.y, msg)); // User A's signature over the message can't be used with another message assert(!eddsa_poseidon_verify(pub_key_a.x, pub_key_a.y, s_a, r8_a.x, r8_a.y, msg + 1)); + // Using a different hash should fail + let mut hasher = Poseidon2Hasher::default(); + assert(!eddsa_verify_with_hasher(pub_key_a.x, pub_key_a.y, s_a, r8_a.x, r8_a.y, msg, &mut hasher)); + // Using a different hash should fail + let mut hasher = PedersenHasher::default(); + assert(!eddsa_verify_with_hasher(pub_key_a.x, pub_key_a.y, s_a, r8_a.x, r8_a.y, msg, &mut hasher)); } } diff --git a/test_programs/execution_success/hashmap/src/main.nr b/test_programs/execution_success/hashmap/src/main.nr index 597a5c0b7de..4d2cbd45993 100644 --- a/test_programs/execution_success/hashmap/src/main.nr +++ b/test_programs/execution_success/hashmap/src/main.nr @@ -37,6 +37,8 @@ fn main(input: [Entry; HASHMAP_LEN]) { test_retain(); test_iterators(); test_mut_iterators(); + + doc_tests(); } // Insert, get, remove. 
@@ -154,9 +156,9 @@ fn test_iterators() { hashmap.insert(5, 7); hashmap.insert(11, 13); - let keys: [K; 3] = cut(hashmap.keys()).map(|k: Option| k.unwrap_unchecked()).sort_via(K_CMP); - let values: [V; 3] = cut(hashmap.values()).map(|v: Option| v.unwrap_unchecked()).sort_via(V_CMP); - let entries: [(K, V); 3] = cut(hashmap.entries()).map(|e: Option<(K, V)>| e.unwrap_unchecked()).sort_via(KV_CMP); + let keys: [K; 3] = cut(hashmap.keys()).sort_via(K_CMP); + let values: [V; 3] = cut(hashmap.values()).sort_via(V_CMP); + let entries: [(K, V); 3] = cut(hashmap.entries()).sort_via(KV_CMP); assert(keys == [2, 5, 11], "Got incorrect iteration of keys."); assert(values == [3, 7, 13], "Got incorrect iteration of values."); @@ -177,8 +179,8 @@ fn test_mut_iterators() { let f = |v: V| -> V{ v * 5}; hashmap.iter_values_mut(f); - let keys: [K; 3] = cut(hashmap.keys()).map(|k: Option| k.unwrap_unchecked()).sort_via(K_CMP); - let values: [V; 3] = cut(hashmap.values()).map(|v: Option| v.unwrap_unchecked()).sort_via(V_CMP); + let keys: [K; 3] = cut(hashmap.keys()).sort_via(K_CMP); + let values: [V; 3] = cut(hashmap.values()).sort_via(V_CMP); assert(keys == [6, 15, 33], f"Got incorrect iteration of keys: {keys}"); assert(values == [15, 35, 65], "Got incorrect iteration of values."); @@ -186,7 +188,175 @@ fn test_mut_iterators() { let f = |k: K, v: V| -> (K, V){(k * 2, v * 2)}; hashmap.iter_mut(f); - let entries: [(K, V); 3] = cut(hashmap.entries()).map(|e: Option<(K, V)>| e.unwrap_unchecked()).sort_via(KV_CMP); + let entries: [(K, V); 3] = cut(hashmap.entries()).sort_via(KV_CMP); assert(entries == [(12, 30), (30, 70), (66, 130)], "Got incorrect iteration of entries."); } + +// docs:start:type_alias +type MyMap = HashMap>; +// docs:end:type_alias + +/// Tests examples from the stdlib hashmap documentation +fn doc_tests() { + // docs:start:default_example + let hashmap: HashMap> = HashMap::default(); + assert(hashmap.is_empty()); + // docs:end:default_example + + // docs:start:with_hasher_example + let my_hasher: BuildHasherDefault = Default::default(); + let hashmap: HashMap> = HashMap::with_hasher(my_hasher); + assert(hashmap.is_empty()); + // docs:end:with_hasher_example + + // docs:start:insert_example + let mut map: HashMap> = HashMap::default(); + map.insert(12, 42); + assert(map.len() == 1); + // docs:end:insert_example + + get_example(map); + + // docs:start:remove_example + map.remove(12); + assert(map.is_empty()); + + // If a key was not present in the map, remove does nothing + map.remove(12); + assert(map.is_empty()); + // docs:end:remove_example + + // docs:start:is_empty_example + assert(map.is_empty()); + + map.insert(1, 2); + assert(!map.is_empty()); + + map.remove(1); + assert(map.is_empty()); + // docs:end:is_empty_example + + // docs:start:len_example + // This is equivalent to checking map.is_empty() + assert(map.len() == 0); + + map.insert(1, 2); + map.insert(3, 4); + map.insert(5, 6); + assert(map.len() == 3); + + // 3 was already present as a key in the hash map, so the length is unchanged + map.insert(3, 7); + assert(map.len() == 3); + + map.remove(1); + assert(map.len() == 2); + // docs:end:len_example + + // docs:start:capacity_example + let empty_map: HashMap> = HashMap::default(); + assert(empty_map.len() == 0); + assert(empty_map.capacity() == 42); + // docs:end:capacity_example + + // docs:start:clear_example + assert(!map.is_empty()); + map.clear(); + assert(map.is_empty()); + // docs:end:clear_example + + // docs:start:contains_key_example + if map.contains_key(7) { + let value = 
map.get(7); + assert(value.is_some()); + } else { + println("No value for key 7!"); + } + // docs:end:contains_key_example + + entries_examples(map); + iter_examples(map); + + // docs:start:retain_example + map.retain(|k, v| (k != 0) & (v != 0)); + // docs:end:retain_example + + // docs:start:eq_example + let mut map1: HashMap> = HashMap::default(); + let mut map2: HashMap> = HashMap::default(); + + map1.insert(1, 2); + map1.insert(3, 4); + + map2.insert(3, 4); + map2.insert(1, 2); + + assert(map1 == map2); + // docs:end:eq_example +} + +// docs:start:get_example +fn get_example(map: HashMap>) { + let x = map.get(12); + + if x.is_some() { + assert(x.unwrap() == 42); + } +} +// docs:end:get_example + +fn entries_examples(map: HashMap>) { + // docs:start:entries_example + let entries = map.entries(); + + // The length of a hashmap may not be compile-time known, so we + // need to loop over its capacity instead + for i in 0..map.capacity() { + if i < entries.len() { + let (key, value) = entries.get(i); + println(f"{key} -> {value}"); + } + } + // docs:end:entries_example + + // docs:start:keys_example + let keys = map.keys(); + + for i in 0..keys.max_len() { + if i < keys.len() { + let key = keys.get_unchecked(i); + let value = map.get(key).unwrap_unchecked(); + println(f"{key} -> {value}"); + } + } + // docs:end:keys_example + + // docs:start:values_example + let values = map.values(); + + for i in 0..values.max_len() { + if i < values.len() { + let value = values.get_unchecked(i); + println(f"Found value {value}"); + } + } + // docs:end:values_example +} + +fn iter_examples(mut map: HashMap>) { + // docs:start:iter_mut_example + // Add 1 to each key in the map, and double the value associated with that key. + map.iter_mut(|k, v| (k + 1, v * 2)); + // docs:end:iter_mut_example + + // docs:start:iter_keys_mut_example + // Double each key, leaving the value associated with that key untouched + map.iter_keys_mut(|k| k * 2); + // docs:end:iter_keys_mut_example + + // docs:start:iter_values_mut_example + // Halve each value + map.iter_values_mut(|v| v / 2); + // docs:end:iter_values_mut_example +} diff --git a/test_programs/execution_success/hashmap/src/utils.nr b/test_programs/execution_success/hashmap/src/utils.nr index 45c9ca9bbf7..ee73245a902 100644 --- a/test_programs/execution_success/hashmap/src/utils.nr +++ b/test_programs/execution_success/hashmap/src/utils.nr @@ -1,10 +1,10 @@ -// Compile-time: cuts the M first elements from the [T; N] array. -pub(crate) fn cut(input: [T; N]) -> [T; M] { - assert(M as u64 < N as u64, "M should be less than N."); +// Compile-time: cuts the M first elements from the BoundedVec. 
+pub(crate) fn cut(input: BoundedVec) -> [T; M] { + assert(M < N, "M should be less than N."); - let mut new = [dep::std::unsafe::zeroed(); M]; + let mut new = BoundedVec::new(); for i in 0..M { - new[i] = input[i]; + new.push(input.get(i)); } - new + new.storage() } diff --git a/test_programs/execution_success/poseidon_bn254_hash/src/main.nr b/test_programs/execution_success/poseidon_bn254_hash/src/main.nr index 939b99595c7..a1607956190 100644 --- a/test_programs/execution_success/poseidon_bn254_hash/src/main.nr +++ b/test_programs/execution_success/poseidon_bn254_hash/src/main.nr @@ -9,7 +9,7 @@ fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field, x3: [Field let hash2 = poseidon::bn254::hash_4(x2); assert(hash2 == y2); - let hash3 = poseidon2::Poseidon2::hash(x3, x3.len() as u32); + let hash3 = poseidon2::Poseidon2::hash(x3, x3.len()); assert(hash3 == y3); } // docs:end:poseidon diff --git a/test_programs/execution_success/regression_4449/Nargo.toml b/test_programs/execution_success/regression_4449/Nargo.toml new file mode 100644 index 00000000000..925420a03a8 --- /dev/null +++ b/test_programs/execution_success/regression_4449/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_4449" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/regression_4449/Prover.toml b/test_programs/execution_success/regression_4449/Prover.toml new file mode 100644 index 00000000000..81af476bcc9 --- /dev/null +++ b/test_programs/execution_success/regression_4449/Prover.toml @@ -0,0 +1,3 @@ + +x = 0xbd +result = [204, 59, 83, 197, 18, 1, 128, 43, 247, 28, 104, 225, 106, 13, 20, 187, 42, 26, 67, 150, 48, 75, 238, 168, 121, 247, 142, 160, 71, 222, 97, 188] \ No newline at end of file diff --git a/test_programs/execution_success/regression_4449/src/main.nr b/test_programs/execution_success/regression_4449/src/main.nr new file mode 100644 index 00000000000..454a93f5d1a --- /dev/null +++ b/test_programs/execution_success/regression_4449/src/main.nr @@ -0,0 +1,14 @@ +// Regression test for issue #4449 +use dep::std; + +fn main(x: u8, result: [u8; 32]) { + let x = x % 31; + let mut digest = [0; 32]; + for i in 0..70 { + let y = x + i; + let a = [y, x, 32, 0, y + 1, y - 1, y - 2, 5]; + digest = std::sha256::digest(a); + } + + assert(digest == result); +} diff --git a/test_programs/execution_success/slices/src/main.nr b/test_programs/execution_success/slices/src/main.nr index eca42a660c4..e44edf872a4 100644 --- a/test_programs/execution_success/slices/src/main.nr +++ b/test_programs/execution_success/slices/src/main.nr @@ -50,6 +50,10 @@ fn main(x: Field, y: pub Field) { // The parameters to this function must come from witness values (inputs to main) regression_merge_slices(x, y); regression_2370(); + + regression_4418(x); + regression_slice_call_result(x, y); + regression_4506(); } // Ensure that slices of struct/tuple values work. 
fn regression_2083() { @@ -297,3 +301,32 @@ fn regression_2370() { let mut slice = []; slice = [1, 2, 3]; } + +fn regression_4418(x: Field) { + let mut crash = x.to_be_bytes(32); + + if x != 0 { + crash[0] = 10; + } +} + +fn regression_slice_call_result(x: Field, y: Field) { + let mut slice = merge_slices_return(x, y); + if x != 0 { + slice = slice.push_back(5); + slice = slice.push_back(10); + } else { + slice = slice.push_back(5); + } + assert(slice.len() == 5); + assert(slice[0] == 0); + assert(slice[1] == 0); + assert(slice[2] == 10); + assert(slice[3] == 5); + assert(slice[4] == 10); +} + +fn regression_4506() { + let slice: [Field] = [1, 2, 3]; + assert(slice == slice); +} diff --git a/test_programs/noir_test_failure/should_fail_suite_with_one_failure/Nargo.toml b/test_programs/noir_test_failure/should_fail_suite_with_one_failure/Nargo.toml new file mode 100644 index 00000000000..3d2cf2c6096 --- /dev/null +++ b/test_programs/noir_test_failure/should_fail_suite_with_one_failure/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "should_fail_with_mismatch" +type = "bin" +authors = [""] +[dependencies] diff --git a/test_programs/test_libraries/exporting_lib/src/lib.nr b/test_programs/test_libraries/exporting_lib/src/lib.nr index af1fd7a32de..bfb1819132a 100644 --- a/test_programs/test_libraries/exporting_lib/src/lib.nr +++ b/test_programs/test_libraries/exporting_lib/src/lib.nr @@ -1,4 +1,3 @@ - struct MyStruct { inner: Field } diff --git a/tooling/acvm_cli/Cargo.toml b/tooling/acvm_cli/Cargo.toml new file mode 100644 index 00000000000..72424405d36 --- /dev/null +++ b/tooling/acvm_cli/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "acvm_cli" +description = "The entrypoint for executing the ACVM" +# x-release-please-start-version +version = "0.40.0" +# x-release-please-end +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true +repository.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +# Rename binary from `acvm_cli` to `acvm` +[[bin]] +name = "acvm" +path = "src/main.rs" + +[dependencies] +thiserror.workspace = true +toml.workspace = true +color-eyre = "0.6.2" +clap.workspace = true +acvm.workspace = true +nargo.workspace = true +const_format.workspace = true +bn254_blackbox_solver.workspace = true +acir.workspace = true + +# Logs +tracing-subscriber.workspace = true +tracing-appender = "0.2.3" + +[dev-dependencies] +rand = "0.8.5" +proptest = "1.2.0" +paste = "1.0.14" diff --git a/tooling/acvm_cli/src/cli/execute_cmd.rs b/tooling/acvm_cli/src/cli/execute_cmd.rs new file mode 100644 index 00000000000..255b6131fd6 --- /dev/null +++ b/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -0,0 +1,78 @@ +use std::io::{self, Write}; + +use acir::circuit::Circuit; +use acir::native_types::WitnessMap; +use bn254_blackbox_solver::Bn254BlackBoxSolver; +use clap::Args; + +use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; +use crate::cli::fs::witness::save_witness_to_dir; +use crate::errors::CliError; +use nargo::ops::{execute_circuit, DefaultForeignCallExecutor}; + +use super::fs::witness::create_output_witness_string; + +/// Executes a circuit to calculate its return value +#[derive(Debug, Clone, Args)] +pub(crate) struct ExecuteCommand { + /// Write the execution witness to named file + #[clap(long, short)] + output_witness: Option, + + /// The name of the toml file which contains the input witness map + #[clap(long, short)] + input_witness: String, + + /// 
The name of the binary file containing circuit bytecode + #[clap(long, short)] + bytecode: String, + + /// The working directory + #[clap(long, short)] + working_directory: String, + + /// Set to print output witness to stdout + #[clap(long, short, action)] + print: bool, +} + +fn run_command(args: ExecuteCommand) -> Result<String, CliError> { + let bytecode = read_bytecode_from_file(&args.working_directory, &args.bytecode)?; + let circuit_inputs = read_inputs_from_file(&args.working_directory, &args.input_witness)?; + let output_witness = execute_program_from_witness(&circuit_inputs, &bytecode, None)?; + let output_witness_string = create_output_witness_string(&output_witness)?; + if args.output_witness.is_some() { + save_witness_to_dir( + &output_witness_string, + &args.working_directory, + &args.output_witness.unwrap(), + )?; + } + Ok(output_witness_string) +} + +pub(crate) fn run(args: ExecuteCommand) -> Result<String, CliError> { + let print = args.print; + let output_witness_string = run_command(args)?; + if print { + io::stdout().write_all(output_witness_string.as_bytes()).unwrap(); + } + Ok(output_witness_string) +} + +pub(crate) fn execute_program_from_witness( + inputs_map: &WitnessMap, + bytecode: &[u8], + foreign_call_resolver_url: Option<&str>, +) -> Result<WitnessMap, CliError> { + let blackbox_solver = Bn254BlackBoxSolver::new(); + let circuit: Circuit = Circuit::deserialize_circuit(bytecode) + .map_err(|_| CliError::CircuitDeserializationError())?; + execute_circuit( + &circuit, + inputs_map.clone(), + &blackbox_solver, + &mut DefaultForeignCallExecutor::new(true, foreign_call_resolver_url), + ) + .map_err(CliError::CircuitExecutionError) +} diff --git a/tooling/acvm_cli/src/cli/fs/inputs.rs b/tooling/acvm_cli/src/cli/fs/inputs.rs new file mode 100644 index 00000000000..2a46cfba884 --- /dev/null +++ b/tooling/acvm_cli/src/cli/fs/inputs.rs @@ -0,0 +1,54 @@ +use acir::{ + native_types::{Witness, WitnessMap}, + FieldElement, +}; +use toml::Table; + +use crate::errors::{CliError, FilesystemError}; +use std::{fs::read, path::Path}; + +/// Returns the circuit's parameters parsed from a toml file at the given location +pub(crate) fn read_inputs_from_file<P: AsRef<Path>>( + working_directory: P, + file_name: &String, +) -> Result<WitnessMap, CliError> { + let file_path = working_directory.as_ref().join(file_name); + if !file_path.exists() { + return Err(CliError::FilesystemError(FilesystemError::MissingTomlFile( + file_name.to_owned(), + file_path, + ))); + } + + let input_string = std::fs::read_to_string(file_path) + .map_err(|_| FilesystemError::InvalidTomlFile(file_name.clone()))?; + let input_map = input_string + .parse::<Table>() + .map_err(|_| FilesystemError::InvalidTomlFile(file_name.clone()))?; + let mut witnesses: WitnessMap = WitnessMap::new(); + for (key, value) in input_map.into_iter() { + let index = + Witness(key.trim().parse().map_err(|_| CliError::WitnessIndexError(key.clone()))?); + if !value.is_str() { + return Err(CliError::WitnessValueError(key.clone())); + } + let field = FieldElement::from_hex(value.as_str().unwrap()).unwrap(); + witnesses.insert(index, field); + } + + Ok(witnesses) +} + +/// Returns the circuit's bytecode read from the file at the given location +pub(crate) fn read_bytecode_from_file<P: AsRef<Path>>( + working_directory: P, + file_name: &String, +) -> Result<Vec<u8>, FilesystemError> { + let file_path = working_directory.as_ref().join(file_name); + if !file_path.exists() { + return Err(FilesystemError::MissingBytecodeFile(file_name.to_owned(), file_path)); + } + let bytecode: Vec<u8> = + read(file_path).map_err(|_|
FilesystemError::InvalidBytecodeFile(file_name.clone()))?; + Ok(bytecode) +} diff --git a/tooling/acvm_cli/src/cli/fs/mod.rs b/tooling/acvm_cli/src/cli/fs/mod.rs new file mode 100644 index 00000000000..f23ba06fd8b --- /dev/null +++ b/tooling/acvm_cli/src/cli/fs/mod.rs @@ -0,0 +1,2 @@ +pub(super) mod inputs; +pub(super) mod witness; diff --git a/tooling/acvm_cli/src/cli/fs/witness.rs b/tooling/acvm_cli/src/cli/fs/witness.rs new file mode 100644 index 00000000000..2daaa5a3a58 --- /dev/null +++ b/tooling/acvm_cli/src/cli/fs/witness.rs @@ -0,0 +1,36 @@ +use std::{ + collections::BTreeMap, + fs::File, + io::Write, + path::{Path, PathBuf}, +}; + +use acvm::acir::native_types::WitnessMap; + +use crate::errors::{CliError, FilesystemError}; + +/// Saves the provided output witnesses to a toml file created at the given location +pub(crate) fn save_witness_to_dir<P: AsRef<Path>>( + output_witness: &String, + witness_dir: P, + file_name: &String, +) -> Result<PathBuf, FilesystemError> { + let witness_path = witness_dir.as_ref().join(file_name); + + let mut file = File::create(&witness_path) + .map_err(|_| FilesystemError::OutputWitnessCreationFailed(file_name.clone()))?; + write!(file, "{}", output_witness) + .map_err(|_| FilesystemError::OutputWitnessWriteFailed(file_name.clone()))?; + + Ok(witness_path) +} + +/// Creates a toml representation of the provided witness map +pub(crate) fn create_output_witness_string(witnesses: &WitnessMap) -> Result<String, CliError> { + let mut witness_map: BTreeMap<String, String> = BTreeMap::new(); + for (key, value) in witnesses.clone().into_iter() { + witness_map.insert(key.0.to_string(), format!("0x{}", value.to_hex())); + } + + toml::to_string(&witness_map).map_err(|_| CliError::OutputWitnessSerializationFailed()) +} diff --git a/tooling/acvm_cli/src/cli/mod.rs b/tooling/acvm_cli/src/cli/mod.rs new file mode 100644 index 00000000000..a610b08ab77 --- /dev/null +++ b/tooling/acvm_cli/src/cli/mod.rs @@ -0,0 +1,41 @@ +use clap::{Parser, Subcommand}; +use color_eyre::eyre; +use const_format::formatcp; + +mod execute_cmd; +mod fs; + +const ACVM_VERSION: &str = env!("CARGO_PKG_VERSION"); + +static VERSION_STRING: &str = formatcp!("version = {}\n", ACVM_VERSION,); + +#[derive(Parser, Debug)] +#[command(name="acvm", author, version=VERSION_STRING, about, long_about = None)] +struct ACVMCli { + #[command(subcommand)] + command: ACVMCommand, +} + +#[non_exhaustive] +#[derive(Subcommand, Clone, Debug)] +enum ACVMCommand { + Execute(execute_cmd::ExecuteCommand), +} + +#[cfg(not(feature = "codegen-docs"))] +pub(crate) fn start_cli() -> eyre::Result<()> { + let ACVMCli { command } = ACVMCli::parse(); + + match command { + ACVMCommand::Execute(args) => execute_cmd::run(args), + }?; + + Ok(()) +} + +#[cfg(feature = "codegen-docs")] +pub(crate) fn start_cli() -> eyre::Result<()> { + let markdown: String = clap_markdown::help_markdown::<ACVMCli>(); + println!("{markdown}"); + Ok(()) +} diff --git a/tooling/acvm_cli/src/errors.rs b/tooling/acvm_cli/src/errors.rs new file mode 100644 index 00000000000..035388d05f7 --- /dev/null +++ b/tooling/acvm_cli/src/errors.rs @@ -0,0 +1,52 @@ +use nargo::NargoError; +use std::path::PathBuf; +use thiserror::Error; + +#[derive(Debug, Error)] +pub(crate) enum FilesystemError { + #[error( + " Error: cannot find {0} in expected location {1:?}.\n Please generate this file at the expected location."
+ )] + MissingTomlFile(String, PathBuf), + #[error(" Error: failed to parse toml file {0}.")] + InvalidTomlFile(String), + #[error( + " Error: cannot find {0} in expected location {1:?}.\n Please generate this file at the expected location." + )] + MissingBytecodeFile(String, PathBuf), + + #[error(" Error: failed to read bytecode file {0}.")] + InvalidBytecodeFile(String), + + #[error(" Error: failed to create output witness file {0}.")] + OutputWitnessCreationFailed(String), + + #[error(" Error: failed to write output witness file {0}.")] + OutputWitnessWriteFailed(String), +} + +#[derive(Debug, Error)] +pub(crate) enum CliError { + /// Filesystem errors + #[error(transparent)] + FilesystemError(#[from] FilesystemError), + + /// Error related to circuit deserialization + #[error("Error: failed to deserialize circuit")] + CircuitDeserializationError(), + + /// Error related to circuit execution + #[error(transparent)] + CircuitExecutionError(#[from] NargoError), + + /// Input Witness Value Error + #[error("Error: failed to parse witness value {0}")] + WitnessValueError(String), + + /// Input Witness Index Error + #[error("Error: failed to parse witness index {0}")] + WitnessIndexError(String), + + #[error(" Error: failed to serialize output witness.")] + OutputWitnessSerializationFailed(), +} diff --git a/tooling/acvm_cli/src/main.rs b/tooling/acvm_cli/src/main.rs new file mode 100644 index 00000000000..33cadc73a7c --- /dev/null +++ b/tooling/acvm_cli/src/main.rs @@ -0,0 +1,36 @@ +#![forbid(unsafe_code)] +#![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] +#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] + +mod cli; +mod errors; + +use std::env; + +use tracing_appender::rolling; +use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter}; + +fn main() { + // Setup tracing + if let Ok(log_dir) = env::var("ACVM_LOG_DIR") { + let debug_file = rolling::daily(log_dir, "acvm-log"); + tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) + .with_writer(debug_file) + .with_ansi(false) + .with_env_filter(EnvFilter::from_default_env()) + .init(); + } else { + tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) + .with_ansi(true) + .with_env_filter(EnvFilter::from_env("NOIR_LOG")) + .init(); + } + + if let Err(report) = cli::start_cli() { + eprintln!("{report}"); + std::process::exit(1); + } +} diff --git a/tooling/bb_abstraction_leaks/build.rs b/tooling/bb_abstraction_leaks/build.rs index f9effd5d991..0bd2b1c076a 100644 --- a/tooling/bb_abstraction_leaks/build.rs +++ b/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.24.0"; +const VERSION: &str = "0.26.3"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/tooling/debugger/ignored-tests.txt b/tooling/debugger/ignored-tests.txt index c472e828739..231d4d897a9 100644 --- a/tooling/debugger/ignored-tests.txt +++ b/tooling/debugger/ignored-tests.txt @@ -1,21 +1,13 @@ array_dynamic_blackbox_input -array_sort -assign_ex +bigint bit_shifts_comptime -brillig_cow -brillig_nested_arrays brillig_references brillig_to_bytes_integration debug_logs double_verify_nested_proof double_verify_proof modulus -nested_array_dynamic -nested_array_in_slice -nested_arrays_from_brillig references scalar_mul signed_comparison -simple_2d_array to_bytes_integration -bigint diff --git a/tooling/debugger/src/context.rs 
b/tooling/debugger/src/context.rs index 515edf0bb06..a3ee89263a4 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -8,11 +8,14 @@ use acvm::pwg::{ }; use acvm::{BlackBoxFunctionSolver, FieldElement}; -use nargo::artifacts::debug::DebugArtifact; +use codespan_reporting::files::{Files, SimpleFile}; +use fm::FileId; +use nargo::artifacts::debug::{DebugArtifact, StackFrame}; use nargo::errors::{ExecutionError, Location}; use nargo::NargoError; -use noirc_printable_type::{PrintableType, PrintableValue}; +use noirc_driver::DebugFile; +use std::collections::BTreeMap; use std::collections::{hash_set::Iter, HashSet}; #[derive(Debug)] @@ -29,6 +32,7 @@ pub(super) struct DebugContext<'a, B: BlackBoxFunctionSolver> { foreign_call_executor: Box, debug_artifact: &'a DebugArtifact, breakpoints: HashSet, + source_to_opcodes: BTreeMap>, } impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { @@ -39,12 +43,14 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { initial_witness: WitnessMap, foreign_call_executor: Box, ) -> Self { + let source_to_opcodes = build_source_to_opcode_debug_mappings(debug_artifact); Self { acvm: ACVM::new(blackbox_solver, &circuit.opcodes, initial_witness), brillig_solver: None, foreign_call_executor, debug_artifact, breakpoints: HashSet::new(), + source_to_opcodes, } } @@ -100,10 +106,50 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.debug_artifact .file_map .get(&location.file) - .map(|file| file.path.starts_with("__debug/")) + .map(is_debug_file_in_debug_crate) .unwrap_or(false) } + /// Find an opcode location matching a source code location + // We apply some heuristics here, and there are four possibilities for the + // return value of this function: + // 1. the source location is not found -> None + // 2. an exact unique location is found (very rare) -> Some(opcode_location) + // 3. an exact but not unique location is found, ie. a source location may + // be mapped to multiple opcodes, and those may be disjoint, for example for + // functions called multiple times throughout the program + // -> return the first opcode in program order that matches the source location + // 4. exact location is not found, so an opcode for a nearby source location + // is returned (this again could actually be more than one opcodes) + // -> return the opcode for the next source line that is mapped + pub(super) fn find_opcode_for_source_location( + &self, + file_id: &FileId, + line: i64, + ) -> Option { + let line = line as usize; + let Some(line_to_opcodes) = self.source_to_opcodes.get(file_id) else { + return None; + }; + let found_index = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { + Ok(index) => { + // move backwards to find the first opcode which matches the line + let mut index = index; + while index > 0 && line_to_opcodes[index - 1].0 == line { + index -= 1; + } + line_to_opcodes[index].1 + } + Err(index) => { + if index >= line_to_opcodes.len() { + return None; + } + line_to_opcodes[index].1 + } + }; + Some(found_index) + } + /// Returns the callstack in source code locations for the currently /// executing opcode. This can be `None` if the execution finished (and /// `get_current_opcode_location()` returns `None`) or if the opcode is not @@ -128,6 +174,9 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { &self, opcode_location: &OpcodeLocation, ) -> Vec { + // TODO: this assumes we're debugging a program (ie. 
the DebugArtifact + // will contain a single DebugInfo), but this assumption doesn't hold + // for contracts self.debug_artifact.debug_symbols[0] .opcode_location(opcode_location) .map(|source_locations| { @@ -467,10 +516,14 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } - pub(super) fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)> { + pub(super) fn get_variables(&self) -> Vec { return self.foreign_call_executor.get_variables(); } + pub(super) fn current_stack_frame(&self) -> Option { + return self.foreign_call_executor.current_stack_frame(); + } + fn breakpoint_reached(&self) -> bool { if let Some(location) = self.get_current_opcode_location() { self.breakpoints.contains(&location) @@ -526,6 +579,52 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } +fn is_debug_file_in_debug_crate(debug_file: &DebugFile) -> bool { + debug_file.path.starts_with("__debug/") +} + +/// Builds a map from FileId to an ordered vector of tuples with line +/// numbers and opcode locations corresponding to those line numbers +fn build_source_to_opcode_debug_mappings( + debug_artifact: &DebugArtifact, +) -> BTreeMap> { + if debug_artifact.debug_symbols.is_empty() { + return BTreeMap::new(); + } + let locations = &debug_artifact.debug_symbols[0].locations; + let simple_files: BTreeMap<_, _> = debug_artifact + .file_map + .iter() + .filter(|(_, debug_file)| !is_debug_file_in_debug_crate(debug_file)) + .map(|(file_id, debug_file)| { + ( + file_id, + SimpleFile::new(debug_file.path.to_str().unwrap(), debug_file.source.as_str()), + ) + }) + .collect(); + + let mut result: BTreeMap> = BTreeMap::new(); + locations.iter().for_each(|(opcode_location, source_locations)| { + source_locations.iter().for_each(|source_location| { + let span = source_location.span; + let file_id = source_location.file; + let Some(file) = simple_files.get(&file_id) else { + return; + }; + let Ok(line_index) = file.line_index((), span.start() as usize) else { + return; + }; + let line_number = line_index + 1; + + result.entry(file_id).or_default().push((line_number, *opcode_location)); + }); + }); + result.iter_mut().for_each(|(_, file_locations)| file_locations.sort_by_key(|x| (x.0, x.1))); + + result +} + #[cfg(test)] mod tests { use super::*; diff --git a/tooling/debugger/src/dap.rs b/tooling/debugger/src/dap.rs index 7e67a26b257..7c722ed0a61 100644 --- a/tooling/debugger/src/dap.rs +++ b/tooling/debugger/src/dap.rs @@ -5,7 +5,6 @@ use std::str::FromStr; use acvm::acir::circuit::{Circuit, OpcodeLocation}; use acvm::acir::native_types::WitnessMap; use acvm::BlackBoxFunctionSolver; -use codespan_reporting::files::{Files, SimpleFile}; use crate::context::DebugCommandResult; use crate::context::DebugContext; @@ -30,15 +29,16 @@ use nargo::artifacts::debug::DebugArtifact; use fm::FileId; use noirc_driver::CompiledProgram; +type BreakpointId = i64; + pub struct DapSession<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> { server: Server, context: DebugContext<'a, B>, debug_artifact: &'a DebugArtifact, running: bool, - source_to_opcodes: BTreeMap>, - next_breakpoint_id: i64, - instruction_breakpoints: Vec<(OpcodeLocation, i64)>, - source_breakpoints: BTreeMap>, + next_breakpoint_id: BreakpointId, + instruction_breakpoints: Vec<(OpcodeLocation, BreakpointId)>, + source_breakpoints: BTreeMap>, } enum ScopeReferences { @@ -57,8 +57,6 @@ impl From for ScopeReferences { } } -// BTreeMap - impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { pub fn new( server: Server, @@ 
-67,7 +65,6 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, ) -> Self { - let source_to_opcodes = Self::build_source_to_opcode_debug_mappings(debug_artifact); let context = DebugContext::new( solver, circuit, @@ -79,7 +76,6 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { server, context, debug_artifact, - source_to_opcodes, running: false, next_breakpoint_id: 1, instruction_breakpoints: vec![], @@ -87,46 +83,6 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { } } - /// Builds a map from FileId to an ordered vector of tuples with line - /// numbers and opcode locations corresponding to those line numbers - fn build_source_to_opcode_debug_mappings( - debug_artifact: &'a DebugArtifact, - ) -> BTreeMap> { - if debug_artifact.debug_symbols.is_empty() { - return BTreeMap::new(); - } - let locations = &debug_artifact.debug_symbols[0].locations; - let simple_files: BTreeMap<_, _> = debug_artifact - .file_map - .iter() - .map(|(file_id, debug_file)| { - ( - file_id, - SimpleFile::new(debug_file.path.to_str().unwrap(), debug_file.source.as_str()), - ) - }) - .collect(); - - let mut result: BTreeMap> = BTreeMap::new(); - locations.iter().for_each(|(opcode_location, source_locations)| { - if source_locations.is_empty() { - return; - } - let source_location = source_locations[0]; - let span = source_location.span; - let file_id = source_location.file; - let Ok(line_index) = &simple_files[&file_id].line_index((), span.start() as usize) - else { - return; - }; - let line_number = line_index + 1; - - result.entry(file_id).or_default().push((line_number, *opcode_location)); - }); - result.iter_mut().for_each(|(_, file_locations)| file_locations.sort_by_key(|x| x.0)); - result - } - fn send_stopped_event(&mut self, reason: StoppedEventReason) -> Result<(), ServerError> { let description = format!("{:?}", &reason); self.server.send_event(Event::Stopped(StoppedEventBody { @@ -230,6 +186,8 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { } fn build_stack_trace(&self) -> Vec { + let stack_frames = self.context.get_variables(); + self.context .get_source_call_stack() .iter() @@ -239,9 +197,15 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { self.debug_artifact.location_line_number(*source_location).unwrap(); let column_number = self.debug_artifact.location_column_number(*source_location).unwrap(); + + let name = match stack_frames.get(index) { + Some(frame) => format!("{} {}", frame.function_name, index), + None => format!("frame #{index}"), + }; + StackFrame { id: index as i64, - name: format!("frame #{index}"), + name, source: Some(Source { path: self.debug_artifact.file_map[&source_location.file] .path @@ -422,7 +386,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { Ok(()) } - fn get_next_breakpoint_id(&mut self) -> i64 { + fn get_next_breakpoint_id(&mut self) -> BreakpointId { let id = self.next_breakpoint_id; self.next_breakpoint_id += 1; id @@ -493,36 +457,6 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { found.map(|iter| *iter.0) } - // TODO: there are four possibilities for the return value of this function: - // 1. the source location is not found -> None - // 2. an exact unique location is found -> Some(opcode_location) - // 3. an exact but not unique location is found (ie. 
a source location may - // be mapped to multiple opcodes, and those may be disjoint, for example for - // functions called multiple times throughout the program) - // 4. exact location is not found, so an opcode for a nearby source location - // is returned (this again could actually be more than one opcodes) - // Case 3 is not supported yet, and 4 is not correctly handled. - fn find_opcode_for_source_location( - &self, - file_id: &FileId, - line: i64, - ) -> Option { - let line = line as usize; - let Some(line_to_opcodes) = self.source_to_opcodes.get(file_id) else { - return None; - }; - let found_index = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { - Ok(index) => line_to_opcodes[index].1, - Err(index) => { - if index >= line_to_opcodes.len() { - return None; - } - line_to_opcodes[index].1 - } - }; - Some(found_index) - } - fn map_source_breakpoints(&mut self, args: &SetBreakpointsArguments) -> Vec { let Some(ref source) = &args.source.path else { return vec![]; @@ -539,7 +473,8 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { .iter() .map(|breakpoint| { let line = breakpoint.line; - let Some(location) = self.find_opcode_for_source_location(&file_id, line) else { + let Some(location) = self.context.find_opcode_for_source_location(&file_id, line) + else { return Breakpoint { verified: false, message: Some(String::from( @@ -608,16 +543,20 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { } fn build_local_variables(&self) -> Vec { - let mut variables: Vec<_> = self - .context - .get_variables() + let Some(current_stack_frame) = self.context.current_stack_frame() else { + return vec![]; + }; + + let mut variables = current_stack_frame + .variables .iter() .map(|(name, value, _var_type)| Variable { name: String::from(*name), value: format!("{:?}", *value), ..Variable::default() }) - .collect(); + .collect::>(); + variables.sort_by(|a, b| a.name.partial_cmp(&b.name).unwrap()); variables } diff --git a/tooling/debugger/src/foreign_calls.rs b/tooling/debugger/src/foreign_calls.rs index 68c4d3947b0..25f126ff490 100644 --- a/tooling/debugger/src/foreign_calls.rs +++ b/tooling/debugger/src/foreign_calls.rs @@ -3,17 +3,19 @@ use acvm::{ pwg::ForeignCallWaitInfo, }; use nargo::{ - artifacts::debug::{DebugArtifact, DebugVars}, + artifacts::debug::{DebugArtifact, DebugVars, StackFrame}, ops::{DefaultForeignCallExecutor, ForeignCallExecutor, NargoForeignCallResult}, }; -use noirc_errors::debug_info::DebugVarId; -use noirc_printable_type::{ForeignCallError, PrintableType, PrintableValue}; +use noirc_errors::debug_info::{DebugFnId, DebugVarId}; +use noirc_printable_type::ForeignCallError; pub(crate) enum DebugForeignCall { VarAssign, VarDrop, MemberAssign(u32), DerefAssign, + FnEnter, + FnExit, } impl DebugForeignCall { @@ -28,13 +30,16 @@ impl DebugForeignCall { "__debug_var_assign" => Some(DebugForeignCall::VarAssign), "__debug_var_drop" => Some(DebugForeignCall::VarDrop), "__debug_deref_assign" => Some(DebugForeignCall::DerefAssign), + "__debug_fn_enter" => Some(DebugForeignCall::FnEnter), + "__debug_fn_exit" => Some(DebugForeignCall::FnExit), _ => None, } } } pub trait DebugForeignCallExecutor: ForeignCallExecutor { - fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)>; + fn get_variables(&self) -> Vec; + fn current_stack_frame(&self) -> Option; } pub struct DefaultDebugForeignCallExecutor { @@ -57,23 +62,33 @@ impl DefaultDebugForeignCallExecutor { } pub fn load_artifact(&mut self, artifact: 
&DebugArtifact) { - artifact.debug_symbols.iter().for_each(|info| { - self.debug_vars.insert_variables(&info.variables); - self.debug_vars.insert_types(&info.types); - }); + // TODO: handle loading from the correct DebugInfo when we support + // debugging contracts + let Some(info) = artifact.debug_symbols.get(0) else { + return; + }; + self.debug_vars.insert_debug_info(info); } } impl DebugForeignCallExecutor for DefaultDebugForeignCallExecutor { - fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)> { + fn get_variables(&self) -> Vec { self.debug_vars.get_variables() } + + fn current_stack_frame(&self) -> Option { + self.debug_vars.current_stack_frame() + } } fn debug_var_id(value: &Value) -> DebugVarId { DebugVarId(value.to_u128() as u32) } +fn debug_fn_id(value: &Value) -> DebugFnId { + DebugFnId(value.to_u128() as u32) +} + impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { fn execute( &mut self, @@ -136,6 +151,19 @@ impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { } Ok(ForeignCallResult::default().into()) } + Some(DebugForeignCall::FnEnter) => { + let fcp_fn_id = &foreign_call.inputs[0]; + let ForeignCallParam::Single(fn_id_value) = fcp_fn_id else { + panic!("unexpected foreign call parameter in fn enter: {fcp_fn_id:?}") + }; + let fn_id = debug_fn_id(fn_id_value); + self.debug_vars.push_fn(fn_id); + Ok(ForeignCallResult::default().into()) + } + Some(DebugForeignCall::FnExit) => { + self.debug_vars.pop_fn(); + Ok(ForeignCallResult::default().into()) + } None => self.executor.execute(foreign_call), } } diff --git a/tooling/debugger/src/repl.rs b/tooling/debugger/src/repl.rs index 8441dbde9be..41dbf604f99 100644 --- a/tooling/debugger/src/repl.rs +++ b/tooling/debugger/src/repl.rs @@ -337,11 +337,13 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } pub fn show_vars(&self) { - let vars = self.context.get_variables(); - for (var_name, value, var_type) in vars.iter() { - let printable_value = - PrintableValueDisplay::Plain((*value).clone(), (*var_type).clone()); - println!("{var_name}:{var_type:?} = {}", printable_value); + for frame in self.context.get_variables() { + println!("{}({})", frame.function_name, frame.function_params.join(", ")); + for (var_name, value, var_type) in frame.variables.iter() { + let printable_value = + PrintableValueDisplay::Plain((*value).clone(), (*var_type).clone()); + println!(" {var_name}:{var_type:?} = {}", printable_value); + } } } @@ -530,7 +532,7 @@ pub fn run( .add( "vars", command! { - "show variable values available at this point in execution", + "show variables for each function scope available at this point in execution", () => || { ref_context.borrow_mut().show_vars(); Ok(CommandStatus::Done) diff --git a/tooling/debugger/src/source_code_printer.rs b/tooling/debugger/src/source_code_printer.rs index b5ffdb12d01..e298eb8aadd 100644 --- a/tooling/debugger/src/source_code_printer.rs +++ b/tooling/debugger/src/source_code_printer.rs @@ -30,7 +30,7 @@ struct LocationPrintContext { // Given a DebugArtifact and an OpcodeLocation, prints all the source code // locations the OpcodeLocation maps to, with some surrounding context and // visual aids to highlight the location itself. 
-pub(crate) fn print_source_code_location(debug_artifact: &DebugArtifact, locations: &[Location]) { +pub(super) fn print_source_code_location(debug_artifact: &DebugArtifact, locations: &[Location]) { let locations = locations.iter(); for loc in locations { @@ -269,8 +269,12 @@ mod tests { let mut opcode_locations = BTreeMap::>::new(); opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); - let debug_symbols = - vec![DebugInfo::new(opcode_locations, BTreeMap::default(), BTreeMap::default())]; + let debug_symbols = vec![DebugInfo::new( + opcode_locations, + BTreeMap::default(), + BTreeMap::default(), + BTreeMap::default(), + )]; let debug_artifact = DebugArtifact::new(debug_symbols, &fm); let location_rendered: Vec<_> = render_location(&debug_artifact, &loc).collect(); diff --git a/tooling/debugger/tests/debug.rs b/tooling/debugger/tests/debug.rs index 4cb678192b8..143ee7987f8 100644 --- a/tooling/debugger/tests/debug.rs +++ b/tooling/debugger/tests/debug.rs @@ -12,7 +12,7 @@ mod tests { let nargo_bin = cargo_bin("nargo").into_os_string().into_string().expect("Cannot parse nargo path"); - let mut dbg_session = spawn_bash(Some(10000)).expect("Could not start bash session"); + let mut dbg_session = spawn_bash(Some(15000)).expect("Could not start bash session"); // Set backend to `/dev/null` to force an error if nargo tries to speak to a backend. dbg_session diff --git a/tooling/lsp/src/requests/test_run.rs b/tooling/lsp/src/requests/test_run.rs index 0b88d814265..1844a3d9bf0 100644 --- a/tooling/lsp/src/requests/test_run.rs +++ b/tooling/lsp/src/requests/test_run.rs @@ -84,7 +84,7 @@ fn on_test_run_request_inner( let test_result = run_test( &state.solver, &mut context, - test_function, + &test_function, false, None, &CompileOptions::default(), diff --git a/tooling/nargo/src/artifacts/contract.rs b/tooling/nargo/src/artifacts/contract.rs index d928b09fcb9..020ce49662f 100644 --- a/tooling/nargo/src/artifacts/contract.rs +++ b/tooling/nargo/src/artifacts/contract.rs @@ -1,6 +1,6 @@ use acvm::acir::circuit::Circuit; use noirc_abi::{Abi, ContractEvent}; -use noirc_driver::{CompiledContract, ContractFunction, ContractFunctionType}; +use noirc_driver::{CompiledContract, ContractFunction}; use serde::{Deserialize, Serialize}; use noirc_driver::DebugFile; @@ -43,9 +43,9 @@ impl From for ContractArtifact { pub struct ContractFunctionArtifact { pub name: String, - pub function_type: ContractFunctionType, + pub is_unconstrained: bool, - pub is_internal: bool, + pub custom_attributes: Vec, pub abi: Abi, @@ -66,8 +66,8 @@ impl From for ContractFunctionArtifact { fn from(func: ContractFunction) -> Self { ContractFunctionArtifact { name: func.name, - function_type: func.function_type, - is_internal: func.is_internal, + is_unconstrained: func.is_unconstrained, + custom_attributes: func.custom_attributes, abi: func.abi, bytecode: func.bytecode, debug_symbols: func.debug, diff --git a/tooling/nargo/src/artifacts/debug.rs b/tooling/nargo/src/artifacts/debug.rs index a249ecb03ad..fbdf59805c9 100644 --- a/tooling/nargo/src/artifacts/debug.rs +++ b/tooling/nargo/src/artifacts/debug.rs @@ -8,7 +8,7 @@ use std::{ ops::Range, }; -pub use super::debug_vars::DebugVars; +pub use super::debug_vars::{DebugVars, StackFrame}; use fm::{FileId, FileManager, PathString}; /// A Debug Artifact stores, for a given program, the debug info for every function @@ -231,8 +231,12 @@ mod tests { let mut opcode_locations = BTreeMap::>::new(); opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); - let debug_symbols = - 
vec![DebugInfo::new(opcode_locations, BTreeMap::default(), BTreeMap::default())]; + let debug_symbols = vec![DebugInfo::new( + opcode_locations, + BTreeMap::default(), + BTreeMap::default(), + BTreeMap::default(), + )]; let debug_artifact = DebugArtifact::new(debug_symbols, &fm); let location_in_line = debug_artifact.location_in_line(loc).expect("Expected a range"); diff --git a/tooling/nargo/src/artifacts/debug_vars.rs b/tooling/nargo/src/artifacts/debug_vars.rs index 20f2637f7d6..66568bec833 100644 --- a/tooling/nargo/src/artifacts/debug_vars.rs +++ b/tooling/nargo/src/artifacts/debug_vars.rs @@ -1,54 +1,85 @@ use acvm::brillig_vm::brillig::Value; use noirc_errors::debug_info::{ - DebugTypeId, DebugTypes, DebugVarId, DebugVariable, DebugVariables, + DebugFnId, DebugFunction, DebugInfo, DebugTypeId, DebugVarId, DebugVariable, }; use noirc_printable_type::{decode_value, PrintableType, PrintableValue}; -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; #[derive(Debug, Default, Clone)] pub struct DebugVars { variables: HashMap, + functions: HashMap, types: HashMap, - active: HashSet, - values: HashMap, + frames: Vec<(DebugFnId, HashMap)>, +} + +pub struct StackFrame<'a> { + pub function_name: &'a str, + pub function_params: Vec<&'a str>, + pub variables: Vec<(&'a str, &'a PrintableValue, &'a PrintableType)>, } impl DebugVars { - pub fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)> { - self.active - .iter() - .filter_map(|var_id| { - self.variables.get(var_id).and_then(|debug_var| { - let Some(value) = self.values.get(var_id) else { - return None; - }; - let Some(ptype) = self.types.get(&debug_var.debug_type_id) else { - return None; - }; - Some((debug_var.name.as_str(), value, ptype)) - }) - }) - .collect() + pub fn insert_debug_info(&mut self, info: &DebugInfo) { + self.variables.extend(info.variables.clone()); + self.types.extend(info.types.clone()); + self.functions.extend(info.functions.clone()); + } + + pub fn get_variables(&self) -> Vec { + self.frames.iter().map(|(fn_id, frame)| self.build_stack_frame(fn_id, frame)).collect() } - pub fn insert_variables(&mut self, vars: &DebugVariables) { - self.variables.extend(vars.clone()); + pub fn current_stack_frame(&self) -> Option { + self.frames.last().map(|(fn_id, frame)| self.build_stack_frame(fn_id, frame)) } - pub fn insert_types(&mut self, types: &DebugTypes) { - self.types.extend(types.clone()); + fn lookup_var(&self, var_id: DebugVarId) -> Option<(&str, &PrintableType)> { + self.variables.get(&var_id).and_then(|debug_var| { + let Some(ptype) = self.types.get(&debug_var.debug_type_id) else { + return None; + }; + Some((debug_var.name.as_str(), ptype)) + }) + } + + fn build_stack_frame<'a>( + &'a self, + fn_id: &DebugFnId, + frame: &'a HashMap, + ) -> StackFrame { + let debug_fn = &self.functions.get(fn_id).expect("failed to find function metadata"); + + let params: Vec<&str> = + debug_fn.arg_names.iter().map(|arg_name| arg_name.as_str()).collect(); + let vars: Vec<(&str, &PrintableValue, &PrintableType)> = frame + .iter() + .filter_map(|(var_id, var_value)| { + self.lookup_var(*var_id).map(|(name, typ)| (name, var_value, typ)) + }) + .collect(); + + StackFrame { + function_name: debug_fn.name.as_str(), + function_params: params, + variables: vars, + } } pub fn assign_var(&mut self, var_id: DebugVarId, values: &[Value]) { - self.active.insert(var_id); let type_id = &self.variables.get(&var_id).unwrap().debug_type_id; let ptype = self.types.get(type_id).unwrap(); - self.values.insert(var_id, 
decode_value(&mut values.iter().map(|v| v.to_field()), ptype)); + + self.frames + .last_mut() + .expect("unexpected empty stack frames") + .1 + .insert(var_id, decode_value(&mut values.iter().map(|v| v.to_field()), ptype)); } pub fn assign_field(&mut self, var_id: DebugVarId, indexes: Vec, values: &[Value]) { - let mut cursor: &mut PrintableValue = self - .values + let current_frame = &mut self.frames.last_mut().expect("unexpected empty stack frames").1; + let mut cursor: &mut PrintableValue = current_frame .get_mut(&var_id) .unwrap_or_else(|| panic!("value unavailable for var_id {var_id:?}")); let cursor_type_id = &self @@ -102,7 +133,6 @@ impl DebugVars { }; } *cursor = decode_value(&mut values.iter().map(|v| v.to_field()), cursor_type); - self.active.insert(var_id); } pub fn assign_deref(&mut self, _var_id: DebugVarId, _values: &[Value]) { @@ -114,6 +144,14 @@ impl DebugVars { } pub fn drop_var(&mut self, var_id: DebugVarId) { - self.active.remove(&var_id); + self.frames.last_mut().expect("unexpected empty stack frames").1.remove(&var_id); + } + + pub fn push_fn(&mut self, fn_id: DebugFnId) { + self.frames.push((fn_id, HashMap::default())); + } + + pub fn pop_fn(&mut self) { + self.frames.pop(); } } diff --git a/tooling/nargo/src/ops/test.rs b/tooling/nargo/src/ops/test.rs index 92c09ec889e..8ddcb5cf8d2 100644 --- a/tooling/nargo/src/ops/test.rs +++ b/tooling/nargo/src/ops/test.rs @@ -14,10 +14,16 @@ pub enum TestStatus { CompileError(FileDiagnostic), } +impl TestStatus { + pub fn failed(&self) -> bool { + !matches!(self, TestStatus::Pass) + } +} + pub fn run_test( blackbox_solver: &B, context: &mut Context, - test_function: TestFunction, + test_function: &TestFunction, show_output: bool, foreign_call_resolver_url: Option<&str>, config: &CompileOptions, @@ -45,7 +51,7 @@ pub fn run_test( /// that a constraint was never satisfiable. /// An example of this is the program `assert(false)` /// In that case, we check if the test function should fail, and if so, we return `TestStatus::Pass`. -fn test_status_program_compile_fail(err: CompileError, test_function: TestFunction) -> TestStatus { +fn test_status_program_compile_fail(err: CompileError, test_function: &TestFunction) -> TestStatus { // The test has failed compilation, but it should never fail. Report error. if !test_function.should_fail() { return TestStatus::CompileError(err.into()); @@ -70,7 +76,7 @@ fn test_status_program_compile_fail(err: CompileError, test_function: TestFuncti /// We now check whether execution passed/failed and whether it should have /// passed/failed to determine the test status. 
diff --git a/tooling/nargo_cli/src/cli/test_cmd.rs b/tooling/nargo_cli/src/cli/test_cmd.rs
index 68660d62d23..2828aaf01eb 100644
--- a/tooling/nargo_cli/src/cli/test_cmd.rs
+++ b/tooling/nargo_cli/src/cli/test_cmd.rs
@@ -5,17 +5,18 @@ use bn254_blackbox_solver::Bn254BlackBoxSolver;
 use clap::Args;
 use fm::FileManager;
 use nargo::{
-    insert_all_files_for_workspace_into_file_manager,
-    ops::{run_test, TestStatus},
-    package::Package,
-    parse_all, prepare_package,
+    insert_all_files_for_workspace_into_file_manager, ops::TestStatus, package::Package, parse_all,
+    prepare_package,
 };
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
-use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING};
+use noirc_driver::{
+    check_crate, file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING,
+};
 use noirc_frontend::{
     graph::CrateName,
     hir::{FunctionNameMatch, ParsedFiles},
 };
+use rayon::prelude::{IntoParallelIterator, ParallelBridge, ParallelIterator};
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
 
 use crate::{backends::Backend, cli::check_cmd::check_crate_and_report_errors, errors::CliError};
@@ -82,15 +83,13 @@ pub(crate) fn run(
         None => FunctionNameMatch::Anything,
     };
 
-    let blackbox_solver = Bn254BlackBoxSolver::new();
-
     let test_reports: Vec<Vec<(String, TestStatus)>> = workspace
         .into_iter()
+        .par_bridge()
         .map(|package| {
-            run_tests(
+            run_tests::<Bn254BlackBoxSolver>(
                 &workspace_file_manager,
                 &parsed_files,
-                &blackbox_solver,
                 package,
                 pattern,
                 args.show_output,
@@ -116,24 +115,95 @@
         };
     }
 
-    if test_report.iter().any(|(_, status)| matches!(status, TestStatus::Fail { .. })) {
+    if test_report.iter().any(|(_, status)| status.failed()) {
         Err(CliError::Generic(String::new()))
     } else {
         Ok(())
     }
 }
 
-#[allow(clippy::too_many_arguments)]
-fn run_tests<S: BlackBoxFunctionSolver>(
+fn run_tests<S: BlackBoxFunctionSolver + Default>(
     file_manager: &FileManager,
     parsed_files: &ParsedFiles,
-    blackbox_solver: &S,
     package: &Package,
     fn_name: FunctionNameMatch,
     show_output: bool,
     foreign_call_resolver_url: Option<&str>,
     compile_options: &CompileOptions,
 ) -> Result<Vec<(String, TestStatus)>, CliError> {
+    let test_functions =
+        get_tests_in_package(file_manager, parsed_files, package, fn_name, compile_options)?;
+
+    let count_all = test_functions.len();
+
+    let plural = if count_all == 1 { "" } else { "s" };
+    println!("[{}] Running {count_all} test function{plural}", package.name);
+
+    let test_report: Vec<(String, TestStatus)> = test_functions
+        .into_par_iter()
+        .map(|test_name| {
+            let status = run_test::<S>(
+                file_manager,
+                parsed_files,
+                package,
+                &test_name,
+                show_output,
+                foreign_call_resolver_url,
+                compile_options,
+            );
+
+            (test_name, status)
+        })
+        .collect();
+
+    display_test_report(file_manager, package, compile_options, &test_report)?;
+    Ok(test_report)
+}
+
+fn run_test<S: BlackBoxFunctionSolver + Default>(
+    file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
+    package: &Package,
+    fn_name: &str,
+    show_output: bool,
+    foreign_call_resolver_url: Option<&str>,
+    compile_options: &CompileOptions,
+) -> TestStatus {
+    // This is really hacky but we can't share `Context` or `S` across threads.
+    // We then need to construct a separate copy for each test.
+
+    let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package);
+    check_crate(
+        &mut context,
+        crate_id,
+        compile_options.deny_warnings,
+        compile_options.disable_macros,
+    )
+    .expect("Any errors should have occurred when collecting test functions");
+
+    let test_functions = context
+        .get_all_test_functions_in_crate_matching(&crate_id, FunctionNameMatch::Exact(fn_name));
+    let (_, test_function) = test_functions.first().expect("Test function should exist");
+
+    let blackbox_solver = S::default();
+
+    nargo::ops::run_test(
+        &blackbox_solver,
+        &mut context,
+        test_function,
+        show_output,
+        foreign_call_resolver_url,
+        compile_options,
+    )
+}
+
+fn get_tests_in_package(
+    file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
+    package: &Package,
+    fn_name: FunctionNameMatch,
+    compile_options: &CompileOptions,
+) -> Result<Vec<String>, CliError> {
     let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package);
     check_crate_and_report_errors(
         &mut context,
@@ -143,30 +213,27 @@
         compile_options.silence_warnings,
     )?;
 
-    let test_functions = context.get_all_test_functions_in_crate_matching(&crate_id, fn_name);
-    let count_all = test_functions.len();
-
-    let plural = if count_all == 1 { "" } else { "s" };
-    println!("[{}] Running {count_all} test function{plural}", package.name);
+    Ok(context
+        .get_all_test_functions_in_crate_matching(&crate_id, fn_name)
+        .into_iter()
+        .map(|(test_name, _)| test_name)
+        .collect())
+}
 
+fn display_test_report(
+    file_manager: &FileManager,
+    package: &Package,
+    compile_options: &CompileOptions,
+    test_report: &[(String, TestStatus)],
+) -> Result<(), CliError> {
     let writer = StandardStream::stderr(ColorChoice::Always);
     let mut writer = writer.lock();
 
-    let mut test_report: Vec<(String, TestStatus)> = Vec::new();
-    for (test_name, test_function) in test_functions {
+    for (test_name, test_status) in test_report {
         write!(writer, "[{}] Testing {test_name}... ", package.name)
             .expect("Failed to write to stderr");
         writer.flush().expect("Failed to flush writer");
 
-        let test_status = run_test(
-            blackbox_solver,
-            &mut context,
-            test_function,
-            show_output,
-            foreign_call_resolver_url,
-            compile_options,
-        );
-
         match &test_status {
             TestStatus::Pass { .. } => {
                 writer
@@ -181,7 +248,7 @@
                 writeln!(writer, "FAIL\n{message}\n").expect("Failed to write to stderr");
                 if let Some(diag) = error_diagnostic {
                     noirc_errors::reporter::report_all(
-                        context.file_manager.as_file_map(),
+                        file_manager.as_file_map(),
                         &[diag.clone()],
                         compile_options.deny_warnings,
                         compile_options.silence_warnings,
@@ -190,23 +257,21 @@
                 }
             }
             TestStatus::CompileError(err) => {
                 noirc_errors::reporter::report_all(
-                    context.file_manager.as_file_map(),
+                    file_manager.as_file_map(),
                     &[err.clone()],
                     compile_options.deny_warnings,
                     compile_options.silence_warnings,
                 );
             }
         }
-
-        test_report.push((test_name, test_status));
-
         writer.reset().expect("Failed to reset writer");
     }
 
     write!(writer, "[{}] ", package.name).expect("Failed to write to stderr");
 
-    let count_failed =
-        test_report.iter().filter(|(_, status)| !matches!(status, TestStatus::Pass)).count();
+    let count_all = test_report.len();
+    let count_failed = test_report.iter().filter(|(_, status)| status.failed()).count();
+    let plural = if count_all == 1 { "" } else { "s" };
     if count_failed == 0 {
         writer.set_color(ColorSpec::new().set_fg(Some(Color::Green))).expect("Failed to set color");
         write!(writer, "{count_all} test{plural} passed").expect("Failed to write to stderr");
@@ -231,5 +296,5 @@
         writer.reset().expect("Failed to reset writer");
     }
 
-    Ok(test_report)
+    Ok(())
 }
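Illustrative sketch (not part of the applied diff): the parallel runner above combines two rayon entry points, `par_bridge()` to spread the workspace's serial package iterator across the thread pool and `into_par_iter()` for each package's list of test names, and it rebuilds the non-shareable state (the compiler `Context` and the solver) inside every task rather than sharing it. A reduced sketch of that pattern, with `Job` and `run_one` as made-up stand-ins:

    use rayon::prelude::*;

    struct Job(String);

    fn run_one(job: &Job) -> bool {
        // State that cannot cross threads is rebuilt per task, mirroring how
        // each test re-creates its own compiler `Context` and solver.
        let local_state = format!("state for {}", job.0);
        !local_state.is_empty()
    }

    fn run_all(jobs: Vec<Job>) -> Vec<(String, bool)> {
        // `into_par_iter` consumes the collection and runs the closure on the pool.
        jobs.into_par_iter().map(|job| (job.0.clone(), run_one(&job))).collect()
    }

    fn run_streamed(jobs: impl Iterator<Item = Job> + Send) -> usize {
        // `par_bridge` adapts an ordinary serial iterator into a parallel one,
        // as is done for the workspace's packages.
        jobs.par_bridge().filter(|job| run_one(job)).count()
    }

The cost, flagged by the comment inside `run_test`, is that every test repeats `check_crate` for its package; that redundant front-end work is what buys thread safety without having to make `Context` shareable.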
diff --git a/tooling/noir_codegen/package.json b/tooling/noir_codegen/package.json
index 6bb9d06f718..68ee7184928 100644
--- a/tooling/noir_codegen/package.json
+++ b/tooling/noir_codegen/package.json
@@ -3,7 +3,7 @@
   "contributors": [
     "The Noir Team "
   ],
-  "version": "0.24.0",
+  "version": "0.25.0",
   "packageManager": "yarn@3.5.1",
   "license": "(MIT OR Apache-2.0)",
   "type": "module",
diff --git a/tooling/noir_js/package.json b/tooling/noir_js/package.json
index 6f1899fae52..0a40d300ea4 100644
--- a/tooling/noir_js/package.json
+++ b/tooling/noir_js/package.json
@@ -3,7 +3,7 @@
   "contributors": [
     "The Noir Team "
   ],
-  "version": "0.24.0",
+  "version": "0.25.0",
   "packageManager": "yarn@3.5.1",
   "license": "(MIT OR Apache-2.0)",
   "type": "module",
diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json
index 28c3609fd14..d94999f324b 100644
--- a/tooling/noir_js_backend_barretenberg/package.json
+++ b/tooling/noir_js_backend_barretenberg/package.json
@@ -3,7 +3,7 @@
   "contributors": [
     "The Noir Team "
   ],
-  "version": "0.24.0",
+  "version": "0.25.0",
   "packageManager": "yarn@3.5.1",
   "license": "(MIT OR Apache-2.0)",
   "type": "module",
@@ -42,7 +42,7 @@
     "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0"
   },
   "dependencies": {
-    "@aztec/bb.js": "0.24.0",
+    "@aztec/bb.js": "0.26.3",
     "@noir-lang/types": "workspace:*",
     "fflate": "^0.8.0"
   },
diff --git a/tooling/noir_js_types/package.json b/tooling/noir_js_types/package.json
index a3b5c85897a..d853813303d 100644
--- a/tooling/noir_js_types/package.json
+++ b/tooling/noir_js_types/package.json
@@ -4,7 +4,7 @@
     "The Noir Team "
   ],
   "packageManager": "yarn@3.5.1",
-  "version": "0.24.0",
+  "version": "0.25.0",
   "license": "(MIT OR Apache-2.0)",
   "homepage": "https://noir-lang.org/",
   "repository": {
diff --git a/tooling/noirc_abi_wasm/package.json b/tooling/noirc_abi_wasm/package.json
index 05fcc270402..8fb9eb314a5 100644
--- a/tooling/noirc_abi_wasm/package.json
+++ b/tooling/noirc_abi_wasm/package.json
@@ -3,7 +3,7 @@
   "contributors": [
     "The Noir Team "
   ],
-  "version": "0.24.0",
+  "version": "0.25.0",
   "license": "(MIT OR Apache-2.0)",
   "homepage": "https://noir-lang.org/",
   "repository": {
diff --git a/yarn.lock b/yarn.lock
index f5f3a29f08a..49485193532 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -221,9 +221,9 @@ __metadata:
   languageName: node
   linkType: hard
 
-"@aztec/bb.js@npm:0.24.0":
-  version: 0.24.0
-  resolution: "@aztec/bb.js@npm:0.24.0"
+"@aztec/bb.js@npm:0.26.3":
+  version: 0.26.3
+  resolution: "@aztec/bb.js@npm:0.26.3"
   dependencies:
     comlink: ^4.4.1
     commander: ^10.0.1
@@ -231,7 +231,7 @@ __metadata:
     tslib: ^2.4.0
   bin:
     bb.js: dest/node/main.js
-  checksum: a086dabf30084cfa526e512148b9c02f0a0770dcc19b7dca4af9a3e98612b716acc7eaac6b52c0f12d985932e866d1cb9e534ded6ac9d747f3dd021afe25de27
+  checksum: 74c2b7ef5405f56472cf7c41d1c13261df07b1d5019e3ede9b63d218378e0fb73ccf5c52f1cc524505efad5799b347b497349d7c9b6fe82286014b1574f12309
   languageName: node
   linkType: hard
 
@@ -4396,7 +4396,7 @@ __metadata:
   version: 0.0.0-use.local
   resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg"
   dependencies:
-    "@aztec/bb.js": 0.24.0
+    "@aztec/bb.js": 0.26.3
     "@noir-lang/types": "workspace:*"
     "@types/node": ^20.6.2
     "@types/prettier": ^3