diff --git a/.noir-sync-commit b/.noir-sync-commit index 64da23777cd..f45e08b4171 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -1d96937a8e94a91c0c17c97102498d067fca76c3 +053d5e8879865c25ac2bfcfd206a6b729d328b9c diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index b99f5b35be6..629248c8136 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -185,7 +185,7 @@ dependencies = [ "ark-std", "derivative", "hashbrown 0.13.2", - "itertools", + "itertools 0.10.5", "num-traits", "zeroize", ] @@ -202,7 +202,7 @@ dependencies = [ "ark-std", "derivative", "digest", - "itertools", + "itertools 0.10.5", "num-bigint", "num-traits", "paste", @@ -291,6 +291,15 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +[[package]] +name = "ascii-canvas" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" +dependencies = [ + "term", +] + [[package]] name = "autocfg" version = "1.1.0" @@ -355,6 +364,33 @@ dependencies = [ "serde", ] +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" + [[package]] name = "bitmaps" version = "2.1.0" @@ -464,7 +500,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets", + "windows-targets 0.52.0", ] [[package]] @@ -587,6 +623,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + [[package]] name = "crypto-bigint" version = "0.4.9" @@ -686,6 +728,27 @@ dependencies = [ "subtle", ] +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + [[package]] name = "ecdsa" version = "0.14.8" @@ -724,6 +787,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ena" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1" +dependencies = [ + "log", +] + [[package]] name = "env_filter" version = "0.1.0" @@ -970,6 +1042,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.11.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.10" @@ -1018,12 +1099,63 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "lalrpop" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" +dependencies = [ + "ascii-canvas", + "bit-set", + "ena", + "itertools 0.11.0", + "lalrpop-util", + "petgraph", + "pico-args", + "regex", + "regex-syntax", + "string_cache", + "term", + "tiny-keccak", + "unicode-xid", + "walkdir", +] + +[[package]] +name = "lalrpop-util" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" +dependencies = [ + "regex-automata", +] + [[package]] name = "libc" version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.5.0", + "libc", +] + +[[package]] +name = "lock_api" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +dependencies = [ + "autocfg", + "scopeguard", +] + [[package]] name = "log" version = "0.4.20" @@ -1056,6 +1188,12 @@ dependencies = [ "serde_json", ] +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + [[package]] name = "noirc_abi" version = "0.27.0" @@ -1136,6 +1274,8 @@ dependencies = [ "chumsky", "fm", "iter-extended", + "lalrpop", + "lalrpop-util", "noirc_errors", "noirc_printable_type", "petgraph", @@ -1209,6 +1349,29 @@ dependencies = [ "sha2", ] +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.48.5", +] + [[package]] name = "paste" version = "1.0.14" @@ -1225,6 +1388,21 @@ dependencies = [ "indexmap 2.2.1", ] +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pico-args" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + [[package]] name = "pin-project-lite" version = "0.2.13" @@ -1253,6 +1431,12 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + [[package]] name = "proc-macro2" version = "1.0.78" @@ -1309,6 +1493,26 @@ dependencies = [ "rand_core", ] +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_users" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + [[package]] name = "regex" version = "1.10.3" @@ -1398,6 +1602,12 @@ dependencies = [ "semver", ] +[[package]] +name = "rustversion" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47" + [[package]] name = "ryu" version = "1.0.16" @@ -1466,6 +1676,12 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + [[package]] name = "sec1" version = "0.3.0" @@ -1595,6 +1811,12 @@ dependencies = [ "rand_core", ] +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + [[package]] name = "sized-chunks" version = "0.6.5" @@ -1639,6 +1861,19 @@ dependencies = [ "der", ] +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot", + "phf_shared", + "precomputed-hash", +] + [[package]] name = "strsim" version = "0.10.0" @@ -1673,6 +1908,17 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "term" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + [[package]] name = "termcolor" version = "1.4.1" @@ -1731,6 +1977,15 @@ dependencies = [ "time-core", ] +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + [[package]] name = "toml" version = "0.7.8" @@ -1820,6 +2075,12 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + [[package]] name = "utf8parse" version = "0.2.1" @@ -1939,7 +2200,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets", + "windows-targets 0.52.0", ] [[package]] @@ -1948,7 +2209,22 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets", + "windows-targets 0.52.0", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", ] [[package]] @@ -1957,51 +2233,93 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + [[package]] name = "windows_aarch64_msvc" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + [[package]] name = "windows_i686_gnu" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + [[package]] name = "windows_i686_msvc" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + [[package]] name = "windows_x86_64_gnu" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" +[[package]] +name 
= "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + [[package]] name = "windows_x86_64_gnullvm" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + [[package]] name = "windows_x86_64_msvc" version = "0.52.0" diff --git a/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml b/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml index 0b2855fa834..cdd7a064a8d 100644 --- a/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml +++ b/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml @@ -112,6 +112,10 @@ jobs: # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} + - name: Checkout + if: ${{ failure() }} + uses: actions/checkout@v4 + # Raise an issue if the tests failed - name: Alert on failed publish uses: JasonEtco/create-an-issue@v2 @@ -122,4 +126,4 @@ jobs: WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} with: update_existing: true - filename: .github/JS_PUBLISH_FAILED.md \ No newline at end of file + filename: .github/ACVM_NOT_PUBLISHABLE.md diff --git a/noir/noir-repo/.gitignore b/noir/noir-repo/.gitignore index 9a829afab8b..2c877a4d02c 100644 --- a/noir/noir-repo/.gitignore +++ b/noir/noir-repo/.gitignore @@ -4,6 +4,7 @@ examples/**/target/ examples/9 node_modules pkg/ +.idea # Yarn .pnp.* diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index ee7d1d7d024..e6072ffb82b 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -269,7 +269,7 @@ dependencies = [ "ark-std", "derivative", "hashbrown 0.13.2", - "itertools", + "itertools 0.10.5", "num-traits", "zeroize", ] @@ -286,7 +286,7 @@ dependencies = [ "ark-std", "derivative", "digest", - "itertools", + "itertools 0.10.5", "num-bigint", "num-traits", "paste", @@ -375,6 +375,15 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +[[package]] +name = "ascii-canvas" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" +dependencies = [ + "term", +] + [[package]] name = "assert_cmd" version = "2.0.12" @@ -607,11 +616,14 @@ dependencies = [ "ark-ec", "ark-ff", "cfg-if 1.0.0", + "criterion", "getrandom 0.2.10", + "hex", "js-sys", + "lazy_static", "noir_grumpkin", "num-bigint", - "num-traits", + "pprof", "thiserror", "wasm-bindgen-futures", "wasmer", @@ -1179,7 +1191,7 @@ dependencies = [ "clap", "criterion-plot", "is-terminal", - "itertools", + "itertools 0.10.5", "num-traits", "once_cell", "oorandom", @@ -1200,7 +1212,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", - "itertools", + "itertools 0.10.5", ] [[package]] @@ -1256,6 +1268,12 @@ dependencies = [ "cfg-if 
1.0.0", ] +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + [[package]] name = "crypto-bigint" version = "0.4.9" @@ -1527,6 +1545,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ena" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1" +dependencies = [ + "log", +] + [[package]] name = "encode_unicode" version = "0.3.6" @@ -2365,6 +2392,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.9" @@ -2535,6 +2571,37 @@ dependencies = [ "libc", ] +[[package]] +name = "lalrpop" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" +dependencies = [ + "ascii-canvas", + "bit-set", + "ena", + "itertools 0.11.0", + "lalrpop-util", + "petgraph", + "pico-args", + "regex", + "regex-syntax 0.8.2", + "string_cache", + "term", + "tiny-keccak", + "unicode-xid", + "walkdir", +] + +[[package]] +name = "lalrpop-util" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" +dependencies = [ + "regex-automata 0.4.5", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -2857,6 +2924,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + [[package]] name = "nibble_vec" version = "0.1.0" @@ -3088,6 +3161,8 @@ dependencies = [ "chumsky", "fm", "iter-extended", + "lalrpop", + "lalrpop-util", "noirc_errors", "noirc_printable_type", "petgraph", @@ -3387,6 +3462,12 @@ dependencies = [ "siphasher", ] +[[package]] +name = "pico-args" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + [[package]] name = "pin-project-lite" version = "0.2.13" @@ -3471,6 +3552,12 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + [[package]] name = "predicates" version = "2.1.5" @@ -3479,7 +3566,7 @@ checksum = "59230a63c37f3e18569bdb90e4a89cbf5bf8b06fea0b84e65ea10cc4df47addd" dependencies = [ "difflib", "float-cmp", - "itertools", + "itertools 0.10.5", "normalize-line-endings", "predicates-core", "regex", @@ -3493,7 +3580,7 @@ checksum = "09963355b9f467184c04017ced4a2ba2d75cbcb4e7462690d388233253d4b1a9" dependencies = [ "anstyle", "difflib", - "itertools", + "itertools 0.10.5", "predicates-core", ] @@ -4613,6 +4700,19 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9091b6114800a5f2141aee1d1b9d6ca3592ac062dc5decb3764ec5895a47b4eb" 
+[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot 0.12.1", + "phf_shared", + "precomputed-hash", +] + [[package]] name = "strsim" version = "0.10.0" @@ -4855,6 +4955,15 @@ dependencies = [ "time-core", ] +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + [[package]] name = "tinytemplate" version = "1.2.1" @@ -5276,9 +5385,9 @@ dependencies = [ [[package]] name = "walkdir" -version = "2.3.3" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs index 188205b124c..d655d136bc8 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs @@ -89,6 +89,16 @@ impl Circuit { } } +#[derive(Debug, Copy, Clone)] +/// The opcode location for a call to a separate ACIR circuit +/// This includes the function index of the caller within a [program][Program] +/// and the index in the callers ACIR to the specific call opcode. +/// This is only resolved and set during circuit execution. +pub struct ResolvedOpcodeLocation { + pub acir_function_index: usize, + pub opcode_location: OpcodeLocation, +} + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)] /// Opcodes are locatable so that callers can /// map opcodes to debug information related to their context. 
diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/bigint.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/bigint.rs index f094bb1ba20..a47afa5049a 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/bigint.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/bigint.rs @@ -105,7 +105,7 @@ impl BigIntSolver { } BlackBoxFunc::BigIntMul => lhs * rhs, BlackBoxFunc::BigIntDiv => { - lhs * rhs.modpow(&(&modulus - BigUint::from(1_u32)), &modulus) + lhs * rhs.modpow(&(&modulus - BigUint::from(2_u32)), &modulus) } //TODO ensure that modulus is prime _ => unreachable!("ICE - bigint_op must be called for an operation"), }; diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml index 318833180ea..448642e1a9e 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -16,8 +16,9 @@ repository.workspace = true acir.workspace = true acvm_blackbox_solver.workspace = true thiserror.workspace = true -num-traits.workspace = true cfg-if = "1.0.0" +hex.workspace = true +lazy_static = "1.4" # BN254 fixed base scalar multiplication solver grumpkin = { version = "0.1.0", package = "noir_grumpkin", features = ["std"] } @@ -38,6 +39,18 @@ js-sys.workspace = true getrandom.workspace = true wasmer = "4.2.6" +[dev-dependencies] +criterion = "0.5.0" +pprof = { version = "0.12", features = [ + "flamegraph", + "frame-pointer", + "criterion", +] } + +[[bench]] +name = "criterion" +harness = false + [features] default = ["bn254"] bn254 = ["acir/bn254"] diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs new file mode 100644 index 00000000000..eb529ed2c11 --- /dev/null +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs @@ -0,0 +1,21 @@ +use criterion::{criterion_group, criterion_main, Criterion}; +use std::{hint::black_box, time::Duration}; + +use acir::FieldElement; +use bn254_blackbox_solver::poseidon2_permutation; + +use pprof::criterion::{Output, PProfProfiler}; + +fn bench_poseidon2(c: &mut Criterion) { + let inputs = [FieldElement::zero(); 4]; + + c.bench_function("poseidon2", |b| b.iter(|| poseidon2_permutation(black_box(&inputs), 4))); +} + +criterion_group!( + name = benches; + config = Criterion::default().sample_size(40).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); + targets = bench_poseidon2 +); + +criterion_main!(benches); diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs index 5e68c7d4030..cd91c290f49 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs @@ -47,17 +47,29 @@ pub fn fixed_base_scalar_mul( } } +fn create_point(x: FieldElement, y: FieldElement) -> Result<grumpkin::SWAffine, String> { + let point = grumpkin::SWAffine::new_unchecked(x.into_repr(), y.into_repr()); + if !point.is_on_curve() { + return Err(format!("Point ({}, {}) is not on curve", x.to_hex(), y.to_hex())); + }; + if !point.is_in_correct_subgroup_assuming_on_curve() { + return Err(format!("Point ({}, {}) is not in correct subgroup", x.to_hex(), y.to_hex())); + }; + Ok(point) +} + pub fn embedded_curve_add( input1_x: FieldElement, input1_y: FieldElement, input2_x: FieldElement,
input2_y: FieldElement, ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - let mut point1 = grumpkin::SWAffine::new(input1_x.into_repr(), input1_y.into_repr()); - let point2 = grumpkin::SWAffine::new(input2_x.into_repr(), input2_y.into_repr()); - let res = point1 + point2; - point1 = res.into(); - if let Some((res_x, res_y)) = point1.xy() { + let point1 = create_point(input1_x, input1_y) + .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::EmbeddedCurveAdd, e))?; + let point2 = create_point(input2_x, input2_y) + .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::EmbeddedCurveAdd, e))?; + let res = grumpkin::SWAffine::from(point1 + point2); + if let Some((res_x, res_y)) = res.xy() { Ok((FieldElement::from_repr(*res_x), FieldElement::from_repr(*res_y))) } else { Err(BlackBoxResolutionError::Failed( @@ -72,6 +84,7 @@ mod grumpkin_fixed_base_scalar_mul { use ark_ff::BigInteger; use super::*; + #[test] fn smoke_test() -> Result<(), BlackBoxResolutionError> { let input = FieldElement::one(); @@ -84,6 +97,7 @@ mod grumpkin_fixed_base_scalar_mul { assert_eq!(y, res.1.to_hex()); Ok(()) } + #[test] fn low_high_smoke_test() -> Result<(), BlackBoxResolutionError> { let low = FieldElement::one(); @@ -103,9 +117,9 @@ mod grumpkin_fixed_base_scalar_mul { let max_limb = FieldElement::from(u128::MAX); let invalid_limb = max_limb + FieldElement::one(); - let expected_error = Err(BlackBoxResolutionError::Failed( + let expected_error = Err(BlackBoxResolutionError::Failed( BlackBoxFunc::FixedBaseScalarMul, - "Limb 0000000000000000000000000000000100000000000000000000000000000000 is not less than 2^128".into() + "Limb 0000000000000000000000000000000100000000000000000000000000000000 is not less than 2^128".into(), )); let res = fixed_base_scalar_mul(&invalid_limb, &FieldElement::zero()); @@ -128,7 +142,23 @@ mod grumpkin_fixed_base_scalar_mul { res, Err(BlackBoxResolutionError::Failed( BlackBoxFunc::FixedBaseScalarMul, - "30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd47 is not a valid grumpkin scalar".into() + "30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd47 is not a valid grumpkin scalar".into(), + )) + ); + } + + #[test] + fn rejects_addition_of_points_not_in_curve() { + let x = FieldElement::from(1u128); + let y = FieldElement::from(2u128); + + let res = embedded_curve_add(x, y, x, y); + + assert_eq!( + res, + Err(BlackBoxResolutionError::Failed( + BlackBoxFunc::EmbeddedCurveAdd, + "Point (0000000000000000000000000000000000000000000000000000000000000001, 0000000000000000000000000000000000000000000000000000000000000002) is not on curve".into(), )) ); } diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs index f7c303888e8..25b10252a78 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -10,7 +10,7 @@ mod poseidon2; mod wasm; pub use fixed_base_scalar_mul::{embedded_curve_add, fixed_base_scalar_mul}; -use poseidon2::Poseidon2; +pub use poseidon2::poseidon2_permutation; use wasm::Barretenberg; use self::wasm::{Pedersen, SchnorrSig}; @@ -112,7 +112,6 @@ impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { inputs: &[FieldElement], len: u32, ) -> Result<Vec<FieldElement>, BlackBoxResolutionError> { - let poseidon = Poseidon2::new(); - poseidon.permutation(inputs, len) + poseidon2_permutation(inputs, len) } } diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs
b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs index e0ed5bcd053..65058e15099 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs @@ -1,9 +1,20 @@ use acir::FieldElement; use acvm_blackbox_solver::BlackBoxResolutionError; -use num_bigint::BigUint; -use num_traits::Num; +use lazy_static::lazy_static; -pub(crate) struct Poseidon2 { +pub fn poseidon2_permutation( + inputs: &[FieldElement], + len: u32, +) -> Result<Vec<FieldElement>, BlackBoxResolutionError> { + let poseidon = Poseidon2::new(); + poseidon.permutation(inputs, len) +} + +pub(crate) struct Poseidon2<'a> { + config: &'a Poseidon2Config, +} + +struct Poseidon2Config { t: u32, rounds_f: u32, rounds_p: u32, @@ -11,929 +22,415 @@ pub(crate) struct Poseidon2 { round_constant: [[FieldElement; 4]; 64], } -impl Poseidon2 { +fn field_from_hex(hex: &str) -> FieldElement { + FieldElement::from_be_bytes_reduce(&hex::decode(hex).expect("Should be passed only valid hex")) +} + +lazy_static! { + static ref INTERNAL_MATRIX_DIAGONAL: [FieldElement; 4] = [ + field_from_hex("10dc6e9c006ea38b04b1e03b4bd9490c0d03f98929ca1d7fb56821fd19d3b6e7"), + field_from_hex("0c28145b6a44df3e0149b3d0a30b3bb599df9756d4dd9b84a86b38cfb45a740b"), + field_from_hex("00544b8338791518b2c7645a50392798b21f75bb60e3596170067d00141cac15"), + field_from_hex("222c01175718386f2e2e82eb122789e352e105a3b8fa852613bc534433ee428b"), + ]; + static ref ROUND_CONSTANT: [[FieldElement; 4]; 64] = [ + [ + field_from_hex("19b849f69450b06848da1d39bd5e4a4302bb86744edc26238b0878e269ed23e5"), + field_from_hex("265ddfe127dd51bd7239347b758f0a1320eb2cc7450acc1dad47f80c8dcf34d6"), + field_from_hex("199750ec472f1809e0f66a545e1e51624108ac845015c2aa3dfc36bab497d8aa"), + field_from_hex("157ff3fe65ac7208110f06a5f74302b14d743ea25067f0ffd032f787c7f1cdf8"), + ], + [ + field_from_hex("2e49c43c4569dd9c5fd35ac45fca33f10b15c590692f8beefe18f4896ac94902"), + field_from_hex("0e35fb89981890520d4aef2b6d6506c3cb2f0b6973c24fa82731345ffa2d1f1e"), + field_from_hex("251ad47cb15c4f1105f109ae5e944f1ba9d9e7806d667ffec6fe723002e0b996"), + field_from_hex("13da07dc64d428369873e97160234641f8beb56fdd05e5f3563fa39d9c22df4e"), + ], + [ + field_from_hex("0c009b84e650e6d23dc00c7dccef7483a553939689d350cd46e7b89055fd4738"), + field_from_hex("011f16b1c63a854f01992e3956f42d8b04eb650c6d535eb0203dec74befdca06"), + field_from_hex("0ed69e5e383a688f209d9a561daa79612f3f78d0467ad45485df07093f367549"), + field_from_hex("04dba94a7b0ce9e221acad41472b6bbe3aec507f5eb3d33f463672264c9f789b"), + ], + [ + field_from_hex("0a3f2637d840f3a16eb094271c9d237b6036757d4bb50bf7ce732ff1d4fa28e8"), + field_from_hex("259a666f129eea198f8a1c502fdb38fa39b1f075569564b6e54a485d1182323f"), + field_from_hex("28bf7459c9b2f4c6d8e7d06a4ee3a47f7745d4271038e5157a32fdf7ede0d6a1"), + field_from_hex("0a1ca941f057037526ea200f489be8d4c37c85bbcce6a2aeec91bd6941432447"), + ], + [ + field_from_hex("0c6f8f958be0e93053d7fd4fc54512855535ed1539f051dcb43a26fd926361cf"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("123106a93cd17578d426e8128ac9d90aa9e8a00708e296e084dd57e69caaf811"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), +
field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("26e1ba52ad9285d97dd3ab52f8e840085e8fa83ff1e8f1877b074867cd2dee75"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1cb55cad7bd133de18a64c5c47b9c97cbe4d8b7bf9e095864471537e6a4ae2c5"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1dcd73e46acd8f8e0e2c7ce04bde7f6d2a53043d5060a41c7143f08e6e9055d0"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("011003e32f6d9c66f5852f05474a4def0cda294a0eb4e9b9b12b9bb4512e5574"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("2b1e809ac1d10ab29ad5f20d03a57dfebadfe5903f58bafed7c508dd2287ae8c"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("2539de1785b735999fb4dac35ee17ed0ef995d05ab2fc5faeaa69ae87bcec0a5"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("0c246c5a2ef8ee0126497f222b3e0a0ef4e1c3d41c86d46e43982cb11d77951d"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("192089c4974f68e95408148f7c0632edbb09e6a6ad1a1c2f3f0305f5d03b527b"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1eae0ad8ab68b2f06a0ee36eeb0d0c058529097d91096b756d8fdc2fb5a60d85"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("179190e5d0e22179e46f8282872abc88db6e2fdc0dee99e69768bd98c5d06bfb"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + 
field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("29bb9e2c9076732576e9a81c7ac4b83214528f7db00f31bf6cafe794a9b3cd1c"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("225d394e42207599403efd0c2464a90d52652645882aac35b10e590e6e691e08"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("064760623c25c8cf753d238055b444532be13557451c087de09efd454b23fd59"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("10ba3a0e01df92e87f301c4b716d8a394d67f4bf42a75c10922910a78f6b5b87"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("0e070bf53f8451b24f9c6e96b0c2a801cb511bc0c242eb9d361b77693f21471c"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1b94cd61b051b04dd39755ff93821a73ccd6cb11d2491d8aa7f921014de252fb"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1d7cb39bafb8c744e148787a2e70230f9d4e917d5713bb050487b5aa7d74070b"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("2ec93189bd1ab4f69117d0fe980c80ff8785c2961829f701bb74ac1f303b17db"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("2db366bfdd36d277a692bb825b86275beac404a19ae07a9082ea46bd83517926"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("062100eb485db06269655cf186a68532985275428450359adc99cec6960711b8"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + 
field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("0761d33c66614aaa570e7f1e8244ca1120243f92fa59e4f900c567bf41f5a59b"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("20fc411a114d13992c2705aa034e3f315d78608a0f7de4ccf7a72e494855ad0d"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("25b5c004a4bdfcb5add9ec4e9ab219ba102c67e8b3effb5fc3a30f317250bc5a"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("23b1822d278ed632a494e58f6df6f5ed038b186d8474155ad87e7dff62b37f4b"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("22734b4c5c3f9493606c4ba9012499bf0f14d13bfcfcccaa16102a29cc2f69e0"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("26c0c8fe09eb30b7e27a74dc33492347e5bdff409aa3610254413d3fad795ce5"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("070dd0ccb6bd7bbae88eac03fa1fbb26196be3083a809829bbd626df348ccad9"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("12b6595bdb329b6fb043ba78bb28c3bec2c0a6de46d8c5ad6067c4ebfd4250da"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("248d97d7f76283d63bec30e7a5876c11c06fca9b275c671c5e33d95bb7e8d729"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1a306d439d463b0816fc6fd64cc939318b45eb759ddde4aa106d15d9bd9baaaa"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + 
field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("28a8f8372e3c38daced7c00421cb4621f4f1b54ddc27821b0d62d3d6ec7c56cf"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("0094975717f9a8a8bb35152f24d43294071ce320c829f388bc852183e1e2ce7e"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("04d5ee4c3aa78f7d80fde60d716480d3593f74d4f653ae83f4103246db2e8d65"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("2a6cf5e9aa03d4336349ad6fb8ed2269c7bef54b8822cc76d08495c12efde187"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("2304d31eaab960ba9274da43e19ddeb7f792180808fd6e43baae48d7efcba3f3"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("03fd9ac865a4b2a6d5e7009785817249bff08a7e0726fcb4e1c11d39d199f0b0"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("00b7258ded52bbda2248404d55ee5044798afc3a209193073f7954d4d63b0b64"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("159f81ada0771799ec38fca2d4bf65ebb13d3a74f3298db36272c5ca65e92d9a"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1ef90e67437fbc8550237a75bc28e3bb9000130ea25f0c5471e144cf4264431f"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1e65f838515e5ff0196b49aa41a2d2568df739bc176b08ec95a79ed82932e30d"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + 
field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("2b1b045def3a166cec6ce768d079ba74b18c844e570e1f826575c1068c94c33f"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("0832e5753ceb0ff6402543b1109229c165dc2d73bef715e3f1c6e07c168bb173"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("02f614e9cedfb3dc6b762ae0a37d41bab1b841c2e8b6451bc5a8e3c390b6ad16"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("0e2427d38bd46a60dd640b8e362cad967370ebb777bedff40f6a0be27e7ed705"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("0493630b7c670b6deb7c84d414e7ce79049f0ec098c3c7c50768bbe29214a53a"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("22ead100e8e482674decdab17066c5a26bb1515355d5461a3dc06cc85327cea9"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("25b3e56e655b42cdaae2626ed2554d48583f1ae35626d04de5084e0b6d2a6f16"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1e32752ada8836ef5837a6cde8ff13dbb599c336349e4c584b4fdc0a0cf6f9d0"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("2fa2a871c15a387cc50f68f6f3c3455b23c00995f05078f672a9864074d412e5"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("2f569b8a9a4424c9278e1db7311e889f54ccbf10661bab7fcd18e7c7a7d83505"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + 
field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("044cb455110a8fdd531ade530234c518a7df93f7332ffd2144165374b246b43d"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("227808de93906d5d420246157f2e42b191fe8c90adfe118178ddc723a5319025"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("02fcca2934e046bc623adead873579865d03781ae090ad4a8579d2e7a6800355"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("0ef915f0ac120b876abccceb344a1d36bad3f3c5ab91a8ddcbec2e060d8befac"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + field_from_hex("0000000000000000000000000000000000000000000000000000000000000000"), + ], + [ + field_from_hex("1797130f4b7a3e1777eb757bc6f287f6ab0fb85f6be63b09f3b16ef2b1405d38"), + field_from_hex("0a76225dc04170ae3306c85abab59e608c7f497c20156d4d36c668555decc6e5"), + field_from_hex("1fffb9ec1992d66ba1e77a7b93209af6f8fa76d48acb664796174b5326a31a5c"), + field_from_hex("25721c4fc15a3f2853b57c338fa538d85f8fbba6c6b9c6090611889b797b9c5f"), + ], + [ + field_from_hex("0c817fd42d5f7a41215e3d07ba197216adb4c3790705da95eb63b982bfcaf75a"), + field_from_hex("13abe3f5239915d39f7e13c2c24970b6df8cf86ce00a22002bc15866e52b5a96"), + field_from_hex("2106feea546224ea12ef7f39987a46c85c1bc3dc29bdbd7a92cd60acb4d391ce"), + field_from_hex("21ca859468a746b6aaa79474a37dab49f1ca5a28c748bc7157e1b3345bb0f959"), + ], + [ + field_from_hex("05ccd6255c1e6f0c5cf1f0df934194c62911d14d0321662a8f1a48999e34185b"), + field_from_hex("0f0e34a64b70a626e464d846674c4c8816c4fb267fe44fe6ea28678cb09490a4"), + field_from_hex("0558531a4e25470c6157794ca36d0e9647dbfcfe350d64838f5b1a8a2de0d4bf"), + field_from_hex("09d3dca9173ed2faceea125157683d18924cadad3f655a60b72f5864961f1455"), + ], + [ + field_from_hex("0328cbd54e8c0913493f866ed03d218bf23f92d68aaec48617d4c722e5bd4335"), + field_from_hex("2bf07216e2aff0a223a487b1a7094e07e79e7bcc9798c648ee3347dd5329d34b"), + field_from_hex("1daf345a58006b736499c583cb76c316d6f78ed6a6dffc82111e11a63fe412df"), + field_from_hex("176563472456aaa746b694c60e1823611ef39039b2edc7ff391e6f2293d2c404"), + ], + ]; + static ref POSEIDON2_CONFIG: Poseidon2Config = Poseidon2Config { + t: 4, + rounds_f: 8, + rounds_p: 56, + internal_matrix_diagonal: *INTERNAL_MATRIX_DIAGONAL, + round_constant: *ROUND_CONSTANT, + }; +} + +impl<'a> Poseidon2<'a> { pub(crate) fn new() -> Self { - Poseidon2 { - t: 4, - rounds_f: 8, - rounds_p: 56, - internal_matrix_diagonal: [ - Poseidon2::field_from_hex( - "0x10dc6e9c006ea38b04b1e03b4bd9490c0d03f98929ca1d7fb56821fd19d3b6e7", - ), - Poseidon2::field_from_hex( - "0x0c28145b6a44df3e0149b3d0a30b3bb599df9756d4dd9b84a86b38cfb45a740b", - ), - Poseidon2::field_from_hex( - 
"0x00544b8338791518b2c7645a50392798b21f75bb60e3596170067d00141cac15", - ), - Poseidon2::field_from_hex( - "0x222c01175718386f2e2e82eb122789e352e105a3b8fa852613bc534433ee428b", - ), - ], - round_constant: [ - [ - Poseidon2::field_from_hex( - "0x19b849f69450b06848da1d39bd5e4a4302bb86744edc26238b0878e269ed23e5", - ), - Poseidon2::field_from_hex( - "0x265ddfe127dd51bd7239347b758f0a1320eb2cc7450acc1dad47f80c8dcf34d6", - ), - Poseidon2::field_from_hex( - "0x199750ec472f1809e0f66a545e1e51624108ac845015c2aa3dfc36bab497d8aa", - ), - Poseidon2::field_from_hex( - "0x157ff3fe65ac7208110f06a5f74302b14d743ea25067f0ffd032f787c7f1cdf8", - ), - ], - [ - Poseidon2::field_from_hex( - "0x2e49c43c4569dd9c5fd35ac45fca33f10b15c590692f8beefe18f4896ac94902", - ), - Poseidon2::field_from_hex( - "0x0e35fb89981890520d4aef2b6d6506c3cb2f0b6973c24fa82731345ffa2d1f1e", - ), - Poseidon2::field_from_hex( - "0x251ad47cb15c4f1105f109ae5e944f1ba9d9e7806d667ffec6fe723002e0b996", - ), - Poseidon2::field_from_hex( - "0x13da07dc64d428369873e97160234641f8beb56fdd05e5f3563fa39d9c22df4e", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0c009b84e650e6d23dc00c7dccef7483a553939689d350cd46e7b89055fd4738", - ), - Poseidon2::field_from_hex( - "0x011f16b1c63a854f01992e3956f42d8b04eb650c6d535eb0203dec74befdca06", - ), - Poseidon2::field_from_hex( - "0x0ed69e5e383a688f209d9a561daa79612f3f78d0467ad45485df07093f367549", - ), - Poseidon2::field_from_hex( - "0x04dba94a7b0ce9e221acad41472b6bbe3aec507f5eb3d33f463672264c9f789b", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0a3f2637d840f3a16eb094271c9d237b6036757d4bb50bf7ce732ff1d4fa28e8", - ), - Poseidon2::field_from_hex( - "0x259a666f129eea198f8a1c502fdb38fa39b1f075569564b6e54a485d1182323f", - ), - Poseidon2::field_from_hex( - "0x28bf7459c9b2f4c6d8e7d06a4ee3a47f7745d4271038e5157a32fdf7ede0d6a1", - ), - Poseidon2::field_from_hex( - "0x0a1ca941f057037526ea200f489be8d4c37c85bbcce6a2aeec91bd6941432447", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0c6f8f958be0e93053d7fd4fc54512855535ed1539f051dcb43a26fd926361cf", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x123106a93cd17578d426e8128ac9d90aa9e8a00708e296e084dd57e69caaf811", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x26e1ba52ad9285d97dd3ab52f8e840085e8fa83ff1e8f1877b074867cd2dee75", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1cb55cad7bd133de18a64c5c47b9c97cbe4d8b7bf9e095864471537e6a4ae2c5", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", 
- ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1dcd73e46acd8f8e0e2c7ce04bde7f6d2a53043d5060a41c7143f08e6e9055d0", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x011003e32f6d9c66f5852f05474a4def0cda294a0eb4e9b9b12b9bb4512e5574", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x2b1e809ac1d10ab29ad5f20d03a57dfebadfe5903f58bafed7c508dd2287ae8c", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x2539de1785b735999fb4dac35ee17ed0ef995d05ab2fc5faeaa69ae87bcec0a5", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0c246c5a2ef8ee0126497f222b3e0a0ef4e1c3d41c86d46e43982cb11d77951d", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x192089c4974f68e95408148f7c0632edbb09e6a6ad1a1c2f3f0305f5d03b527b", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1eae0ad8ab68b2f06a0ee36eeb0d0c058529097d91096b756d8fdc2fb5a60d85", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x179190e5d0e22179e46f8282872abc88db6e2fdc0dee99e69768bd98c5d06bfb", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - 
"0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x29bb9e2c9076732576e9a81c7ac4b83214528f7db00f31bf6cafe794a9b3cd1c", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x225d394e42207599403efd0c2464a90d52652645882aac35b10e590e6e691e08", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x064760623c25c8cf753d238055b444532be13557451c087de09efd454b23fd59", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x10ba3a0e01df92e87f301c4b716d8a394d67f4bf42a75c10922910a78f6b5b87", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0e070bf53f8451b24f9c6e96b0c2a801cb511bc0c242eb9d361b77693f21471c", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1b94cd61b051b04dd39755ff93821a73ccd6cb11d2491d8aa7f921014de252fb", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1d7cb39bafb8c744e148787a2e70230f9d4e917d5713bb050487b5aa7d74070b", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x2ec93189bd1ab4f69117d0fe980c80ff8785c2961829f701bb74ac1f303b17db", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - 
Poseidon2::field_from_hex( - "0x2db366bfdd36d277a692bb825b86275beac404a19ae07a9082ea46bd83517926", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x062100eb485db06269655cf186a68532985275428450359adc99cec6960711b8", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0761d33c66614aaa570e7f1e8244ca1120243f92fa59e4f900c567bf41f5a59b", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x20fc411a114d13992c2705aa034e3f315d78608a0f7de4ccf7a72e494855ad0d", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x25b5c004a4bdfcb5add9ec4e9ab219ba102c67e8b3effb5fc3a30f317250bc5a", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x23b1822d278ed632a494e58f6df6f5ed038b186d8474155ad87e7dff62b37f4b", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x22734b4c5c3f9493606c4ba9012499bf0f14d13bfcfcccaa16102a29cc2f69e0", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x26c0c8fe09eb30b7e27a74dc33492347e5bdff409aa3610254413d3fad795ce5", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - 
"0x070dd0ccb6bd7bbae88eac03fa1fbb26196be3083a809829bbd626df348ccad9", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x12b6595bdb329b6fb043ba78bb28c3bec2c0a6de46d8c5ad6067c4ebfd4250da", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x248d97d7f76283d63bec30e7a5876c11c06fca9b275c671c5e33d95bb7e8d729", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1a306d439d463b0816fc6fd64cc939318b45eb759ddde4aa106d15d9bd9baaaa", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x28a8f8372e3c38daced7c00421cb4621f4f1b54ddc27821b0d62d3d6ec7c56cf", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0094975717f9a8a8bb35152f24d43294071ce320c829f388bc852183e1e2ce7e", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x04d5ee4c3aa78f7d80fde60d716480d3593f74d4f653ae83f4103246db2e8d65", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x2a6cf5e9aa03d4336349ad6fb8ed2269c7bef54b8822cc76d08495c12efde187", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x2304d31eaab960ba9274da43e19ddeb7f792180808fd6e43baae48d7efcba3f3", - ), - 
Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x03fd9ac865a4b2a6d5e7009785817249bff08a7e0726fcb4e1c11d39d199f0b0", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x00b7258ded52bbda2248404d55ee5044798afc3a209193073f7954d4d63b0b64", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x159f81ada0771799ec38fca2d4bf65ebb13d3a74f3298db36272c5ca65e92d9a", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1ef90e67437fbc8550237a75bc28e3bb9000130ea25f0c5471e144cf4264431f", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1e65f838515e5ff0196b49aa41a2d2568df739bc176b08ec95a79ed82932e30d", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x2b1b045def3a166cec6ce768d079ba74b18c844e570e1f826575c1068c94c33f", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0832e5753ceb0ff6402543b1109229c165dc2d73bef715e3f1c6e07c168bb173", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x02f614e9cedfb3dc6b762ae0a37d41bab1b841c2e8b6451bc5a8e3c390b6ad16", - ), - Poseidon2::field_from_hex( - 
"0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0e2427d38bd46a60dd640b8e362cad967370ebb777bedff40f6a0be27e7ed705", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0493630b7c670b6deb7c84d414e7ce79049f0ec098c3c7c50768bbe29214a53a", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x22ead100e8e482674decdab17066c5a26bb1515355d5461a3dc06cc85327cea9", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x25b3e56e655b42cdaae2626ed2554d48583f1ae35626d04de5084e0b6d2a6f16", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1e32752ada8836ef5837a6cde8ff13dbb599c336349e4c584b4fdc0a0cf6f9d0", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x2fa2a871c15a387cc50f68f6f3c3455b23c00995f05078f672a9864074d412e5", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x2f569b8a9a4424c9278e1db7311e889f54ccbf10661bab7fcd18e7c7a7d83505", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x044cb455110a8fdd531ade530234c518a7df93f7332ffd2144165374b246b43d", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - 
Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x227808de93906d5d420246157f2e42b191fe8c90adfe118178ddc723a5319025", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x02fcca2934e046bc623adead873579865d03781ae090ad4a8579d2e7a6800355", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0ef915f0ac120b876abccceb344a1d36bad3f3c5ab91a8ddcbec2e060d8befac", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - Poseidon2::field_from_hex( - "0x0000000000000000000000000000000000000000000000000000000000000000", - ), - ], - [ - Poseidon2::field_from_hex( - "0x1797130f4b7a3e1777eb757bc6f287f6ab0fb85f6be63b09f3b16ef2b1405d38", - ), - Poseidon2::field_from_hex( - "0x0a76225dc04170ae3306c85abab59e608c7f497c20156d4d36c668555decc6e5", - ), - Poseidon2::field_from_hex( - "0x1fffb9ec1992d66ba1e77a7b93209af6f8fa76d48acb664796174b5326a31a5c", - ), - Poseidon2::field_from_hex( - "0x25721c4fc15a3f2853b57c338fa538d85f8fbba6c6b9c6090611889b797b9c5f", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0c817fd42d5f7a41215e3d07ba197216adb4c3790705da95eb63b982bfcaf75a", - ), - Poseidon2::field_from_hex( - "0x13abe3f5239915d39f7e13c2c24970b6df8cf86ce00a22002bc15866e52b5a96", - ), - Poseidon2::field_from_hex( - "0x2106feea546224ea12ef7f39987a46c85c1bc3dc29bdbd7a92cd60acb4d391ce", - ), - Poseidon2::field_from_hex( - "0x21ca859468a746b6aaa79474a37dab49f1ca5a28c748bc7157e1b3345bb0f959", - ), - ], - [ - Poseidon2::field_from_hex( - "0x05ccd6255c1e6f0c5cf1f0df934194c62911d14d0321662a8f1a48999e34185b", - ), - Poseidon2::field_from_hex( - "0x0f0e34a64b70a626e464d846674c4c8816c4fb267fe44fe6ea28678cb09490a4", - ), - Poseidon2::field_from_hex( - "0x0558531a4e25470c6157794ca36d0e9647dbfcfe350d64838f5b1a8a2de0d4bf", - ), - Poseidon2::field_from_hex( - "0x09d3dca9173ed2faceea125157683d18924cadad3f655a60b72f5864961f1455", - ), - ], - [ - Poseidon2::field_from_hex( - "0x0328cbd54e8c0913493f866ed03d218bf23f92d68aaec48617d4c722e5bd4335", - ), - Poseidon2::field_from_hex( - "0x2bf07216e2aff0a223a487b1a7094e07e79e7bcc9798c648ee3347dd5329d34b", - ), - Poseidon2::field_from_hex( - "0x1daf345a58006b736499c583cb76c316d6f78ed6a6dffc82111e11a63fe412df", - ), - Poseidon2::field_from_hex( - "0x176563472456aaa746b694c60e1823611ef39039b2edc7ff391e6f2293d2c404", - ), - ], - ], - } - } - fn field_from_hex(hex: &str) -> FieldElement { - let bigint = BigUint::from_str_radix(hex.strip_prefix("0x").unwrap(), 16).unwrap(); - FieldElement::from_be_bytes_reduce(&bigint.to_bytes_be()) + Poseidon2 { config: &POSEIDON2_CONFIG } } fn single_box(x: FieldElement) -> FieldElement { @@ 
-948,7 +445,9 @@ impl Poseidon2 { } fn add_round_constants(&self, state: &mut [FieldElement], round: usize) { - for (state_element, constant_element) in state.iter_mut().zip(self.round_constant[round]) { + for (state_element, constant_element) in + state.iter_mut().zip(self.config.round_constant[round]) + { *state_element += constant_element; } } @@ -982,7 +481,7 @@ impl Poseidon2 { sum += *i; } for (index, i) in input.iter_mut().enumerate() { - *i = *i * self.internal_matrix_diagonal[index]; + *i = *i * self.config.internal_matrix_diagonal[index]; *i += sum; } } @@ -1002,10 +501,10 @@ impl Poseidon2 { ), )); } - if len != self.t { + if len != self.config.t { return Err(BlackBoxResolutionError::Failed( acir::BlackBoxFunc::Poseidon2Permutation, - format!("Expected {} values but encountered {}", self.t, len), + format!("Expected {} values but encountered {}", self.config.t, len), )); } // Read witness assignments @@ -1017,22 +516,22 @@ impl Poseidon2 { Self::matrix_multiplication_4x4(&mut state); // First set of external rounds - let rf_first = self.rounds_f / 2; + let rf_first = self.config.rounds_f / 2; for r in 0..rf_first { self.add_round_constants(&mut state, r as usize); Self::s_box(&mut state); Self::matrix_multiplication_4x4(&mut state); } // Internal rounds - let p_end = rf_first + self.rounds_p; + let p_end = rf_first + self.config.rounds_p; for r in rf_first..p_end { - state[0] += self.round_constant[r as usize][0]; + state[0] += self.config.round_constant[r as usize][0]; state[0] = Self::single_box(state[0]); self.internal_m_multiplication(&mut state); } // Remaining external rounds - let num_rounds = self.rounds_f + self.rounds_p; + let num_rounds = self.config.rounds_f + self.config.rounds_p; for i in p_end..num_rounds { self.add_round_constants(&mut state, i as usize); Self::s_box(&mut state); @@ -1041,3 +540,24 @@ impl Poseidon2 { Ok(state.into()) } } + +#[cfg(test)] +mod test { + use acir::FieldElement; + + use super::{field_from_hex, poseidon2_permutation}; + + #[test] + fn smoke_test() { + let inputs = [FieldElement::zero(); 4]; + let result = poseidon2_permutation(&inputs, 4).expect("should successfully permute"); + + let expected_result = [ + field_from_hex("18DFB8DC9B82229CFF974EFEFC8DF78B1CE96D9D844236B496785C698BC6732E"), + field_from_hex("095C230D1D37A246E8D2D5A63B165FE0FADE040D442F61E25F0590E5FB76F839"), + field_from_hex("0BB9545846E1AFA4FA3C97414A60A20FC4949F537A68CCECA34C5CE71E28AA59"), + field_from_hex("18A4F34C9C6F99335FF7638B82AEED9018026618358873C982BBDDE265B2ED6D"), + ]; + assert_eq!(result, expected_result); + } +} diff --git a/noir/noir-repo/compiler/noirc_driver/src/contract.rs b/noir/noir-repo/compiler/noirc_driver/src/contract.rs index a33a9b809d3..d6c3dc6205d 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/contract.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/contract.rs @@ -53,7 +53,7 @@ pub struct ContractFunction { )] pub bytecode: Program, - pub debug: DebugInfo, + pub debug: Vec, /// Names of the functions in the program. These are used for more informative debugging and benchmarking. 
pub names: Vec, diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index 6fe44780484..8a554879e9f 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -430,7 +430,8 @@ fn compile_contract_inner( } if errors.is_empty() { - let debug_infos: Vec<_> = functions.iter().map(|function| function.debug.clone()).collect(); + let debug_infos: Vec<_> = + functions.iter().flat_map(|function| function.debug.clone()).collect(); let file_map = filter_relevant_files(&debug_infos, &context.file_manager); let out_structs = contract @@ -547,13 +548,8 @@ pub fn compile_no_check( Ok(CompiledProgram { hash, - // TODO(https://github.com/noir-lang/noir/issues/4428) program, - // TODO(https://github.com/noir-lang/noir/issues/4428) - // Debug info is only relevant for errors at execution time which is not yet supported - // The CompileProgram `debug` field is used in multiple places and is better - // left to be updated once execution of multiple ACIR functions is enabled - debug: debug[0].clone(), + debug, abi, file_map, noir_version: NOIR_ARTIFACT_VERSION_STRING.to_string(), diff --git a/noir/noir-repo/compiler/noirc_driver/src/program.rs b/noir/noir-repo/compiler/noirc_driver/src/program.rs index 9ffd2d70dda..ed7ddb29f59 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/program.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/program.rs @@ -24,7 +24,7 @@ pub struct CompiledProgram { )] pub program: Program, pub abi: noirc_abi::Abi, - pub debug: DebugInfo, + pub debug: Vec, pub file_map: BTreeMap, pub warnings: Vec, /// Names of the functions in the program. These are used for more informative debugging and benchmarking. diff --git a/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs b/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs index 09117bdc3b7..54e2521e413 100644 --- a/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs +++ b/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs @@ -46,6 +46,49 @@ pub type DebugVariables = BTreeMap; pub type DebugFunctions = BTreeMap; pub type DebugTypes = BTreeMap; +#[derive(Default, Debug, Clone, Deserialize, Serialize)] +pub struct ProgramDebugInfo { + pub debug_infos: Vec, +} + +impl ProgramDebugInfo { + pub fn serialize_compressed_base64_json( + debug_info: &ProgramDebugInfo, + s: S, + ) -> Result + where + S: Serializer, + { + let json_str = serde_json::to_string(debug_info).map_err(S::Error::custom)?; + + let mut encoder = DeflateEncoder::new(Vec::new(), Compression::default()); + encoder.write_all(json_str.as_bytes()).map_err(S::Error::custom)?; + let compressed_data = encoder.finish().map_err(S::Error::custom)?; + + let encoded_b64 = base64::prelude::BASE64_STANDARD.encode(compressed_data); + s.serialize_str(&encoded_b64) + } + + pub fn deserialize_compressed_base64_json<'de, D>( + deserializer: D, + ) -> Result + where + D: Deserializer<'de>, + { + let encoded_b64: String = Deserialize::deserialize(deserializer)?; + + let compressed_data = + base64::prelude::BASE64_STANDARD.decode(encoded_b64).map_err(D::Error::custom)?; + + let mut decoder = DeflateDecoder::new(&compressed_data[..]); + let mut decompressed_data = Vec::new(); + decoder.read_to_end(&mut decompressed_data).map_err(D::Error::custom)?; + + let json_str = String::from_utf8(decompressed_data).map_err(D::Error::custom)?; + serde_json::from_str(&json_str).map_err(D::Error::custom) + } +} + #[serde_as] #[derive(Default, Debug, Clone, Deserialize, Serialize)] pub 
struct DebugInfo { @@ -130,40 +173,4 @@ impl DebugInfo { counted_opcodes } - - pub fn serialize_compressed_base64_json( - debug_info: &DebugInfo, - s: S, - ) -> Result - where - S: Serializer, - { - let json_str = serde_json::to_string(debug_info).map_err(S::Error::custom)?; - - let mut encoder = DeflateEncoder::new(Vec::new(), Compression::default()); - encoder.write_all(json_str.as_bytes()).map_err(S::Error::custom)?; - let compressed_data = encoder.finish().map_err(S::Error::custom)?; - - let encoded_b64 = base64::prelude::BASE64_STANDARD.encode(compressed_data); - s.serialize_str(&encoded_b64) - } - - pub fn deserialize_compressed_base64_json<'de, D>( - deserializer: D, - ) -> Result - where - D: Deserializer<'de>, - { - let encoded_b64: String = Deserialize::deserialize(deserializer)?; - - let compressed_data = - base64::prelude::BASE64_STANDARD.decode(encoded_b64).map_err(D::Error::custom)?; - - let mut decoder = DeflateDecoder::new(&compressed_data[..]); - let mut decompressed_data = Vec::new(); - decoder.read_to_end(&mut decompressed_data).map_err(D::Error::custom)?; - - let json_str = String::from_utf8(decompressed_data).map_err(D::Error::custom)?; - serde_json::from_str(&json_str).map_err(D::Error::custom) - } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 59ce4e4f754..2a4b5276547 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -243,6 +243,7 @@ impl<'a> FunctionContext<'a> { Ok(Tree::Branch(vec![string, field_count.into(), fields])) } + ast::Literal::Unit => Ok(Self::unit_value()), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml index 03b92e15032..e39ab2fe106 100644 --- a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml @@ -24,9 +24,16 @@ small-ord-set = "0.1.3" regex = "1.9.1" tracing.workspace = true petgraph = "0.6" +lalrpop-util = { version = "0.20.2", features = ["lexer"] } [dev-dependencies] base64.workspace = true strum = "0.24" strum_macros = "0.24" tempfile.workspace = true + +[build-dependencies] +lalrpop = "0.20.2" + +[features] +experimental_parser = [] diff --git a/noir/noir-repo/compiler/noirc_frontend/build.rs b/noir/noir-repo/compiler/noirc_frontend/build.rs new file mode 100644 index 00000000000..eb896a377ae --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/build.rs @@ -0,0 +1,28 @@ +use std::fs::{read_to_string, File}; +use std::io::Write; + +fn main() { + lalrpop::Configuration::new() + .emit_rerun_directives(true) + .use_cargo_dir_conventions() + .process() + .unwrap(); + + // here, we get a lint error from "extern crate core" so patching that until lalrpop does + // (adding cfg directives appears to be unsupported by lalrpop) + let out_dir = std::env::var("OUT_DIR").unwrap(); + let parser_path = std::path::Path::new(&out_dir).join("noir_parser.rs"); + let content_str = read_to_string(parser_path.clone()).unwrap(); + let mut parser_file = File::create(parser_path).unwrap(); + for line in content_str.lines() { + if line.contains("extern crate core") { + parser_file + .write_all( + format!("{}\n", line.replace("extern crate core", "use core")).as_bytes(), + ) + .unwrap(); + } else { + parser_file.write_all(format!("{}\n", line).as_bytes()).unwrap(); + } + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs 
b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs index 4547dc2a176..254ec4a7590 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs @@ -112,6 +112,9 @@ pub enum UnresolvedTypeData { /*env:*/ Box, ), + // The type of quoted code for metaprogramming + Code, + Unspecified, // This is for when the user declares a variable without specifying it's type Error, } @@ -200,6 +203,7 @@ impl std::fmt::Display for UnresolvedTypeData { } } MutableReference(element) => write!(f, "&mut {element}"), + Code => write!(f, "Code"), Unit => write!(f, "()"), Error => write!(f, "error"), Unspecified => write!(f, "unspecified"), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_ast.rs new file mode 100644 index 00000000000..8ffcbce7d62 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_ast.rs @@ -0,0 +1,350 @@ +use iter_extended::vecmap; +use noirc_errors::{Span, Spanned}; + +use crate::ast::{ConstrainStatement, Expression, Statement, StatementKind}; +use crate::hir_def::expr::{HirArrayLiteral, HirExpression, HirIdent}; +use crate::hir_def::stmt::{HirLValue, HirPattern, HirStatement}; +use crate::macros_api::HirLiteral; +use crate::node_interner::{ExprId, NodeInterner, StmtId}; +use crate::{ + ArrayLiteral, AssignStatement, BlockExpression, CallExpression, CastExpression, ConstrainKind, + ConstructorExpression, ExpressionKind, ForLoopStatement, ForRange, Ident, IfExpression, + IndexExpression, InfixExpression, LValue, Lambda, LetStatement, Literal, + MemberAccessExpression, MethodCallExpression, Path, Pattern, PrefixExpression, Type, + UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, +}; + +// TODO: +// - Full path for idents & types +// - Assert/AssertEq information lost +// - The type name span is lost in constructor patterns & expressions +// - All type spans are lost +// - Type::TypeVariable has no equivalent in the Ast + +impl StmtId { + #[allow(unused)] + fn to_ast(self, interner: &NodeInterner) -> Statement { + let statement = interner.statement(&self); + let span = interner.statement_span(&self); + + let kind = match statement { + HirStatement::Let(let_stmt) => { + let pattern = let_stmt.pattern.into_ast(interner); + let r#type = interner.id_type(let_stmt.expression).to_ast(); + let expression = let_stmt.expression.to_ast(interner); + StatementKind::Let(LetStatement { + pattern, + r#type, + expression, + attributes: Vec::new(), + }) + } + HirStatement::Constrain(constrain) => { + let expr = constrain.0.to_ast(interner); + let message = constrain.2.map(|message| message.to_ast(interner)); + + // TODO: Find difference in usage between Assert & AssertEq + StatementKind::Constrain(ConstrainStatement(expr, message, ConstrainKind::Assert)) + } + HirStatement::Assign(assign) => StatementKind::Assign(AssignStatement { + lvalue: assign.lvalue.into_ast(interner), + expression: assign.expression.to_ast(interner), + }), + HirStatement::For(for_stmt) => StatementKind::For(ForLoopStatement { + identifier: for_stmt.identifier.to_ast(interner), + range: ForRange::Range( + for_stmt.start_range.to_ast(interner), + for_stmt.end_range.to_ast(interner), + ), + block: for_stmt.block.to_ast(interner), + span, + }), + HirStatement::Break => StatementKind::Break, + HirStatement::Continue => StatementKind::Continue, + HirStatement::Expression(expr) => StatementKind::Expression(expr.to_ast(interner)), + 
HirStatement::Semi(expr) => StatementKind::Semi(expr.to_ast(interner)), + HirStatement::Error => StatementKind::Error, + }; + + Statement { kind, span } + } +} + +impl ExprId { + #[allow(unused)] + fn to_ast(self, interner: &NodeInterner) -> Expression { + let expression = interner.expression(&self); + let span = interner.expr_span(&self); + + let kind = match expression { + HirExpression::Ident(ident) => { + let path = Path::from_ident(ident.to_ast(interner)); + ExpressionKind::Variable(path) + } + HirExpression::Literal(HirLiteral::Array(array)) => { + let array = array.into_ast(interner, span); + ExpressionKind::Literal(Literal::Array(array)) + } + HirExpression::Literal(HirLiteral::Slice(array)) => { + let array = array.into_ast(interner, span); + ExpressionKind::Literal(Literal::Slice(array)) + } + HirExpression::Literal(HirLiteral::Bool(value)) => { + ExpressionKind::Literal(Literal::Bool(value)) + } + HirExpression::Literal(HirLiteral::Integer(value, sign)) => { + ExpressionKind::Literal(Literal::Integer(value, sign)) + } + HirExpression::Literal(HirLiteral::Str(string)) => { + ExpressionKind::Literal(Literal::Str(string)) + } + HirExpression::Literal(HirLiteral::FmtStr(string, _exprs)) => { + // TODO: Is throwing away the exprs here valid? + ExpressionKind::Literal(Literal::FmtStr(string)) + } + HirExpression::Literal(HirLiteral::Unit) => ExpressionKind::Literal(Literal::Unit), + HirExpression::Block(expr) => { + let statements = vecmap(expr.statements, |statement| statement.to_ast(interner)); + ExpressionKind::Block(BlockExpression { statements }) + } + HirExpression::Prefix(prefix) => ExpressionKind::Prefix(Box::new(PrefixExpression { + operator: prefix.operator, + rhs: prefix.rhs.to_ast(interner), + })), + HirExpression::Infix(infix) => ExpressionKind::Infix(Box::new(InfixExpression { + lhs: infix.lhs.to_ast(interner), + operator: Spanned::from(infix.operator.location.span, infix.operator.kind), + rhs: infix.rhs.to_ast(interner), + })), + HirExpression::Index(index) => ExpressionKind::Index(Box::new(IndexExpression { + collection: index.collection.to_ast(interner), + index: index.index.to_ast(interner), + })), + HirExpression::Constructor(constructor) => { + let type_name = constructor.r#type.borrow().name.to_string(); + let type_name = Path::from_single(type_name, span); + let fields = + vecmap(constructor.fields, |(name, expr)| (name, expr.to_ast(interner))); + + ExpressionKind::Constructor(Box::new(ConstructorExpression { type_name, fields })) + } + HirExpression::MemberAccess(access) => { + ExpressionKind::MemberAccess(Box::new(MemberAccessExpression { + lhs: access.lhs.to_ast(interner), + rhs: access.rhs, + })) + } + HirExpression::Call(call) => { + let func = Box::new(call.func.to_ast(interner)); + let arguments = vecmap(call.arguments, |arg| arg.to_ast(interner)); + ExpressionKind::Call(Box::new(CallExpression { func, arguments })) + } + HirExpression::MethodCall(method_call) => { + ExpressionKind::MethodCall(Box::new(MethodCallExpression { + object: method_call.object.to_ast(interner), + method_name: method_call.method, + arguments: vecmap(method_call.arguments, |arg| arg.to_ast(interner)), + })) + } + HirExpression::Cast(cast) => { + let lhs = cast.lhs.to_ast(interner); + let r#type = cast.r#type.to_ast(); + ExpressionKind::Cast(Box::new(CastExpression { lhs, r#type })) + } + HirExpression::If(if_expr) => ExpressionKind::If(Box::new(IfExpression { + condition: if_expr.condition.to_ast(interner), + consequence: if_expr.consequence.to_ast(interner), + alternative: 
if_expr.alternative.map(|expr| expr.to_ast(interner)), + })), + HirExpression::Tuple(fields) => { + ExpressionKind::Tuple(vecmap(fields, |field| field.to_ast(interner))) + } + HirExpression::Lambda(lambda) => { + let parameters = vecmap(lambda.parameters, |(pattern, typ)| { + (pattern.into_ast(interner), typ.to_ast()) + }); + let return_type = lambda.return_type.to_ast(); + let body = lambda.body.to_ast(interner); + ExpressionKind::Lambda(Box::new(Lambda { parameters, return_type, body })) + } + HirExpression::Quote(block) => ExpressionKind::Quote(block), + HirExpression::Error => ExpressionKind::Error, + }; + + Expression::new(kind, span) + } +} + +impl HirPattern { + fn into_ast(self, interner: &NodeInterner) -> Pattern { + match self { + HirPattern::Identifier(ident) => Pattern::Identifier(ident.to_ast(interner)), + HirPattern::Mutable(pattern, location) => { + let pattern = Box::new(pattern.into_ast(interner)); + Pattern::Mutable(pattern, location.span, false) + } + HirPattern::Tuple(patterns, location) => { + let patterns = vecmap(patterns, |pattern| pattern.into_ast(interner)); + Pattern::Tuple(patterns, location.span) + } + HirPattern::Struct(typ, patterns, location) => { + let patterns = + vecmap(patterns, |(name, pattern)| (name, pattern.into_ast(interner))); + let name = match typ.follow_bindings() { + Type::Struct(struct_def, _) => { + let struct_def = struct_def.borrow(); + struct_def.name.0.contents.clone() + } + // This pass shouldn't error so if the type isn't a struct we just get a string + // representation of any other type and use that. We're relying on name + // resolution to fail later when this Ast is re-converted to Hir. + other => other.to_string(), + }; + // The name span is lost here + let path = Path::from_single(name, location.span); + Pattern::Struct(path, patterns, location.span) + } + } + } +} + +impl HirIdent { + fn to_ast(&self, interner: &NodeInterner) -> Ident { + let name = interner.definition_name(self.id).to_owned(); + Ident(Spanned::from(self.location.span, name)) + } +} + +impl Type { + fn to_ast(&self) -> UnresolvedType { + let typ = match self { + Type::FieldElement => UnresolvedTypeData::FieldElement, + Type::Array(length, element) => { + let length = length.to_type_expression(); + let element = Box::new(element.to_ast()); + UnresolvedTypeData::Array(length, element) + } + Type::Slice(element) => { + let element = Box::new(element.to_ast()); + UnresolvedTypeData::Slice(element) + } + Type::Integer(sign, bit_size) => UnresolvedTypeData::Integer(*sign, *bit_size), + Type::Bool => UnresolvedTypeData::Bool, + Type::String(length) => { + let length = length.to_type_expression(); + UnresolvedTypeData::String(Some(length)) + } + Type::FmtString(length, element) => { + let length = length.to_type_expression(); + let element = Box::new(element.to_ast()); + UnresolvedTypeData::FormatString(length, element) + } + Type::Unit => UnresolvedTypeData::Unit, + Type::Tuple(fields) => { + let fields = vecmap(fields, |field| field.to_ast()); + UnresolvedTypeData::Tuple(fields) + } + Type::Struct(def, generics) => { + let struct_def = def.borrow(); + let generics = vecmap(generics, |generic| generic.to_ast()); + let name = Path::from_ident(struct_def.name.clone()); + UnresolvedTypeData::Named(name, generics, false) + } + Type::Alias(type_def, generics) => { + // Keep the alias name instead of expanding this in case the + // alias' definition was changed + let type_def = type_def.borrow(); + let generics = vecmap(generics, |generic| generic.to_ast()); + let name = 
Path::from_ident(type_def.name.clone()); + UnresolvedTypeData::Named(name, generics, false) + } + Type::TypeVariable(_, _) => todo!("Convert Type::TypeVariable Hir -> Ast"), + Type::TraitAsType(_, name, generics) => { + let generics = vecmap(generics, |generic| generic.to_ast()); + let name = Path::from_single(name.as_ref().clone(), Span::default()); + UnresolvedTypeData::TraitAsType(name, generics) + } + Type::NamedGeneric(_, name) => { + let name = Path::from_single(name.as_ref().clone(), Span::default()); + UnresolvedTypeData::TraitAsType(name, Vec::new()) + } + Type::Function(args, ret, env) => { + let args = vecmap(args, |arg| arg.to_ast()); + let ret = Box::new(ret.to_ast()); + let env = Box::new(env.to_ast()); + UnresolvedTypeData::Function(args, ret, env) + } + Type::MutableReference(element) => { + let element = Box::new(element.to_ast()); + UnresolvedTypeData::MutableReference(element) + } + // Type::Forall is only for generic functions which don't store a type + // in their Ast so they don't need to call to_ast for their Forall type. + // Since there is no UnresolvedTypeData equivalent for Type::Forall, we use + // this to ignore this case since it shouldn't be needed anyway. + Type::Forall(_, typ) => return typ.to_ast(), + Type::Constant(_) => panic!("Type::Constant where a type was expected: {self:?}"), + Type::Code => UnresolvedTypeData::Code, + Type::Error => UnresolvedTypeData::Error, + }; + + UnresolvedType { typ, span: None } + } + + fn to_type_expression(&self) -> UnresolvedTypeExpression { + let span = Span::default(); + + match self.follow_bindings() { + Type::Constant(length) => UnresolvedTypeExpression::Constant(length, span), + Type::NamedGeneric(_, name) => { + let path = Path::from_single(name.as_ref().clone(), span); + UnresolvedTypeExpression::Variable(path) + } + // TODO: This should be turned into a proper error. 
+ other => panic!("Cannot represent {other:?} as type expression"), + } + } +} + +impl HirLValue { + fn into_ast(self, interner: &NodeInterner) -> LValue { + match self { + HirLValue::Ident(ident, _) => LValue::Ident(ident.to_ast(interner)), + HirLValue::MemberAccess { object, field_name, field_index: _, typ: _, location } => { + let object = Box::new(object.into_ast(interner)); + LValue::MemberAccess { object, field_name, span: location.span } + } + HirLValue::Index { array, index, typ: _, location } => { + let array = Box::new(array.into_ast(interner)); + let index = index.to_ast(interner); + LValue::Index { array, index, span: location.span } + } + HirLValue::Dereference { lvalue, element_type: _, location } => { + let lvalue = Box::new(lvalue.into_ast(interner)); + LValue::Dereference(lvalue, location.span) + } + } + } +} + +impl HirArrayLiteral { + fn into_ast(self, interner: &NodeInterner, span: Span) -> ArrayLiteral { + match self { + HirArrayLiteral::Standard(elements) => { + ArrayLiteral::Standard(vecmap(elements, |element| element.to_ast(interner))) + } + HirArrayLiteral::Repeated { repeated_element, length } => { + let repeated_element = Box::new(repeated_element.to_ast(interner)); + let length = match length { + Type::Constant(length) => { + let literal = Literal::Integer((length as u128).into(), false); + let kind = ExpressionKind::Literal(literal); + Box::new(Expression::new(kind, span)) + } + other => panic!("Cannot convert non-constant type for repeated array literal from Hir -> Ast: {other:?}"), + }; + ArrayLiteral::Repeated { repeated_element, length } + } + } + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/mod.rs new file mode 100644 index 00000000000..91621c857cf --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/mod.rs @@ -0,0 +1 @@ +mod hir_to_ast; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs index 727a6596df1..b4804739cac 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs @@ -1,3 +1,4 @@ +pub mod comptime; pub mod def_collector; pub mod def_map; pub mod resolution; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 08b12069d76..9180201fe17 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -502,6 +502,7 @@ impl<'a> Resolver<'a> { let fields = self.resolve_type_inner(*fields, new_variables); Type::FmtString(Box::new(resolved_size), Box::new(fields)) } + Code => Type::Code, Unit => Type::Unit, Unspecified => Type::Error, Error => Type::Error, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs index 265b9e4b5a3..2d1ebf530e3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs @@ -2,7 +2,9 @@ use crate::token::{Attribute, DocStyle}; use super::{ errors::LexerErrorKind, - token::{IntType, Keyword, SpannedToken, Token, Tokens}, + token::{ + token_to_borrowed_token, BorrowedToken, IntType, Keyword, SpannedToken, Token, Tokens, + }, }; use acvm::FieldElement; use noirc_errors::{Position, Span}; @@ -21,6 +23,21 @@ pub struct 
Lexer<'a> { pub type SpannedTokenResult = Result; +pub(crate) fn from_spanned_token_result( + token_result: &SpannedTokenResult, +) -> Result<(usize, BorrowedToken<'_>, usize), LexerErrorKind> { + token_result + .as_ref() + .map(|spanned_token| { + ( + spanned_token.to_span().start() as usize, + token_to_borrowed_token(spanned_token.into()), + spanned_token.to_span().end() as usize, + ) + }) + .map_err(Clone::clone) +} + impl<'a> Lexer<'a> { /// Given a source file of noir code, return all the tokens in the file /// in order, along with any lexing errors that occurred. @@ -94,7 +111,7 @@ impl<'a> Lexer<'a> { fn next_token(&mut self) -> SpannedTokenResult { match self.next_char() { - Some(x) if x.is_whitespace() => { + Some(x) if Self::is_code_whitespace(x) => { let spanned = self.eat_whitespace(x); if self.skip_whitespaces { self.next_token() @@ -560,16 +577,21 @@ impl<'a> Lexer<'a> { } } + fn is_code_whitespace(c: char) -> bool { + c == '\t' || c == '\n' || c == '\r' || c == ' ' + } + /// Skips white space. They are not significant in the source language fn eat_whitespace(&mut self, initial_char: char) -> SpannedToken { let start = self.position; - let whitespace = self.eat_while(initial_char.into(), |ch| ch.is_whitespace()); + let whitespace = self.eat_while(initial_char.into(), Self::is_code_whitespace); SpannedToken::new(Token::Whitespace(whitespace), Span::inclusive(start, self.position)) } } impl<'a> Iterator for Lexer<'a> { type Item = SpannedTokenResult; + fn next(&mut self) -> Option { if self.done { None @@ -578,10 +600,12 @@ impl<'a> Iterator for Lexer<'a> { } } } + #[cfg(test)] mod tests { use super::*; use crate::token::{FunctionAttribute, SecondaryAttribute, TestScope}; + #[test] fn test_single_double_char() { let input = "! != + ( ) { } [ ] | , ; : :: < <= > >= & - -> . .. % / * = == << >>"; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs index 357b1ead593..86f26fd1c97 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs @@ -9,12 +9,105 @@ use crate::lexer::errors::LexerErrorKind; /// smallest unit of grammar. A parser may (will) decide to parse /// items differently depending on the Tokens present but will /// never parse the same ordering of identical tokens differently. +#[derive(PartialEq, Eq, Hash, Debug, Clone, PartialOrd, Ord)] +pub enum BorrowedToken<'input> { + Ident(&'input str), + Int(FieldElement), + Bool(bool), + Str(&'input str), + /// the u8 is the number of hashes, i.e. r###.. + RawStr(&'input str, u8), + FmtStr(&'input str), + Keyword(Keyword), + IntType(IntType), + Attribute(Attribute), + LineComment(&'input str, Option), + BlockComment(&'input str, Option), + /// < + Less, + /// <= + LessEqual, + /// > + Greater, + /// >= + GreaterEqual, + /// == + Equal, + /// != + NotEqual, + /// + + Plus, + /// - + Minus, + /// * + Star, + /// / + Slash, + /// % + Percent, + /// & + Ampersand, + /// ^ + Caret, + /// << + ShiftLeft, + /// >> + ShiftRight, + /// . + Dot, + /// .. + DoubleDot, + /// ( + LeftParen, + /// ) + RightParen, + /// { + LeftBrace, + /// } + RightBrace, + /// [ + LeftBracket, + /// ] + RightBracket, + /// -> + Arrow, + /// | + Pipe, + /// # + Pound, + /// , + Comma, + /// : + Colon, + /// :: + DoubleColon, + /// ; + Semicolon, + /// ! 
+ Bang, + /// = + Assign, + #[allow(clippy::upper_case_acronyms)] + EOF, + + Whitespace(&'input str), + + /// An invalid character is one that is not in noir's language or grammar. + /// + /// We don't report invalid tokens in the source as errors until parsing to + /// avoid reporting the error twice (once while lexing, again when it is encountered + /// during parsing). Reporting during lexing then removing these from the token stream + /// would not be equivalent as it would change the resulting parse. + Invalid(char), +} + #[derive(PartialEq, Eq, Hash, Debug, Clone, PartialOrd, Ord)] pub enum Token { Ident(String), Int(FieldElement), Bool(bool), Str(String), + /// the u8 is the number of hashes, i.e. r###.. RawStr(String, u8), FmtStr(String), Keyword(Keyword), @@ -100,6 +193,57 @@ pub enum Token { Invalid(char), } +pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { + match token { + Token::Ident(ref s) => BorrowedToken::Ident(s), + Token::Int(n) => BorrowedToken::Int(*n), + Token::Bool(b) => BorrowedToken::Bool(*b), + Token::Str(ref b) => BorrowedToken::Str(b), + Token::FmtStr(ref b) => BorrowedToken::FmtStr(b), + Token::RawStr(ref b, hashes) => BorrowedToken::RawStr(b, *hashes), + Token::Keyword(k) => BorrowedToken::Keyword(*k), + Token::Attribute(ref a) => BorrowedToken::Attribute(a.clone()), + Token::LineComment(ref s, _style) => BorrowedToken::LineComment(s, *_style), + Token::BlockComment(ref s, _style) => BorrowedToken::BlockComment(s, *_style), + Token::IntType(ref i) => BorrowedToken::IntType(i.clone()), + Token::Less => BorrowedToken::Less, + Token::LessEqual => BorrowedToken::LessEqual, + Token::Greater => BorrowedToken::Greater, + Token::GreaterEqual => BorrowedToken::GreaterEqual, + Token::Equal => BorrowedToken::Equal, + Token::NotEqual => BorrowedToken::NotEqual, + Token::Plus => BorrowedToken::Plus, + Token::Minus => BorrowedToken::Minus, + Token::Star => BorrowedToken::Star, + Token::Slash => BorrowedToken::Slash, + Token::Percent => BorrowedToken::Percent, + Token::Ampersand => BorrowedToken::Ampersand, + Token::Caret => BorrowedToken::Caret, + Token::ShiftLeft => BorrowedToken::ShiftLeft, + Token::ShiftRight => BorrowedToken::ShiftRight, + Token::Dot => BorrowedToken::Dot, + Token::DoubleDot => BorrowedToken::DoubleDot, + Token::LeftParen => BorrowedToken::LeftParen, + Token::RightParen => BorrowedToken::RightParen, + Token::LeftBrace => BorrowedToken::LeftBrace, + Token::RightBrace => BorrowedToken::RightBrace, + Token::LeftBracket => BorrowedToken::LeftBracket, + Token::RightBracket => BorrowedToken::RightBracket, + Token::Arrow => BorrowedToken::Arrow, + Token::Pipe => BorrowedToken::Pipe, + Token::Pound => BorrowedToken::Pound, + Token::Comma => BorrowedToken::Comma, + Token::Colon => BorrowedToken::Colon, + Token::DoubleColon => BorrowedToken::DoubleColon, + Token::Semicolon => BorrowedToken::Semicolon, + Token::Assign => BorrowedToken::Assign, + Token::Bang => BorrowedToken::Bang, + Token::EOF => BorrowedToken::EOF, + Token::Invalid(c) => BorrowedToken::Invalid(*c), + Token::Whitespace(ref s) => BorrowedToken::Whitespace(s), + } +} + #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] pub enum DocStyle { Outer, @@ -126,6 +270,12 @@ impl From for Token { } } +impl<'a> From<&'a SpannedToken> for &'a Token { + fn from(spt: &'a SpannedToken) -> Self { + &spt.0.contents + } +} + impl SpannedToken { pub fn new(token: Token, span: Span) -> SpannedToken { SpannedToken(Spanned::from(span, token)) diff --git 
a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs index 7d20c2bcfee..d9c33d8604e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -92,6 +92,7 @@ pub enum Literal { Slice(ArrayLiteral), Integer(FieldElement, Type, Location), Bool(bool), + Unit, Str(String), FmtStr(String, u64, Box), } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index 6aa0abce152..4e779244d30 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -52,14 +52,13 @@ struct LambdaContext { /// This struct holds the FIFO queue of functions to monomorphize, which is added to /// whenever a new (function, type) combination is encountered. struct Monomorphizer<'interner> { - /// Globals are keyed by their unique ID and expected type so that we can monomorphize - /// a new version of the global for each type. Note that 'global' here means 'globally - /// visible' and thus includes both functions and global variables. + /// Functions are keyed by their unique ID and expected type so that we can monomorphize + /// a new version of the function for each type. /// /// Using nested HashMaps here lets us avoid cloning HirTypes when calling .get() - globals: HashMap>, + functions: HashMap>, - /// Unlike globals, locals are only keyed by their unique ID because they are never + /// Unlike functions, locals are only keyed by their unique ID because they are never /// duplicated during monomorphization. Doing so would allow them to be used polymorphically /// but would also cause them to be re-evaluated which is a performance trap that would /// confuse users. 
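The `functions` field described in the comment above is a nested `HashMap`: the outer key is the original function's id and the inner key is the concrete type the function is instantiated at, so a lookup can borrow the type instead of cloning it. The following is a rough, self-contained sketch of that bookkeeping pattern, written with hypothetical stand-in types (`SourceFuncId`, `MonoType`, `MonoFuncId`) rather than the compiler's real `node_interner` ids:

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for the compiler's ids and types; they exist only to
// illustrate the nested-map pattern and are not the real Noir types.
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
struct SourceFuncId(u32);
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
struct MonoType(String);
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
struct MonoFuncId(u32);

#[derive(Default)]
struct FunctionCache {
    // Outer map: original function id. Inner map: one monomorphized id per concrete type.
    functions: HashMap<SourceFuncId, HashMap<MonoType, MonoFuncId>>,
}

impl FunctionCache {
    // Analogous to `define_function`: remember the monomorphized id for (id, typ).
    fn define(&mut self, id: SourceFuncId, typ: MonoType, new_id: MonoFuncId) {
        self.functions.entry(id).or_default().insert(typ, new_id);
    }

    // Analogous to the monomorphizer's lookup: borrowing `typ` avoids cloning the key.
    fn lookup(&self, id: SourceFuncId, typ: &MonoType) -> Option<MonoFuncId> {
        self.functions.get(&id).and_then(|inner| inner.get(typ)).copied()
    }
}

fn main() {
    let mut cache = FunctionCache::default();
    let id = SourceFuncId(1);
    let typ = MonoType("fn(Field) -> Field".to_string());

    assert_eq!(cache.lookup(id, &typ), None); // not monomorphized at this type yet
    cache.define(id, typ.clone(), MonoFuncId(7));
    assert_eq!(cache.lookup(id, &typ), Some(MonoFuncId(7)));
}
```

Keying the outer map by function id first keeps the common miss case (a function not yet monomorphized at any type) to a single cheap lookup, and the hit case never needs to clone the type used as the inner key.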
@@ -165,7 +164,7 @@ pub fn monomorphize_debug( impl<'interner> Monomorphizer<'interner> { fn new(interner: &'interner mut NodeInterner, debug_type_tracker: DebugTypeTracker) -> Self { Monomorphizer { - globals: HashMap::new(), + functions: HashMap::new(), locals: HashMap::new(), queue: VecDeque::new(), finished_functions: BTreeMap::new(), @@ -203,7 +202,7 @@ impl<'interner> Monomorphizer<'interner> { trait_method: Option, ) -> Definition { let typ = typ.follow_bindings(); - match self.globals.get(&id).and_then(|inner_map| inner_map.get(&typ)) { + match self.functions.get(&id).and_then(|inner_map| inner_map.get(&typ)) { Some(id) => Definition::Function(*id), None => { // Function has not been monomorphized yet @@ -251,8 +250,8 @@ impl<'interner> Monomorphizer<'interner> { } /// Prerequisite: typ = typ.follow_bindings() - fn define_global(&mut self, id: node_interner::FuncId, typ: HirType, new_id: FuncId) { - self.globals.entry(id).or_default().insert(typ, new_id); + fn define_function(&mut self, id: node_interner::FuncId, typ: HirType, new_id: FuncId) { + self.functions.entry(id).or_default().insert(typ, new_id); } fn compile_main( @@ -786,7 +785,7 @@ impl<'interner> Monomorphizer<'interner> { }) } - /// A local (ie non-global) ident only + /// A local (ie non-function) ident only fn local_ident( &mut self, ident: &HirIdent, @@ -1280,7 +1279,7 @@ impl<'interner> Monomorphizer<'interner> { trait_method: Option, ) -> FuncId { let new_id = self.next_function_id(); - self.define_global(id, function_type.clone(), new_id); + self.define_function(id, function_type.clone(), new_id); let bindings = self.interner.get_instantiation_bindings(expr_id); let bindings = self.follow_bindings(bindings); @@ -1553,9 +1552,7 @@ impl<'interner> Monomorphizer<'interner> { ast::Expression::Literal(ast::Literal::Integer(0_u128.into(), typ, location)) } ast::Type::Bool => ast::Expression::Literal(ast::Literal::Bool(false)), - // There is no unit literal currently. Replace it with 'false' since it should be ignored - // anyway. 
- ast::Type::Unit => ast::Expression::Literal(ast::Literal::Bool(false)), + ast::Type::Unit => ast::Expression::Literal(ast::Literal::Unit), ast::Type::Array(length, element_type) => { let element = self.zeroed_value_of_type(element_type.as_ref(), location); ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs index c253bfe7930..ea8f079cc2f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs @@ -110,6 +110,9 @@ impl AstPrinter { s.fmt(f)?; write!(f, "\"") } + super::ast::Literal::Unit => { + write!(f, "()") + } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs index ffd760d6d7f..84eb2d77315 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs @@ -917,6 +917,10 @@ impl NodeInterner { self.id_location(expr_id) } + pub fn statement_span(&self, stmt_id: &StmtId) -> Span { + self.id_location(stmt_id).span + } + pub fn get_struct(&self, id: StructId) -> Shared { self.structs[&id].clone() } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop b/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop new file mode 100644 index 00000000000..c8d293fb72f --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop @@ -0,0 +1,164 @@ +use noirc_errors::Span; + +use crate::lexer::token::BorrowedToken; +use crate::lexer::token as noir_token; +use crate::lexer::errors::LexerErrorKind; +use crate::parser::TopLevelStatement; +use crate::{Ident, Path, PathKind, UseTree, UseTreeKind}; + +use lalrpop_util::ErrorRecovery; + +grammar<'input, 'err>(input: &'input str, errors: &'err mut [ErrorRecovery, &'static str>]); + +extern { + type Location = usize; + + type Error = LexerErrorKind; + + // NOTE: each token needs a terminal defined + enum BorrowedToken<'input> { + string => BorrowedToken::Str(<&'input str>), + ident => BorrowedToken::Ident(<&'input str>), + + // symbols + "<" => BorrowedToken::Less, + "<=" => BorrowedToken::LessEqual, + ">" => BorrowedToken::Greater, + ">=" => BorrowedToken::GreaterEqual, + "==" => BorrowedToken::Equal, + "!=" => BorrowedToken::NotEqual, + "+" => BorrowedToken::Plus, + "-" => BorrowedToken::Minus, + "*" => BorrowedToken::Star, + "/" => BorrowedToken::Slash, + "%" => BorrowedToken::Percent, + "&" => BorrowedToken::Ampersand, + "^" => BorrowedToken::Caret, + "<<" => BorrowedToken::ShiftLeft, + ">>" => BorrowedToken::ShiftRight, + "." => BorrowedToken::Dot, + ".." => BorrowedToken::DoubleDot, + "(" => BorrowedToken::LeftParen, + ")" => BorrowedToken::RightParen, + "{" => BorrowedToken::LeftBrace, + "}" => BorrowedToken::RightBrace, + "[" => BorrowedToken::LeftBracket, + "]" => BorrowedToken::RightBracket, + "->" => BorrowedToken::Arrow, + "|" => BorrowedToken::Pipe, + "#" => BorrowedToken::Pound, + "," => BorrowedToken::Comma, + ":" => BorrowedToken::Colon, + "::" => BorrowedToken::DoubleColon, + ";" => BorrowedToken::Semicolon, + "!" 
=> BorrowedToken::Bang, + "=" => BorrowedToken::Assign, + // keywords + "as" => BorrowedToken::Keyword(noir_token::Keyword::As), + "assert" => BorrowedToken::Keyword(noir_token::Keyword::Assert), + "assert_eq" => BorrowedToken::Keyword(noir_token::Keyword::AssertEq), + "bool" => BorrowedToken::Keyword(noir_token::Keyword::Bool), + "break" => BorrowedToken::Keyword(noir_token::Keyword::Break), + "call_data" => BorrowedToken::Keyword(noir_token::Keyword::CallData), + "char" => BorrowedToken::Keyword(noir_token::Keyword::Char), + "comptime" => BorrowedToken::Keyword(noir_token::Keyword::CompTime), + "constrain" => BorrowedToken::Keyword(noir_token::Keyword::Constrain), + "continue" => BorrowedToken::Keyword(noir_token::Keyword::Continue), + "contract" => BorrowedToken::Keyword(noir_token::Keyword::Contract), + "crate" => BorrowedToken::Keyword(noir_token::Keyword::Crate), + "dep" => BorrowedToken::Keyword(noir_token::Keyword::Dep), + "distinct" => BorrowedToken::Keyword(noir_token::Keyword::Distinct), + "else" => BorrowedToken::Keyword(noir_token::Keyword::Else), + "Field" => BorrowedToken::Keyword(noir_token::Keyword::Field), + "fn" => BorrowedToken::Keyword(noir_token::Keyword::Fn), + "for" => BorrowedToken::Keyword(noir_token::Keyword::For), + "fmtstr" => BorrowedToken::Keyword(noir_token::Keyword::FormatString), + "global" => BorrowedToken::Keyword(noir_token::Keyword::Global), + "if" => BorrowedToken::Keyword(noir_token::Keyword::If), + "impl" => BorrowedToken::Keyword(noir_token::Keyword::Impl), + "in" => BorrowedToken::Keyword(noir_token::Keyword::In), + "let" => BorrowedToken::Keyword(noir_token::Keyword::Let), + "mod" => BorrowedToken::Keyword(noir_token::Keyword::Mod), + "mut" => BorrowedToken::Keyword(noir_token::Keyword::Mut), + "pub" => BorrowedToken::Keyword(noir_token::Keyword::Pub), + "quote" => BorrowedToken::Keyword(noir_token::Keyword::Quote), + "return" => BorrowedToken::Keyword(noir_token::Keyword::Return), + "return_data" => BorrowedToken::Keyword(noir_token::Keyword::ReturnData), + "str" => BorrowedToken::Keyword(noir_token::Keyword::String), + "struct" => BorrowedToken::Keyword(noir_token::Keyword::Struct), + "trait" => BorrowedToken::Keyword(noir_token::Keyword::Trait), + "type" => BorrowedToken::Keyword(noir_token::Keyword::Type), + "unchecked" => BorrowedToken::Keyword(noir_token::Keyword::Unchecked), + "unconstrained" => BorrowedToken::Keyword(noir_token::Keyword::Unconstrained), + "use" => BorrowedToken::Keyword(noir_token::Keyword::Use), + "where" => BorrowedToken::Keyword(noir_token::Keyword::Where), + "while" => BorrowedToken::Keyword(noir_token::Keyword::While), + // bool + "true" => BorrowedToken::Bool(true), + "false" => BorrowedToken::Bool(false), + + r"[\t\r\n ]+" => BorrowedToken::Whitespace(_), + + EOF => BorrowedToken::EOF, + } +} + +pub(crate) TopLevelStatement: TopLevelStatement = { + "use" r"[\t\r\n ]+" ";" EOF => { + TopLevelStatement::Import(use_tree) + } +} + +UseTree: UseTree = { + // path::to::ident as SomeAlias + => { + let ident = prefix.pop(); + let kind = UseTreeKind::Path(ident, alias); + UseTree { prefix, kind } + }, +} + +pub(crate) Path: Path = { + "crate" "::" => { + let kind = PathKind::Crate; + let span = Span::from(lo as u32..hi as u32); + Path { segments, kind, span } + }, + + "dep" "::" => { + let kind = PathKind::Dep; + let span = Span::from(lo as u32..hi as u32); + Path { segments, kind, span } + }, + + => { + segments.insert(0, id); + let kind = PathKind::Plain; + let span = Span::from(lo as u32..hi as u32); + Path { segments, 
kind, span } + }, +} + +PathSegments: Vec = { + )*> => { + segments + } +} + +Alias: Ident = { + r"[\t\r\n ]+" "as" r"[\t\r\n ]+" => <>, +} + +Ident: Ident = { + => { + let token = noir_token::Token::Ident(i.to_string()); + let span = Span::from(lo as u32..hi as u32); + Ident::from_token(token, span) + }, +} + +Bool: BorrowedToken<'input> = { + "true" => BorrowedToken::Bool(true), + "false" => BorrowedToken::Bool(false), +}; + diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs index 43a1f96f13f..895d4e07bbd 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs @@ -110,25 +110,31 @@ impl ParserError { impl std::fmt::Display for ParserError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let reason_str: String = if self.reason.is_none() { + "".to_string() + } else { + format!("\nreason: {}", Diagnostic::from(self.clone())) + }; let mut expected = vecmap(&self.expected_tokens, ToString::to_string); expected.append(&mut vecmap(&self.expected_labels, |label| format!("{label}"))); if expected.is_empty() { - write!(f, "Unexpected {} in input", self.found) + write!(f, "Unexpected {} in input{}", self.found, reason_str) } else if expected.len() == 1 { let first = expected.first().unwrap(); let vowel = "aeiou".contains(first.chars().next().unwrap()); write!( f, - "Expected a{} {} but found {}", + "Expected a{} {} but found {}{}", if vowel { "n" } else { "" }, first, - self.found + self.found, + reason_str ) } else { let expected = expected.iter().map(ToString::to_string).collect::>().join(", "); - write!(f, "Unexpected {}, expected one of {}", self.found, expected) + write!(f, "Unexpected {}, expected one of {}{}", self.found, expected, reason_str) } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs index ea96dee8a47..80c5f47f07b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs @@ -97,14 +97,14 @@ where /// Sequence the two parsers. /// Fails if the first parser fails, otherwise forces /// the second parser to succeed while logging any errors. 
-fn then_commit<'a, P1, P2, T1, T2: 'a>( +fn then_commit<'a, P1, P2, T1, T2>( first_parser: P1, second_parser: P2, ) -> impl NoirParser<(T1, T2)> + 'a where P1: NoirParser + 'a, P2: NoirParser + 'a, - T2: Clone + Recoverable, + T2: Clone + Recoverable + 'a, { let second_parser = skip_then_retry_until(second_parser) .map_with_span(|option, span| option.unwrap_or_else(|| Recoverable::error(span))); @@ -112,14 +112,15 @@ where first_parser.then(second_parser) } -fn then_commit_ignore<'a, P1, P2, T1: 'a, T2: 'a>( +fn then_commit_ignore<'a, P1, P2, T1, T2>( first_parser: P1, second_parser: P2, ) -> impl NoirParser + 'a where P1: NoirParser + 'a, P2: NoirParser + 'a, - T2: Clone, + T1: 'a, + T2: Clone + 'a, { let second_parser = skip_then_retry_until(second_parser); first_parser.then_ignore(second_parser) @@ -140,10 +141,10 @@ where first_parser.ignore_then(second_parser) } -fn skip_then_retry_until<'a, P, T: 'a>(parser: P) -> impl NoirParser> + 'a +fn skip_then_retry_until<'a, P, T>(parser: P) -> impl NoirParser> + 'a where P: NoirParser + 'a, - T: Clone, + T: Clone + 'a, { let terminators = [ Token::EOF, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index 0a21465fe87..5706c3ef12f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ -35,7 +35,7 @@ use super::{spanned, Item, ItemKind}; use crate::ast::{ Expression, ExpressionKind, LetStatement, StatementKind, UnresolvedType, UnresolvedTypeData, }; -use crate::lexer::Lexer; +use crate::lexer::{lexer::from_spanned_token_result, Lexer}; use crate::parser::{force, ignore_then_commit, statement_recovery}; use crate::token::{Keyword, Token, TokenKind}; use crate::{ @@ -47,6 +47,7 @@ use crate::{ use chumsky::prelude::*; use iter_extended::vecmap; +use lalrpop_util::lalrpop_mod; use noirc_errors::{Span, Spanned}; mod assertion; @@ -59,6 +60,9 @@ mod primitives; mod structs; mod traits; +// synthesized by LALRPOP +lalrpop_mod!(pub noir_parser); + #[cfg(test)] mod test_helpers; @@ -77,8 +81,79 @@ pub fn parse_program(source_program: &str) -> (ParsedModule, Vec) { let (module, mut parsing_errors) = program().parse_recovery_verbose(tokens); parsing_errors.extend(lexing_errors.into_iter().map(Into::into)); + let parsed_module = module.unwrap_or(ParsedModule { items: vec![] }); + + if cfg!(feature = "experimental_parser") { + for parsed_item in &parsed_module.items { + if lalrpop_parser_supports_kind(&parsed_item.kind) { + match &parsed_item.kind { + ItemKind::Import(parsed_use_tree) => { + prototype_parse_use_tree(Some(parsed_use_tree), source_program); + } + // other kinds prevented by lalrpop_parser_supports_kind + _ => unreachable!(), + } + } + } + } + (parsed_module, parsing_errors) +} + +fn prototype_parse_use_tree(expected_use_tree_opt: Option<&UseTree>, input: &str) { + // TODO(https://github.com/noir-lang/noir/issues/4777): currently skipping + // recursive use trees, e.g. 
"use std::{foo, bar}" + if input.contains('{') { + return; + } + + let mut lexer = Lexer::new(input); + lexer = lexer.skip_whitespaces(false); + let mut errors = Vec::new(); + + // NOTE: this is a hack to get the references working + // => this likely means that we'll want to propagate the <'input> lifetime further into Token + let lexer_result = lexer.collect::>(); + let referenced_lexer_result = lexer_result.iter().map(from_spanned_token_result); + + let calculated = noir_parser::TopLevelStatementParser::new().parse( + input, + &mut errors, + referenced_lexer_result, + ); + + if let Some(expected_use_tree) = expected_use_tree_opt { + assert!( + calculated.is_ok(), + "calculated not Ok(_): {:?}\n\nlexer: {:?}\n\ninput: {:?}", + calculated, + lexer_result, + input + ); + + match calculated.unwrap() { + TopLevelStatement::Import(parsed_use_tree) => { + assert_eq!(expected_use_tree, &parsed_use_tree); + } + unexpected_calculated => { + panic!( + "expected a TopLevelStatement::Import, but found: {:?}", + unexpected_calculated + ) + } + } + } else { + assert!( + calculated.is_err(), + "calculated not Err(_): {:?}\n\nlexer: {:?}\n\ninput: {:?}", + calculated, + lexer_result, + input + ); + } +} - (module.unwrap_or(ParsedModule { items: vec![] }), parsing_errors) +fn lalrpop_parser_supports_kind(kind: &ItemKind) -> bool { + matches!(kind, ItemKind::Import(_)) } /// program: module EOF @@ -1512,33 +1587,53 @@ mod test { #[test] fn parse_use() { - parse_all( - use_statement(), - vec![ - "use std::hash", - "use std", - "use foo::bar as hello", - "use bar as bar", - "use foo::{}", - "use foo::{bar,}", - "use foo::{bar, hello}", - "use foo::{bar as bar2, hello}", - "use foo::{bar as bar2, hello::{foo}, nested::{foo, bar}}", - "use dep::{std::println, bar::baz}", - ], - ); + let valid_use_statements = [ + "use std::hash", + "use std", + "use foo::bar as hello", + "use bar as bar", + "use foo::{}", + "use foo::{bar,}", + "use foo::{bar, hello}", + "use foo::{bar as bar2, hello}", + "use foo::{bar as bar2, hello::{foo}, nested::{foo, bar}}", + "use dep::{std::println, bar::baz}", + ]; - parse_all_failing( - use_statement(), - vec![ - "use std as ;", - "use foobar as as;", - "use hello:: as foo;", - "use foo bar::baz", - "use foo bar::{baz}", - "use foo::{,}", - ], - ); + let invalid_use_statements = [ + "use std as ;", + "use foobar as as;", + "use hello:: as foo;", + "use foo bar::baz", + "use foo bar::{baz}", + "use foo::{,}", + ]; + + let use_statements = valid_use_statements + .into_iter() + .map(|valid_str| (valid_str, true)) + .chain(invalid_use_statements.into_iter().map(|invalid_str| (invalid_str, false))); + + for (use_statement_str, expect_valid) in use_statements { + let mut use_statement_str = use_statement_str.to_string(); + let expected_use_statement = if expect_valid { + let (result_opt, _diagnostics) = + parse_recover(&use_statement(), &use_statement_str); + use_statement_str.push(';'); + match result_opt.unwrap() { + TopLevelStatement::Import(expected_use_statement) => { + Some(expected_use_statement) + } + _ => unreachable!(), + } + } else { + let result = parse_with(&use_statement(), &use_statement_str); + assert!(result.is_err()); + None + }; + + prototype_parse_use_tree(expected_use_statement.as_ref(), &use_statement_str); + } } #[test] diff --git a/noir/noir-repo/compiler/wasm/src/types/noir_artifact.ts b/noir/noir-repo/compiler/wasm/src/types/noir_artifact.ts index f241b539dc7..6ecc3ccd56f 100644 --- a/noir/noir-repo/compiler/wasm/src/types/noir_artifact.ts +++ 
b/noir/noir-repo/compiler/wasm/src/types/noir_artifact.ts @@ -151,6 +151,16 @@ export interface DebugInfo { locations: Record; } +/** + * The debug information for a given program. + */ +export interface ProgramDebugInfo { + /** + * An array that maps to each function of a program. + */ + debug_infos: Array; +} + /** * Maps a file ID to its metadata for debugging purposes. */ diff --git a/noir/noir-repo/compiler/wasm/test/compiler/shared/compile.test.ts b/noir/noir-repo/compiler/wasm/test/compiler/shared/compile.test.ts index 52cef14968b..f9e37530cbc 100644 --- a/noir/noir-repo/compiler/wasm/test/compiler/shared/compile.test.ts +++ b/noir/noir-repo/compiler/wasm/test/compiler/shared/compile.test.ts @@ -5,6 +5,7 @@ import { ContractCompilationArtifacts, DebugFileMap, DebugInfo, + ProgramDebugInfo, NoirFunctionEntry, ProgramArtifact, ProgramCompilationArtifacts, @@ -15,7 +16,7 @@ export function shouldCompileProgramIdentically( expect: typeof Expect, timeout = 5000, ) { - it('both nargo and noir_wasm should compile identically', async () => { + it('both nargo and noir_wasm should compile program identically', async () => { // Compile! const { nargoArtifact, noirWasmArtifact } = await compileFn(); @@ -51,7 +52,7 @@ export function shouldCompileContractIdentically( expect: typeof Expect, timeout = 5000, ) { - it('both nargo and noir_wasm should compile identically', async () => { + it('both nargo and noir_wasm should compile contract identically', async () => { // Compile! const { nargoArtifact, noirWasmArtifact } = await compileFn(); @@ -90,7 +91,7 @@ function extractDebugInfos(fns: NoirFunctionEntry[]) { return fns.map((fn) => { const debugSymbols = inflateDebugSymbols(fn.debug_symbols); delete (fn as Partial).debug_symbols; - clearFileIdentifiers(debugSymbols); + clearFileIdentifiersProgram(debugSymbols); return debugSymbols; }); } @@ -113,6 +114,12 @@ function deleteContractDebugMetadata(contract: ContractArtifact) { return [extractDebugInfos(contract.functions), fileMap]; } +function clearFileIdentifiersProgram(debugSymbols: ProgramDebugInfo) { + debugSymbols.debug_infos.map((debug_info) => { + clearFileIdentifiers(debug_info); + }); +} + /** Clears file identifiers from a set of debug symbols. 
*/ function clearFileIdentifiers(debugSymbols: DebugInfo) { for (const loc of Object.values(debugSymbols.locations)) { diff --git a/noir/noir-repo/cspell.json b/noir/noir-repo/cspell.json index 16de9757fb8..bf3040265c2 100644 --- a/noir/noir-repo/cspell.json +++ b/noir/noir-repo/cspell.json @@ -113,6 +113,7 @@ "Maddiaa", "mathbb", "memfs", + "memset", "merkle", "metas", "minreq", diff --git a/noir/noir-repo/deny.toml b/noir/noir-repo/deny.toml index eff233687e8..db7e53cad24 100644 --- a/noir/noir-repo/deny.toml +++ b/noir/noir-repo/deny.toml @@ -58,7 +58,7 @@ allow = [ # bitmaps 2.1.0, im 15.1.0 "MPL-2.0", # Boost Software License - "BSL-1.0", + "BSL-1.0" ] # Allow 1 or more licenses on a per-crate basis, so that particular licenses @@ -70,6 +70,7 @@ exceptions = [ { allow = ["CC0-1.0"], name = "more-asserts" }, { allow = ["CC0-1.0"], name = "jsonrpc" }, { allow = ["CC0-1.0"], name = "notify" }, + { allow = ["CC0-1.0"], name = "tiny-keccak" }, { allow = ["MPL-2.0"], name = "sized-chunks" }, { allow = ["MPL-2.0"], name = "webpki-roots" }, diff --git a/noir/noir-repo/docs/docs/getting_started/tooling/index.mdx b/noir/noir-repo/docs/docs/getting_started/tooling/index.mdx deleted file mode 100644 index ac480f3c9f5..00000000000 --- a/noir/noir-repo/docs/docs/getting_started/tooling/index.mdx +++ /dev/null @@ -1,38 +0,0 @@ ---- -title: Tooling -Description: This section provides information about the various tools and utilities available for Noir development. It covers the Noir playground, IDE tools, Codespaces, and community projects. -Keywords: [Noir, Development, Playground, IDE Tools, Language Service Provider, VS Code Extension, Codespaces, noir-starter, Community Projects, Awesome Noir Repository, Developer Tooling] ---- - -Noir is meant to be easy to develop with. For that reason, a number of utilities have been put together to ease the development process as much as feasible in the zero-knowledge world. - -## Playground - -The Noir playground is an easy way to test small ideas, share snippets, and integrate in other websites. You can access it at [play.noir-lang.org](https://play.noir-lang.org). - -## IDE tools - -When you install Nargo, you're also installing a Language Service Provider (LSP), which can be used by IDEs to provide syntax highlighting, codelens, warnings, and more. - -The easiest way to use these tools is by installing the [Noir VS Code extension](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). - -## Codespaces - -Some Noir repos have leveraged Codespaces in order to ease the development process. You can visit the [noir-starter](https://github.com/noir-lang/noir-starter) for an example. - - - -## GitHub Actions - -You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as -installing `noirup` and running tests in your GitHub Action `yml` file. - -See the -[config file in the Noir repo](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) for an example usage. - -## Community projects - -As an open-source project, Noir has received many contributions over time. 
Some of them are related with developer tooling, and you can see some of them in [Awesome Noir repository](https://github.com/noir-lang/awesome-noir#dev-tools) diff --git a/noir/noir-repo/docs/docs/how_to/debugger/_category_.json b/noir/noir-repo/docs/docs/how_to/debugger/_category_.json new file mode 100644 index 00000000000..cc2cbb1c253 --- /dev/null +++ b/noir/noir-repo/docs/docs/how_to/debugger/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Debugging", + "position": 5, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/docs/how_to/debugger/debugging_with_the_repl.md b/noir/noir-repo/docs/docs/how_to/debugger/debugging_with_the_repl.md new file mode 100644 index 00000000000..09e5bae68ad --- /dev/null +++ b/noir/noir-repo/docs/docs/how_to/debugger/debugging_with_the_repl.md @@ -0,0 +1,164 @@ +--- +title: Using the REPL Debugger +description: + Step by step guide on how to debug your Noir circuits with the REPL Debugger. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + REPL, + ] +sidebar_position: 1 +--- + +#### Pre-requisites + +In order to use the REPL debugger, first you need to install recent enough versions of Nargo and vscode-noir. + +## Debugging a simple circuit + +Let's debug a simple circuit: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +To start the REPL debugger, using a terminal, go to a Noir circuit's home directory. Then: + +`$ nargo debug` + +You should be seeing this in your terminal: + +``` +[main] Starting debugger +At ~/noir-examples/recursion/circuits/main/src/main.nr:1:9 + 1 -> fn main(x : Field, y : pub Field) { + 2 assert(x != y); + 3 } +> +``` + +The debugger displays the current Noir code location, and it is now waiting for us to drive it. + +Let's first take a look at the available commands. For that we'll use the `help` command. + +``` +> help +Available commands: + + opcodes display ACIR opcodes + into step into to the next opcode + next step until a new source location is reached + out step until a new source location is reached + and the current stack frame is finished + break LOCATION:OpcodeLocation add a breakpoint at an opcode location + over step until a new source location is reached + without diving into function calls + restart restart the debugging session + delete LOCATION:OpcodeLocation delete breakpoint at an opcode location + witness show witness map + witness index:u32 display a single witness from the witness map + witness index:u32 value:String update a witness with the given value + memset index:usize value:String update a memory cell with the given + value + continue continue execution until the end of the + program + vars show variable values available at this point + in execution + stacktrace display the current stack trace + memory show memory (valid when executing unconstrained code) + step step to the next ACIR opcode + +Other commands: + + help Show this help message + quit Quit repl + +``` + +Some commands operate only for unconstrained functions, such as `memory` and `memset`. If you try to use them while execution is paused at an ACIR opcode, the debugger will simply inform you that you are not executing unconstrained code: + +``` +> memory +Unconstrained VM memory not available +> +``` + +Before continuing, we can take a look at the initial witness map: + +``` +> witness +_0 = 1 +_1 = 2 +> +``` + +Cool, since `x==1`, `y==2`, and we want to check that `x != y`, our circuit should succeed. 
At this point we could intervene and use the witness setter command to change one of the witnesses. Let's set `y=3`, then back to 2, so we don't affect the expected result: + +``` +> witness +_0 = 1 +_1 = 2 +> witness 1 3 +_1 = 3 +> witness +_0 = 1 +_1 = 3 +> witness 1 2 +_1 = 2 +> witness +_0 = 1 +_1 = 2 +> +``` + +Now we can inspect the current state of local variables. For that we use the `vars` command. + +``` +> vars +> +``` + +We currently have no vars in context, since we are at the entry point of the program. Let's use `next` to execute until the next point in the program. + +``` +> vars +> next +At ~/noir-examples/recursion/circuits/main/src/main.nr:1:20 + 1 -> fn main(x : Field, y : pub Field) { + 2 assert(x != y); + 3 } +> vars +x:Field = 0x01 +``` + +As a result of stepping, the variable `x`, whose initial value comes from the witness map, is now in context and returned by `vars`. + +``` +> next + 1 fn main(x : Field, y : pub Field) { + 2 -> assert(x != y); + 3 } +> vars +y:Field = 0x02 +x:Field = 0x01 +``` + +Stepping again we can finally see both variables and their values. And now we can see that the next assertion should succeed. + +Let's continue to the end: + +``` +> continue +(Continuing execution...) +Finished execution +> q +[main] Circuit witness successfully solved +``` + +Upon quitting the debugger after a solved circuit, the resulting circuit witness gets saved, equivalent to what would happen if we had run the same circuit with `nargo execute`. + +We just went through the basics of debugging using Noir REPL debugger. For a comprehensive reference, check out [the reference page](../../reference/debugger/debugger_repl.md). diff --git a/noir/noir-repo/docs/docs/how_to/debugger/debugging_with_vs_code.md b/noir/noir-repo/docs/docs/how_to/debugger/debugging_with_vs_code.md new file mode 100644 index 00000000000..a5858c1a5eb --- /dev/null +++ b/noir/noir-repo/docs/docs/how_to/debugger/debugging_with_vs_code.md @@ -0,0 +1,68 @@ +--- +title: Using the VS Code Debugger +description: + Step by step guide on how to debug your Noir circuits with the VS Code Debugger configuration and features. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + VS Code, + IDE, + ] +sidebar_position: 0 +--- + +This guide will show you how to use VS Code with the vscode-noir extension to debug a Noir project. + +#### Pre-requisites + +- Nargo +- vscode-noir +- A Noir project with a `Nargo.toml`, `Prover.toml` and at least one Noir (`.nr`) containing an entry point function (typically `main`). + +## Running the debugger + +The easiest way to start debugging is to open the file you want to debug, and press `F5`. This will cause the debugger to launch, using your `Prover.toml` file as input. + +You should see something like this: + +![Debugger launched](@site/static/img/debugger/1-started.png) + +Let's inspect the state of the program. For that, we open VS Code's _Debug pane_. Look for this icon: + +![Debug pane icon](@site/static/img/debugger/2-icon.png) + +You will now see two categories of variables: Locals and Witness Map. + +![Debug pane expanded](@site/static/img/debugger/3-debug-pane.png) + +1. **Locals**: variables of your program. At this point in execution this section is empty, but as we step through the code it will get populated by `x`, `result`, `digest`, etc. + +2. **Witness map**: these are initially populated from your project's `Prover.toml` file. In this example, they will be used to populate `x` and `result` at the beginning of the `main` function. 
+ +Most of the time you will probably focus on locals, as they represent the high level state of your program. + +You might want to inspect the witness map if you are trying to solve a very low-level issue in the compiler or the runtime itself, so it mostly concerns advanced or niche users. + +Let's step through the program by using the debugger buttons or their corresponding keyboard shortcuts. + +![Debugger buttons](@site/static/img/debugger/4-debugger-buttons.png) + +Now we can see in the variables pane that there are values for `digest`, `result` and `x`. + +![Inspecting locals](@site/static/img/debugger/5-assert.png) + +We can also inspect the values of variables by hovering directly over them in the code. + +![Hover locals](@site/static/img/debugger/6-hover.png) + +Let's set a breakpoint at the `keccak256` function, so we can continue execution up to the point when it's first invoked without having to go one step at a time. + +We just need to click to the right of line number 18. Once the breakpoint appears, we can click the `continue` button or use its corresponding keyboard shortcut (`F5` by default). + +![Breakpoint](@site/static/img/debugger/7-break.png) + +Now we are debugging the `keccak256` function; notice the _Call Stack pane_ at the lower right. This lets us inspect the current call stack of our process. + +That covers most of the current debugger functionality. Check out [the reference](../../reference/debugger/debugger_vscode.md) for more details on how to configure the debugger. \ No newline at end of file diff --git a/noir/noir-repo/docs/docs/how_to/merkle-proof.mdx b/noir/noir-repo/docs/docs/how_to/merkle-proof.mdx index 003c7019a93..16c425bed76 100644 --- a/noir/noir-repo/docs/docs/how_to/merkle-proof.mdx +++ b/noir/noir-repo/docs/docs/how_to/merkle-proof.mdx @@ -5,6 +5,7 @@ description: merkle tree with a specified root, at a given index. keywords: [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +sidebar_position: 4 --- Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is diff --git a/noir/noir-repo/docs/docs/noir/concepts/functions.md b/noir/noir-repo/docs/docs/noir/concepts/functions.md index 2c9bc33fdfc..f656cdfd97a 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/functions.md +++ b/noir/noir-repo/docs/docs/noir/concepts/functions.md @@ -62,7 +62,7 @@ fn main(x : [Field]) // can't compile, has variable size fn main(....// i think you got it by now ``` -Keep in mind [tests](../../getting_started/tooling/testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: +Keep in mind [tests](../../tooling/testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: ```rust fn main(x : [Field]) { @@ -190,7 +190,7 @@ Supported attributes include: - **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` - **field**: Used to enable conditional compilation of code depending on the field size. See below for more details - **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](../../reference/NoirJS/noir_js/index.md) for more details. -- **test**: mark the function as unit tests.
See [Tests](../../getting_started/tooling/testing.md) for more details +- **test**: mark the function as unit tests. See [Tests](../../tooling/testing.md) for more details ### Field Attribute diff --git a/noir/noir-repo/docs/docs/noir/concepts/oracles.md b/noir/noir-repo/docs/docs/noir/concepts/oracles.md index 2e6a6818d48..aa380b5f7b8 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/oracles.md +++ b/noir/noir-repo/docs/docs/noir/concepts/oracles.md @@ -11,6 +11,12 @@ keywords: sidebar_position: 6 --- +:::note + +This is an experimental feature that is not fully documented. If you notice any outdated information or potential improvements to this page, pull request contributions are very welcome: https://github.com/noir-lang/noir + +::: + Noir has support for Oracles via RPC calls. This means Noir will make an RPC call and use the return value for proof generation. Since Oracles are not resolved by Noir, they are [`unconstrained` functions](./unconstrained.md) @@ -21,3 +27,5 @@ You can declare an Oracle through the `#[oracle()]` flag. Example: #[oracle(get_number_sequence)] unconstrained fn get_number_sequence(_size: Field) -> [Field] {} ``` + +The timeout for calls to an external RPC oracle resolver can be set with the `NARGO_FOREIGN_CALL_TIMEOUT` environment variable. The timeout is given in milliseconds. diff --git a/noir/noir-repo/docs/docs/getting_started/tooling/_category_.json b/noir/noir-repo/docs/docs/reference/debugger/_category_.json similarity index 53% rename from noir/noir-repo/docs/docs/getting_started/tooling/_category_.json rename to noir/noir-repo/docs/docs/reference/debugger/_category_.json index 55804c03a71..27869205ad3 100644 --- a/noir/noir-repo/docs/docs/getting_started/tooling/_category_.json +++ b/noir/noir-repo/docs/docs/reference/debugger/_category_.json @@ -1,6 +1,6 @@ { - "position": 2, - "label": "Tooling", + "label": "Debugger", + "position": 1, "collapsible": true, "collapsed": true } diff --git a/noir/noir-repo/docs/docs/reference/debugger/debugger_known_limitations.md b/noir/noir-repo/docs/docs/reference/debugger/debugger_known_limitations.md new file mode 100644 index 00000000000..936d416ac4b --- /dev/null +++ b/noir/noir-repo/docs/docs/reference/debugger/debugger_known_limitations.md @@ -0,0 +1,59 @@ +--- +title: Known limitations +description: + An overview of known limitations of the current version of the Noir debugger +keywords: + [ + Nargo, + Noir Debugger, + VS Code, + ] +sidebar_position: 2 +--- + +# Debugger Known Limitations + +There are currently some limits to what the debugger can observe. + +## Mutable references + +The debugger is currently blind to any state mutated via a mutable reference. For example, in: + +``` +let mut x = 1; +let y = &mut x; +*y = 2; +``` + +The update on `x` will not be observed by the debugger. That means, when running `vars` from the debugger REPL, or inspecting the _local variables_ pane in the VS Code debugger, `x` will appear with value 1 despite having executed `*y = 2;`. + +## Variables of type function or mutable references are opaque + +When inspecting variables, any variable of type `Function` or `MutableReference` will render its value as `<>` or `<>`. + +## Debugger instrumentation affects resulting ACIR + +In order to make the state of local variables observable, the debugger compiles Noir circuits with interleaved foreign calls that track any mutations to them.
While this works (except in the cases described above) and doesn't introduce any behavior changes, it does, as a side effect, produce bigger bytecode. In particular, when running the command `opcodes` on the REPL debugger, you will notice Unconstrained VM blocks that look like this: + +``` +... +5 BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [], q_c: 2 }), Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(2))], q_c: 0 })] + | outputs=[] + 5.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 5.1 | Mov { destination: RegisterIndex(3), source: RegisterIndex(1) } + 5.2 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 5.3 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 5.4 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 5.5 | Mov { destination: RegisterIndex(3), source: RegisterIndex(3) } + 5.6 | Call { location: 8 } + 5.7 | Stop + 5.8 | ForeignCall { function: "__debug_var_assign", destinations: [], inputs: [RegisterIndex(RegisterIndex(2)), RegisterIndex(RegisterIndex(3))] } +... +``` + +If you are interested in debugging/inspecting compiled ACIR without these synthetic changes, you can invoke the REPL debugger with the `--skip-instrumentation` flag or launch the VS Code debugger with the `skipInstrumentation` property set to true in its launch configuration. You can find more details about those in the [Debugger REPL reference](debugger_repl.md) and the [VS Code Debugger reference](debugger_vscode.md). + +:::note +Skipping debugger instrumentation means you won't be able to inspect values of local variables. +::: + diff --git a/noir/noir-repo/docs/docs/reference/debugger/debugger_repl.md b/noir/noir-repo/docs/docs/reference/debugger/debugger_repl.md new file mode 100644 index 00000000000..46e2011304e --- /dev/null +++ b/noir/noir-repo/docs/docs/reference/debugger/debugger_repl.md @@ -0,0 +1,360 @@ +--- +title: REPL Debugger +description: + Noir Debugger REPL options and commands. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + REPL, + ] +sidebar_position: 1 +--- + +## Running the REPL debugger + +`nargo debug [OPTIONS] [WITNESS_NAME]` + +Runs the Noir REPL debugger. If a `WITNESS_NAME` is provided, the debugger writes the resulting execution witness to a `WITNESS_NAME` file. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover]| +| `--package ` | The name of the package to debug | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +None of these options are required. + +:::note +Since the debugger starts by compiling the target package, all Noir compiler options are also available. Check out the [compiler reference](../nargo_commands.md#nargo-compile) to learn more about the compiler options. +::: + +## REPL commands + +Once the debugger is running, it accepts the following commands. + +#### `help` (h) + +Displays the menu of available commands.
+ +``` +> help +Available commands: + + opcodes display ACIR opcodes + into step into to the next opcode + next step until a new source location is reached + out step until a new source location is reached + and the current stack frame is finished + break LOCATION:OpcodeLocation add a breakpoint at an opcode location + over step until a new source location is reached + without diving into function calls + restart restart the debugging session + delete LOCATION:OpcodeLocation delete breakpoint at an opcode location + witness show witness map + witness index:u32 display a single witness from the witness map + witness index:u32 value:String update a witness with the given value + memset index:usize value:String update a memory cell with the given + value + continue continue execution until the end of the + program + vars show variable values available at this point + in execution + stacktrace display the current stack trace + memory show memory (valid when executing unconstrained code) value + step step to the next ACIR opcode + +Other commands: + + help Show this help message + quit Quit repl + +``` + +### Stepping through programs + +#### `next` (n) + +Step until the next Noir source code location. While other commands, such as [`into`](#into-i) and [`step`](#step-s), allow for finer grained control of the program's execution at the opcode level, `next` is source code centric. For example: + +``` +3 ... +4 fn main(x: u32) { +5 assert(entry_point(x) == 2); +6 swap_entry_point(x, x + 1); +7 -> assert(deep_entry_point(x) == 4); +8 multiple_values_entry_point(x); +9 } +``` + + +Using `next` here would cause the debugger to jump to the definition of `deep_entry_point` (if available). + +If you want to step over `deep_entry_point` and go straight to line 8, use [the `over` command](#over) instead. + +#### `over` + +Step until the next source code location, without diving into function calls. For example: + +``` +3 ... +4 fn main(x: u32) { +5 assert(entry_point(x) == 2); +6 swap_entry_point(x, x + 1); +7 -> assert(deep_entry_point(x) == 4); +8 multiple_values_entry_point(x); +9 } +``` + + +Using `over` here would cause the debugger to execute until line 8 (`multiple_values_entry_point(x);`). + +If you want to step into `deep_entry_point` instead, use [the `next` command](#next-n). + +#### `out` + +Step until the end of the current function call. For example: + +``` + 3 ... + 4 fn main(x: u32) { + 5 assert(entry_point(x) == 2); + 6 swap_entry_point(x, x + 1); + 7 -> assert(deep_entry_point(x) == 4); + 8 multiple_values_entry_point(x); + 9 } + 10 + 11 unconstrained fn returns_multiple_values(x: u32) -> (u32, u32, u32, u32) { + 12 ... + ... + 55 + 56 unconstrained fn deep_entry_point(x: u32) -> u32 { + 57 -> level_1(x + 1) + 58 } + +``` + +Running `out` here will resume execution until line 8. + +#### `step` (s) + +Skips to the next ACIR code. A compiled Noir program is a sequence of ACIR opcodes. However, an unconstrained VM opcode denotes the start of an unconstrained code block, to be executed by the unconstrained VM. 
For example (redacted for brevity): + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +The `->` here shows the debugger paused at an ACIR opcode: `BRILLIG`, at index 1, which denotes an unconstrained code block is about to start. + +Using the `step` command at this point would result in the debugger stopping at ACIR opcode 2, `EXPR`, skipping unconstrained computation steps. + +Use [the `into` command](#into-i) instead if you want to follow unconstrained computation step by step. + +#### `into` (i) + +Steps into the next opcode. A compiled Noir program is a sequence of ACIR opcodes. However, a BRILLIG opcode denotes the start of an unconstrained code block, to be executed by the unconstrained VM. For example (redacted for brevity): + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +The `->` here shows the debugger paused at an ACIR opcode: `BRILLIG`, at index 1, which denotes an unconstrained code block is about to start. + +Using the `into` command at this point would result in the debugger stopping at opcode 1.0, `Mov ...`, allowing the debugger user to follow unconstrained computation step by step. + +Use [the `step` command](#step-s) instead if you want to skip to the next ACIR code directly. + +#### `continue` (c) + +Continues execution until the next breakpoint, or the end of the program. + +#### `restart` (res) + +Interrupts execution, and restarts a new debugging session from scratch. + +#### `opcodes` (o) + +Display the program's ACIR opcode sequence. For example: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +### Breakpoints + +#### `break [Opcode]` (or shorthand `b [Opcode]`) + +Sets a breakpoint on the specified opcode index. To get a list of the program opcode numbers, see [the `opcode` command](#opcodes-o). 
For example: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +In this example, issuing a `break 1.2` command adds break on opcode 1.2, as denoted by the `*` character: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | * Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +Running [the `continue` command](#continue-c) at this point would cause the debugger to execute the program until opcode 1.2. + +#### `delete [Opcode]` (or shorthand `d [Opcode]`) + +Deletes a breakpoint at an opcode location. Usage is analogous to [the `break` command](#). + +### Variable inspection + +#### vars + +Show variable values available at this point in execution. + +:::note +The ability to inspect variable values from the debugger depends on compilation to be run in a special debug instrumentation mode. This instrumentation weaves variable tracing code with the original source code. + +So variable value inspection comes at the expense of making the resulting ACIR bytecode bigger and harder to understand and optimize. + +If you find this compromise unacceptable, you can run the debugger with the flag `--skip-debug-instrumentation`. This will compile your circuit without any additional debug information, so the resulting ACIR bytecode will be identical to the one produced by standard Noir compilation. However, if you opt for this, the `vars` command will not be available while debugging. +::: + + +### Stacktrace + +#### `stacktrace` + +Displays the current stack trace. + + +### Witness map + +#### `witness` (w) + +Show witness map. For example: + +``` +_0 = 0 +_1 = 2 +_2 = 1 +``` + +#### `witness [Witness Index]` + +Display a single witness from the witness map. For example: + +``` +> witness 1 +_1 = 2 +``` + +#### `witness [Witness Index] [New value]` + +Overwrite the given index with a new value. For example: + +``` +> witness 1 3 +_1 = 3 +``` + + +### Unconstrained VM memory + +#### `memory` + +Show unconstrained VM memory state. For example: + +``` +> memory +At opcode 1.13: Store { destination_pointer: RegisterIndex(0), source: RegisterIndex(3) } +... +> registers +0 = 0 +1 = 10 +2 = 0 +3 = 1 +4 = 1 +5 = 2³² +6 = 1 +> into +At opcode 1.14: Const { destination: RegisterIndex(5), value: Value { inner: 1 } } +... +> memory +0 = 1 +> +``` + +In the example above: we start with clean memory, then step through a `Store` opcode which stores the value of register 3 (1) into the memory address stored in register 0 (0). Thus now `memory` shows memory address 0 contains value 1. 
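To make the `Store` step in that walkthrough concrete, here is a tiny model of the register/memory interaction it describes, written as plain Rust. This is only an illustrative sketch with made-up containers, not the actual Brillig VM implementation:

```rust
fn main() {
    // Register values as shown by the `registers` output above.
    let registers: Vec<u64> = vec![0, 10, 0, 1, 1, 1 << 32, 1];
    // Memory starts out clean.
    let mut memory = vec![0u64; 8];

    // Store { destination_pointer: RegisterIndex(0), source: RegisterIndex(3) }:
    // write the value held in register 3 to the address held in register 0.
    let destination_address = registers[0] as usize; // register 0 holds address 0
    memory[destination_address] = registers[3];      // register 3 holds the value 1

    // Matches the debugger output: `memory` now reports address 0 = 1.
    assert_eq!(memory[0], 1);
}
```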
+ +:::note +This command is only functional while the debugger is executing unconstrained code. +::: + +#### `memset [Memory address] [New value]` + +Update a memory cell with the given value. For example: + +``` +> memory +0 = 1 +> memset 0 2 +> memory +0 = 2 +> memset 1 4 +> memory +0 = 2 +1 = 4 +> +``` + +:::note +This command is only functional while the debugger is executing unconstrained code. +::: \ No newline at end of file diff --git a/noir/noir-repo/docs/docs/reference/debugger/debugger_vscode.md b/noir/noir-repo/docs/docs/reference/debugger/debugger_vscode.md new file mode 100644 index 00000000000..c027332b3b0 --- /dev/null +++ b/noir/noir-repo/docs/docs/reference/debugger/debugger_vscode.md @@ -0,0 +1,82 @@ +--- +title: VS Code Debugger +description: + VS Code Debugger configuration and features. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + VS Code, + IDE, + ] +sidebar_position: 0 +--- + +# VS Code Noir Debugger Reference + +The Noir debugger enabled by the vscode-noir extension ships with default settings such that the most common scenario should run without any additional configuration steps. + +These defaults can nevertheless be overridden by defining a launch configuration file. This page provides a reference for the properties you can override via a launch configuration file, as well as documenting the Nargo `dap` command, which is a dependency of the VS Code Noir debugger. + + +## Creating and editing launch configuration files + +To create a launch configuration file from VS Code, open the _debug pane_, and click on _create a launch.json file_. + +![Creating a launch configuration file](@site/static/img/debugger/ref1-create-launch.png) + +A `launch.json` file will be created, populated with basic defaults. + +### Noir Debugger launch.json properties + +#### projectFolder + +_String, optional._ + +Absolute path to the Nargo project to debug. By default, it is dynamically determined by looking for the nearest `Nargo.toml` file to the active file at the moment of launching the debugger. + +#### proverName + +_String, optional._ + +Name of the prover input to use. Defaults to `Prover`, which looks for a file named `Prover.toml` at the `projectFolder`. + +#### generateAcir + +_Boolean, optional._ + +If true, generate ACIR opcodes instead of unconstrained opcodes which will be closer to release binaries but less convenient for debugging. Defaults to `false`. + +#### skipInstrumentation + +_Boolean, optional._ + +Skips variables debugging instrumentation of code, making debugging less convenient but the resulting binary smaller and closer to production. Defaults to `false`. + +:::note +Skipping instrumentation causes the debugger to be unable to inspect local variables. +::: + +## `nargo dap [OPTIONS]` + +When run without any option flags, it starts the Nargo Debug Adapter Protocol server, which acts as the debugging backend for the VS Code Noir Debugger. + +All option flags are related to preflight checks. The Debug Adapter Protocol specifies how errors are to be informed from a running DAP server, but it doesn't specify mechanisms to communicate server initialization errors between the DAP server and its client IDE. + +Thus `nargo dap` ships with a _preflight check_ mode. If flag `--preflight-check` and the rest of the `--preflight-*` flags are provided, Nargo will run the same initialization routine except it will not start the DAP server. + +`vscode-noir` will then run `nargo dap` in preflight check mode first before a debugging session starts. 
If the preflight check ends in error, vscode-noir will present stderr and stdout output from this process through its own Output pane in VS Code. This makes it possible for users to diagnose what pieces of configuration might be wrong or missing in case of initialization errors. + +If the preflight check succeeds, `vscode-noir` proceeds to start the DAP server normally but running `nargo dap` without any additional flags. + +### Options + +| Option | Description | +| --------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | +| `--preflight-check` | If present, dap runs in preflight check mode. | +| `--preflight-project-folder ` | Absolute path to the project to debug for preflight check. | +| `--preflight-prover-name ` | Name of prover file to use for preflight check | +| `--preflight-generate-acir` | Optional. If present, compile in ACIR mode while running preflight check. | +| `--preflight-skip-instrumentation` | Optional. If present, compile without introducing debug instrumentation while running preflight check. | +| `-h, --help` | Print help. | diff --git a/noir/noir-repo/docs/docs/tooling/debugger.md b/noir/noir-repo/docs/docs/tooling/debugger.md new file mode 100644 index 00000000000..184c436068f --- /dev/null +++ b/noir/noir-repo/docs/docs/tooling/debugger.md @@ -0,0 +1,27 @@ +--- +title: Debugger +description: Learn about the Noir Debugger, in its REPL or VS Code versions. +keywords: [Nargo, VSCode, Visual Studio Code, REPL, Debugger] +sidebar_position: 2 +--- + +# Noir Debugger + +There are currently two ways of debugging Noir programs: + +1. From VS Code, via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). +2. Via the REPL debugger, which ships with Nargo. + +In order to use either version of the debugger, you will need to install recent enough versions of Noir, [Nargo](../getting_started/installation) and vscode-noir: + +- Noir 0.xx +- Nargo 0.xx +- vscode-noir 0.xx + +:::info +At the moment, the debugger supports debugging binary projects, but not contracts. +::: + +We cover the VS Code Noir debugger more in depth in [its VS Code debugger how-to guide](../how_to/debugger/debugging_with_vs_code.md) and [the reference](../reference/debugger/debugger_vscode.md). + +The REPL debugger is discussed at length in [the REPL debugger how-to guide](../how_to/debugger/debugging_with_the_repl.md) and [the reference](../reference/debugger/debugger_repl.md). 
diff --git a/noir/noir-repo/docs/docs/getting_started/tooling/language_server.md b/noir/noir-repo/docs/docs/tooling/language_server.md similarity index 100% rename from noir/noir-repo/docs/docs/getting_started/tooling/language_server.md rename to noir/noir-repo/docs/docs/tooling/language_server.md diff --git a/noir/noir-repo/docs/docs/getting_started/tooling/testing.md b/noir/noir-repo/docs/docs/tooling/testing.md similarity index 100% rename from noir/noir-repo/docs/docs/getting_started/tooling/testing.md rename to noir/noir-repo/docs/docs/tooling/testing.md diff --git a/noir/noir-repo/docs/sidebars.js b/noir/noir-repo/docs/sidebars.js index f1e79ba9ebc..cf7e852fed5 100644 --- a/noir/noir-repo/docs/sidebars.js +++ b/noir/noir-repo/docs/sidebars.js @@ -65,6 +65,11 @@ export default { label: 'Reference', items: [{ type: 'autogenerated', dirName: 'reference' }], }, + { + type: 'category', + label: 'Tooling', + items: [{ type: 'autogenerated', dirName: 'tooling' }], + }, { type: 'html', value: '
', diff --git a/noir/noir-repo/docs/static/img/debugger/1-started.png b/noir/noir-repo/docs/static/img/debugger/1-started.png new file mode 100644 index 00000000000..6f764d4e601 Binary files /dev/null and b/noir/noir-repo/docs/static/img/debugger/1-started.png differ diff --git a/noir/noir-repo/docs/static/img/debugger/2-icon.png b/noir/noir-repo/docs/static/img/debugger/2-icon.png new file mode 100644 index 00000000000..31706670ccb Binary files /dev/null and b/noir/noir-repo/docs/static/img/debugger/2-icon.png differ diff --git a/noir/noir-repo/docs/static/img/debugger/3-debug-pane.png b/noir/noir-repo/docs/static/img/debugger/3-debug-pane.png new file mode 100644 index 00000000000..24c112da96f Binary files /dev/null and b/noir/noir-repo/docs/static/img/debugger/3-debug-pane.png differ diff --git a/noir/noir-repo/docs/static/img/debugger/4-debugger-buttons.png b/noir/noir-repo/docs/static/img/debugger/4-debugger-buttons.png new file mode 100644 index 00000000000..64c1e05be8a Binary files /dev/null and b/noir/noir-repo/docs/static/img/debugger/4-debugger-buttons.png differ diff --git a/noir/noir-repo/docs/static/img/debugger/5-assert.png b/noir/noir-repo/docs/static/img/debugger/5-assert.png new file mode 100644 index 00000000000..0bfed6562af Binary files /dev/null and b/noir/noir-repo/docs/static/img/debugger/5-assert.png differ diff --git a/noir/noir-repo/docs/static/img/debugger/6-hover.png b/noir/noir-repo/docs/static/img/debugger/6-hover.png new file mode 100644 index 00000000000..20579ec461e Binary files /dev/null and b/noir/noir-repo/docs/static/img/debugger/6-hover.png differ diff --git a/noir/noir-repo/docs/static/img/debugger/7-break.png b/noir/noir-repo/docs/static/img/debugger/7-break.png new file mode 100644 index 00000000000..aca5121d722 Binary files /dev/null and b/noir/noir-repo/docs/static/img/debugger/7-break.png differ diff --git a/noir/noir-repo/docs/static/img/debugger/debugger-intro.gif b/noir/noir-repo/docs/static/img/debugger/debugger-intro.gif new file mode 100644 index 00000000000..06e3b853555 Binary files /dev/null and b/noir/noir-repo/docs/static/img/debugger/debugger-intro.gif differ diff --git a/noir/noir-repo/docs/static/img/debugger/ref1-create-launch.png b/noir/noir-repo/docs/static/img/debugger/ref1-create-launch.png new file mode 100644 index 00000000000..0b6cb8b3ec6 Binary files /dev/null and b/noir/noir-repo/docs/static/img/debugger/ref1-create-launch.png differ diff --git a/noir/noir-repo/test_programs/execution_failure/fold_dyn_index_fail/Nargo.toml b/noir/noir-repo/test_programs/execution_failure/fold_dyn_index_fail/Nargo.toml new file mode 100644 index 00000000000..e49a82cf0fb --- /dev/null +++ b/noir/noir-repo/test_programs/execution_failure/fold_dyn_index_fail/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "fold_dyn_index_fail" +type = "bin" +authors = [""] +compiler_version = ">=0.26.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_failure/fold_dyn_index_fail/Prover.toml b/noir/noir-repo/test_programs/execution_failure/fold_dyn_index_fail/Prover.toml new file mode 100644 index 00000000000..caf3448c56f --- /dev/null +++ b/noir/noir-repo/test_programs/execution_failure/fold_dyn_index_fail/Prover.toml @@ -0,0 +1,2 @@ +x = [104, 101, 108, 108, 111] +z = "4" diff --git a/noir/noir-repo/test_programs/execution_failure/fold_dyn_index_fail/src/main.nr b/noir/noir-repo/test_programs/execution_failure/fold_dyn_index_fail/src/main.nr new file mode 100644 index 00000000000..b12dea630b0 --- /dev/null +++ 
b/noir/noir-repo/test_programs/execution_failure/fold_dyn_index_fail/src/main.nr @@ -0,0 +1,10 @@ +fn main(mut x: [u32; 5], z: Field) { + x[z] = 4; + dynamic_index_check(x, z + 10); +} + +#[fold] +fn dynamic_index_check(x: [u32; 5], idx: Field) { + // Dynamic index is greater than length of the array + assert(x[idx] != 0); +} diff --git a/noir/noir-repo/test_programs/execution_failure/fold_nested_brillig_assert_fail/Nargo.toml b/noir/noir-repo/test_programs/execution_failure/fold_nested_brillig_assert_fail/Nargo.toml new file mode 100644 index 00000000000..bb7d5e20dcc --- /dev/null +++ b/noir/noir-repo/test_programs/execution_failure/fold_nested_brillig_assert_fail/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "fold_nested_brillig_assert_fail" +type = "bin" +authors = [""] +compiler_version = ">=0.26.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_failure/fold_nested_brillig_assert_fail/Prover.toml b/noir/noir-repo/test_programs/execution_failure/fold_nested_brillig_assert_fail/Prover.toml new file mode 100644 index 00000000000..11497a473bc --- /dev/null +++ b/noir/noir-repo/test_programs/execution_failure/fold_nested_brillig_assert_fail/Prover.toml @@ -0,0 +1 @@ +x = "0" diff --git a/noir/noir-repo/test_programs/execution_failure/fold_nested_brillig_assert_fail/src/main.nr b/noir/noir-repo/test_programs/execution_failure/fold_nested_brillig_assert_fail/src/main.nr new file mode 100644 index 00000000000..0a5038c179b --- /dev/null +++ b/noir/noir-repo/test_programs/execution_failure/fold_nested_brillig_assert_fail/src/main.nr @@ -0,0 +1,26 @@ +// Tests a very simple program. +// +// The features being tested is using assert on brillig that is triggered through nested ACIR calls. +// We want to make sure we get a call stack from the original call in main to the failed assert. 
+fn main(x: Field) { + assert(1 == fold_conditional_wrapper(x as bool)); +} + +#[fold] +fn fold_conditional_wrapper(x: bool) -> Field { + fold_conditional(x) +} + +#[fold] +fn fold_conditional(x: bool) -> Field { + conditional_wrapper(x) +} + +unconstrained fn conditional_wrapper(x: bool) -> Field { + conditional(x) +} + +unconstrained fn conditional(x: bool) -> Field { + assert(x); + 1 +} diff --git a/noir/noir-repo/test_programs/execution_success/bigint/src/main.nr b/noir/noir-repo/test_programs/execution_success/bigint/src/main.nr index db269d63ac0..908cab8accc 100644 --- a/noir/noir-repo/test_programs/execution_success/bigint/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/bigint/src/main.nr @@ -10,11 +10,11 @@ fn main(mut x: [u8; 5], y: [u8; 5]) { assert(a_bytes[i] == x[i]); assert(b_bytes[i] == y[i]); } + //Regression for issue #4578 + let d = a * b; + assert(d / b == a); - let d = a * b - b; - let d1 = bigint::Secpk1Fq::from_le_bytes(597243850900842442924.to_le_bytes(10)); - assert(d1 == d); - // big_int_example(x[0], x[1]); + big_int_example(x[0], x[1]); } // docs:start:big_int_example diff --git a/noir/noir-repo/test_programs/execution_success/unit_value/Nargo.toml b/noir/noir-repo/test_programs/execution_success/unit_value/Nargo.toml new file mode 100644 index 00000000000..f7e3697a7c1 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/unit_value/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "short" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/unit_value/src/main.nr b/noir/noir-repo/test_programs/execution_success/unit_value/src/main.nr new file mode 100644 index 00000000000..f3844e03cf2 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/unit_value/src/main.nr @@ -0,0 +1,7 @@ +fn get_transaction() { + dep::std::unsafe::zeroed() +} + +fn main() { + get_transaction(); +} diff --git a/noir/noir-repo/tooling/debugger/src/context.rs b/noir/noir-repo/tooling/debugger/src/context.rs index 5639407df54..9b535075484 100644 --- a/noir/noir-repo/tooling/debugger/src/context.rs +++ b/noir/noir-repo/tooling/debugger/src/context.rs @@ -339,7 +339,8 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.handle_foreign_call(foreign_call) } Err(err) => DebugCommandResult::Error(NargoError::ExecutionError( - ExecutionError::SolvingError(err), + // TODO: debugger does not not handle multiple acir calls + ExecutionError::SolvingError(err, None), )), } } @@ -382,7 +383,8 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } ACVMStatus::Failure(error) => DebugCommandResult::Error(NargoError::ExecutionError( - ExecutionError::SolvingError(error), + // TODO: debugger does not not handle multiple acir calls + ExecutionError::SolvingError(error, None), )), ACVMStatus::RequiresForeignCall(_) => { unreachable!("Unexpected pending foreign call resolution"); diff --git a/noir/noir-repo/tooling/debugger/src/dap.rs b/noir/noir-repo/tooling/debugger/src/dap.rs index dc83337a973..060945132f5 100644 --- a/noir/noir-repo/tooling/debugger/src/dap.rs +++ b/noir/noir-repo/tooling/debugger/src/dap.rs @@ -606,7 +606,7 @@ pub fn run_session( initial_witness: WitnessMap, ) -> Result<(), ServerError> { let debug_artifact = DebugArtifact { - debug_symbols: vec![program.debug], + debug_symbols: program.debug, file_map: program.file_map, warnings: program.warnings, }; diff --git a/noir/noir-repo/tooling/lsp/src/requests/profile_run.rs 
b/noir/noir-repo/tooling/lsp/src/requests/profile_run.rs index 89719947689..7d06bc87c85 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/profile_run.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/profile_run.rs @@ -84,9 +84,11 @@ fn on_profile_run_request_inner( let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); - let span_opcodes = compiled_program.debug.count_span_opcodes(); - let debug_artifact: DebugArtifact = compiled_program.clone().into(); - opcodes_counts.extend(span_opcodes); + for function_debug in compiled_program.debug.iter() { + let span_opcodes = function_debug.count_span_opcodes(); + opcodes_counts.extend(span_opcodes); + } + let debug_artifact: DebugArtifact = compiled_program.into(); file_map.extend(debug_artifact.file_map); } @@ -94,14 +96,17 @@ fn on_profile_run_request_inner( let compiled_contract = nargo::ops::transform_contract(compiled_contract, expression_width); - let function_debug_info: Vec<_> = - compiled_contract.functions.iter().map(|func| &func.debug).cloned().collect(); - let debug_artifact: DebugArtifact = compiled_contract.into(); - file_map.extend(debug_artifact.file_map); + let function_debug_info = compiled_contract + .functions + .iter() + .flat_map(|func| &func.debug) + .collect::>(); for contract_function_debug in function_debug_info { let span_opcodes = contract_function_debug.count_span_opcodes(); opcodes_counts.extend(span_opcodes); } + let debug_artifact: DebugArtifact = compiled_contract.into(); + file_map.extend(debug_artifact.file_map); } let result = NargoProfileRunResult { file_map, opcodes_counts }; diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs b/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs index 868fb4404fd..83bb4b94f82 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs @@ -4,7 +4,7 @@ use noirc_driver::{CompiledContract, CompiledContractOutputs, ContractFunction}; use serde::{Deserialize, Serialize}; use noirc_driver::DebugFile; -use noirc_errors::debug_info::DebugInfo; +use noirc_errors::debug_info::ProgramDebugInfo; use std::collections::{BTreeMap, HashMap}; use fm::FileId; @@ -68,10 +68,10 @@ pub struct ContractFunctionArtifact { pub bytecode: Program, #[serde( - serialize_with = "DebugInfo::serialize_compressed_base64_json", - deserialize_with = "DebugInfo::deserialize_compressed_base64_json" + serialize_with = "ProgramDebugInfo::serialize_compressed_base64_json", + deserialize_with = "ProgramDebugInfo::deserialize_compressed_base64_json" )] - pub debug_symbols: DebugInfo, + pub debug_symbols: ProgramDebugInfo, } impl From for ContractFunctionArtifact { @@ -82,7 +82,7 @@ impl From for ContractFunctionArtifact { custom_attributes: func.custom_attributes, abi: func.abi, bytecode: func.bytecode, - debug_symbols: func.debug, + debug_symbols: ProgramDebugInfo { debug_infos: func.debug }, } } } diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs b/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs index fbdf59805c9..496896468cc 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs @@ -121,7 +121,7 @@ impl DebugArtifact { impl From for DebugArtifact { fn from(compiled_program: CompiledProgram) -> Self { DebugArtifact { - debug_symbols: vec![compiled_program.debug], + debug_symbols: compiled_program.debug, file_map: compiled_program.file_map, warnings: compiled_program.warnings, } @@ -133,7 +133,7 @@ impl 
From for DebugArtifact { let all_functions_debug: Vec = compiled_artifact .functions .into_iter() - .map(|contract_function| contract_function.debug) + .flat_map(|contract_function| contract_function.debug) .collect(); DebugArtifact { diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/program.rs b/noir/noir-repo/tooling/nargo/src/artifacts/program.rs index 9e660cbd359..67ac9f53ec8 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/program.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/program.rs @@ -5,7 +5,7 @@ use fm::FileId; use noirc_abi::Abi; use noirc_driver::CompiledProgram; use noirc_driver::DebugFile; -use noirc_errors::debug_info::DebugInfo; +use noirc_errors::debug_info::ProgramDebugInfo; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug)] @@ -27,10 +27,10 @@ pub struct ProgramArtifact { pub bytecode: Program, #[serde( - serialize_with = "DebugInfo::serialize_compressed_base64_json", - deserialize_with = "DebugInfo::deserialize_compressed_base64_json" + serialize_with = "ProgramDebugInfo::serialize_compressed_base64_json", + deserialize_with = "ProgramDebugInfo::deserialize_compressed_base64_json" )] - pub debug_symbols: DebugInfo, + pub debug_symbols: ProgramDebugInfo, /// Map of file Id to the source code so locations in debug info can be mapped to source code they point to. pub file_map: BTreeMap, @@ -45,7 +45,7 @@ impl From for ProgramArtifact { abi: compiled_program.abi, noir_version: compiled_program.noir_version, bytecode: compiled_program.program, - debug_symbols: compiled_program.debug, + debug_symbols: ProgramDebugInfo { debug_infos: compiled_program.debug }, file_map: compiled_program.file_map, names: compiled_program.names, } @@ -59,7 +59,7 @@ impl From for CompiledProgram { abi: program.abi, noir_version: program.noir_version, program: program.bytecode, - debug: program.debug_symbols, + debug: program.debug_symbols.debug_infos, file_map: program.file_map, warnings: vec![], names: program.names, diff --git a/noir/noir-repo/tooling/nargo/src/errors.rs b/noir/noir-repo/tooling/nargo/src/errors.rs index f1e77d47a73..ac03330a7c8 100644 --- a/noir/noir-repo/tooling/nargo/src/errors.rs +++ b/noir/noir-repo/tooling/nargo/src/errors.rs @@ -1,5 +1,5 @@ use acvm::{ - acir::circuit::OpcodeLocation, + acir::circuit::{OpcodeLocation, ResolvedOpcodeLocation}, pwg::{ErrorLocation, OpcodeResolutionError}, }; use noirc_errors::{ @@ -61,7 +61,7 @@ impl NargoError { match execution_error { ExecutionError::AssertionFailed(message, _) => Some(message), - ExecutionError::SolvingError(error) => match error { + ExecutionError::SolvingError(error, _) => match error { OpcodeResolutionError::IndexOutOfBounds { .. } | OpcodeResolutionError::OpcodeNotSolvable(_) | OpcodeResolutionError::UnsatisfiedConstrain { .. } @@ -79,81 +79,105 @@ impl NargoError { #[derive(Debug, Error)] pub enum ExecutionError { #[error("Failed assertion: '{}'", .0)] - AssertionFailed(String, Vec), + AssertionFailed(String, Vec), - #[error(transparent)] - SolvingError(#[from] OpcodeResolutionError), + #[error("Failed to solve program: '{}'", .0)] + SolvingError(OpcodeResolutionError, Option>), } /// Extracts the opcode locations from a nargo error. fn extract_locations_from_error( error: &ExecutionError, - debug: &DebugInfo, + debug: &[DebugInfo], ) -> Option> { let mut opcode_locations = match error { - ExecutionError::SolvingError(OpcodeResolutionError::BrilligFunctionFailed { - call_stack, - .. 
- }) - | ExecutionError::AssertionFailed(_, call_stack) => Some(call_stack.clone()), - ExecutionError::SolvingError(OpcodeResolutionError::IndexOutOfBounds { - opcode_location: error_location, - .. - }) - | ExecutionError::SolvingError(OpcodeResolutionError::UnsatisfiedConstrain { - opcode_location: error_location, - }) => match error_location { + ExecutionError::SolvingError( + OpcodeResolutionError::BrilligFunctionFailed { .. }, + acir_call_stack, + ) => acir_call_stack.clone(), + ExecutionError::AssertionFailed(_, call_stack) => Some(call_stack.clone()), + ExecutionError::SolvingError( + OpcodeResolutionError::IndexOutOfBounds { opcode_location: error_location, .. }, + acir_call_stack, + ) + | ExecutionError::SolvingError( + OpcodeResolutionError::UnsatisfiedConstrain { opcode_location: error_location }, + acir_call_stack, + ) => match error_location { ErrorLocation::Unresolved => { unreachable!("Cannot resolve index for unsatisfied constraint") } - ErrorLocation::Resolved(opcode_location) => Some(vec![*opcode_location]), + ErrorLocation::Resolved(_) => acir_call_stack.clone(), }, _ => None, }?; - if let Some(OpcodeLocation::Brillig { acir_index, .. }) = opcode_locations.first() { - opcode_locations.insert(0, OpcodeLocation::Acir(*acir_index)); + // Insert the top-level Acir location where the Brillig function failed + for (i, resolved_location) in opcode_locations.iter().enumerate() { + if let ResolvedOpcodeLocation { + acir_function_index, + opcode_location: OpcodeLocation::Brillig { acir_index, .. }, + } = resolved_location + { + let acir_location = ResolvedOpcodeLocation { + acir_function_index: *acir_function_index, + opcode_location: OpcodeLocation::Acir(*acir_index), + }; + + opcode_locations.insert(i, acir_location); + // Go until the first brillig opcode as that means we have the start of a Brillig call stack. + // We have to loop through the opcode locations in case we had ACIR calls + // before the brillig function failure. + break; + } } Some( opcode_locations .iter() - .flat_map(|opcode_location| debug.opcode_location(opcode_location).unwrap_or_default()) + .flat_map(|resolved_location| { + debug[resolved_location.acir_function_index] + .opcode_location(&resolved_location.opcode_location) + .unwrap_or_default() + }) .collect(), ) } -/// Tries to generate a runtime diagnostic from a nargo error. It will successfully do so if it's a runtime error with a call stack. -pub fn try_to_diagnose_runtime_error( - nargo_err: &NargoError, - debug: &DebugInfo, -) -> Option { - let execution_error = match nargo_err { - NargoError::ExecutionError(execution_error) => execution_error, - _ => return None, - }; - - let source_locations = extract_locations_from_error(execution_error, debug)?; - - // The location of the error itself will be the location at the top - // of the call stack (the last item in the Vec). - let location = source_locations.last()?; - - let message = match nargo_err { +fn extract_message_from_error(nargo_err: &NargoError) -> String { + match nargo_err { NargoError::ExecutionError(ExecutionError::AssertionFailed(message, _)) => { format!("Assertion failed: '{message}'") } NargoError::ExecutionError(ExecutionError::SolvingError( OpcodeResolutionError::IndexOutOfBounds { index, array_size, .. }, + _, )) => { format!("Index out of bounds, array has size {array_size:?}, but index was {index:?}") } NargoError::ExecutionError(ExecutionError::SolvingError( OpcodeResolutionError::UnsatisfiedConstrain { .. 
}, + _, )) => "Failed constraint".into(), _ => nargo_err.to_string(), - }; + } +} +/// Tries to generate a runtime diagnostic from a nargo error. It will successfully do so if it's a runtime error with a call stack. +pub fn try_to_diagnose_runtime_error( + nargo_err: &NargoError, + debug: &[DebugInfo], +) -> Option { + let source_locations = match nargo_err { + NargoError::ExecutionError(execution_error) => { + extract_locations_from_error(execution_error, debug)? + } + _ => return None, + }; + // The location of the error itself will be the location at the top + // of the call stack (the last item in the Vec). + let location = source_locations.last()?; + let message = extract_message_from_error(nargo_err); Some( CustomDiagnostic::simple_error(message, String::new(), location.span) .in_file(location.file) diff --git a/noir/noir-repo/tooling/nargo/src/ops/execute.rs b/noir/noir-repo/tooling/nargo/src/ops/execute.rs index c2c0208b395..97584aff150 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/execute.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/execute.rs @@ -1,5 +1,5 @@ use acvm::acir::circuit::brillig::BrilligBytecode; -use acvm::acir::circuit::Program; +use acvm::acir::circuit::{OpcodeLocation, Program, ResolvedOpcodeLocation}; use acvm::acir::native_types::WitnessStack; use acvm::brillig_vm::brillig::ForeignCallResult; use acvm::pwg::{ACVMStatus, ErrorLocation, OpcodeNotSolvable, OpcodeResolutionError, ACVM}; @@ -22,6 +22,15 @@ struct ProgramExecutor<'a, B: BlackBoxFunctionSolver, F: ForeignCallExecutor> { blackbox_solver: &'a B, foreign_call_executor: &'a mut F, + + // The Noir compiler codegens per function and call stacks are not shared across ACIR function calls. + // We must rebuild a call stack when executing a program of many circuits. + call_stack: Vec, + + // Tracks the index of the current function we are executing. + // This is used to fetch the function we want to execute + // and to resolve call stack locations across many function calls. + current_function_index: usize, } impl<'a, B: BlackBoxFunctionSolver, F: ForeignCallExecutor> ProgramExecutor<'a, B, F> { @@ -37,6 +46,8 @@ impl<'a, B: BlackBoxFunctionSolver, F: ForeignCallExecutor> ProgramExecutor<'a, witness_stack: WitnessStack::default(), blackbox_solver, foreign_call_executor, + call_stack: Vec::default(), + current_function_index: 0, } } @@ -45,11 +56,8 @@ impl<'a, B: BlackBoxFunctionSolver, F: ForeignCallExecutor> ProgramExecutor<'a, } #[tracing::instrument(level = "trace", skip_all)] - fn execute_circuit( - &mut self, - circuit: &Circuit, - initial_witness: WitnessMap, - ) -> Result { + fn execute_circuit(&mut self, initial_witness: WitnessMap) -> Result { + let circuit = &self.functions[self.current_function_index]; let mut acvm = ACVM::new( self.blackbox_solver, &circuit.opcodes, @@ -71,9 +79,26 @@ impl<'a, B: BlackBoxFunctionSolver, F: ForeignCallExecutor> ProgramExecutor<'a, let call_stack = match &error { OpcodeResolutionError::UnsatisfiedConstrain { opcode_location: ErrorLocation::Resolved(opcode_location), - } => Some(vec![*opcode_location]), + } + | OpcodeResolutionError::IndexOutOfBounds { + opcode_location: ErrorLocation::Resolved(opcode_location), + .. + } => { + let resolved_location = ResolvedOpcodeLocation { + acir_function_index: self.current_function_index, + opcode_location: *opcode_location, + }; + self.call_stack.push(resolved_location); + Some(self.call_stack.clone()) + } OpcodeResolutionError::BrilligFunctionFailed { call_stack, .. 
} => { - Some(call_stack.clone()) + let brillig_call_stack = + call_stack.iter().map(|location| ResolvedOpcodeLocation { + acir_function_index: self.current_function_index, + opcode_location: *location, + }); + self.call_stack.extend(brillig_call_stack); + Some(self.call_stack.clone()) } _ => None, }; @@ -96,17 +121,20 @@ impl<'a, B: BlackBoxFunctionSolver, F: ForeignCallExecutor> ProgramExecutor<'a, { ExecutionError::AssertionFailed(message.to_owned(), call_stack) } else if let Some(assert_message) = circuit.get_assert_message( - *call_stack.last().expect("Call stacks should not be empty"), + call_stack + .last() + .expect("Call stacks should not be empty") + .opcode_location, ) { ExecutionError::AssertionFailed( assert_message.to_owned(), call_stack, ) } else { - ExecutionError::SolvingError(error) + ExecutionError::SolvingError(error, Some(call_stack)) } } - None => ExecutionError::SolvingError(error), + None => ExecutionError::SolvingError(error, None), })); } ACVMStatus::RequiresForeignCall(foreign_call) => { @@ -126,10 +154,24 @@ impl<'a, B: BlackBoxFunctionSolver, F: ForeignCallExecutor> ProgramExecutor<'a, } } ACVMStatus::RequiresAcirCall(call_info) => { + // Store the parent function index whose context we are currently executing + let acir_function_caller = self.current_function_index; + // Add call opcode to the call stack with a reference to the parent function index + self.call_stack.push(ResolvedOpcodeLocation { + acir_function_index: acir_function_caller, + opcode_location: OpcodeLocation::Acir(acvm.instruction_pointer()), + }); + + // Set current function to the circuit we are about to execute + self.current_function_index = call_info.id as usize; + // Execute the ACIR call let acir_to_call = &self.functions[call_info.id as usize]; let initial_witness = call_info.initial_witness; - let call_solved_witness = - self.execute_circuit(acir_to_call, initial_witness)?; + let call_solved_witness = self.execute_circuit(initial_witness)?; + + // Set tracking index back to the parent function after ACIR call execution + self.current_function_index = acir_function_caller; + let mut call_resolved_outputs = Vec::new(); for return_witness_index in acir_to_call.return_values.indices() { if let Some(return_value) = @@ -139,6 +181,7 @@ impl<'a, B: BlackBoxFunctionSolver, F: ForeignCallExecutor> ProgramExecutor<'a, } else { return Err(ExecutionError::SolvingError( OpcodeNotSolvable::MissingAssignment(return_witness_index).into(), + None, // Missing assignment errors do not supply user-facing diagnostics so we do not need to attach a call stack ) .into()); } @@ -160,15 +203,13 @@ pub fn execute_program( blackbox_solver: &B, foreign_call_executor: &mut F, ) -> Result { - let main = &program.functions[0]; - let mut executor = ProgramExecutor::new( &program.functions, &program.unconstrained_functions, blackbox_solver, foreign_call_executor, ); - let main_witness = executor.execute_circuit(main, initial_witness)?; + let main_witness = executor.execute_circuit(initial_witness)?; executor.witness_stack.push(0, main_witness); Ok(executor.finalize()) diff --git a/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs b/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs index bc91929e5e7..33767314a37 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs @@ -167,8 +167,15 @@ pub struct DefaultForeignCallExecutor { impl DefaultForeignCallExecutor { pub fn new(show_output: bool, resolver_url: Option<&str>) -> Self { let 
oracle_resolver = resolver_url.map(|resolver_url| { - let transport_builder = + let mut transport_builder = Builder::new().url(resolver_url).expect("Invalid oracle resolver URL"); + + if let Some(Ok(timeout)) = + std::env::var("NARGO_FOREIGN_CALL_TIMEOUT").ok().map(|timeout| timeout.parse()) + { + let timeout_duration = std::time::Duration::from_millis(timeout); + transport_builder = transport_builder.timeout(timeout_duration); + }; Client::with_transport(transport_builder.build()) }); DefaultForeignCallExecutor { diff --git a/noir/noir-repo/tooling/nargo/src/ops/optimize.rs b/noir/noir-repo/tooling/nargo/src/ops/optimize.rs index cfaaf27ea98..a62f4696328 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/optimize.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/optimize.rs @@ -1,25 +1,36 @@ +use acvm::acir::circuit::Program; use iter_extended::vecmap; use noirc_driver::{CompiledContract, CompiledProgram}; - -/// TODO(https://github.com/noir-lang/noir/issues/4428): Need to update how these passes are run to account for -/// multiple ACIR functions +use noirc_errors::debug_info::DebugInfo; pub fn optimize_program(mut compiled_program: CompiledProgram) -> CompiledProgram { - let (optimized_circuit, location_map) = - acvm::compiler::optimize(std::mem::take(&mut compiled_program.program.functions[0])); - compiled_program.program.functions[0] = optimized_circuit; - compiled_program.debug.update_acir(location_map); + compiled_program.program = + optimize_program_internal(compiled_program.program, &mut compiled_program.debug); compiled_program } pub fn optimize_contract(contract: CompiledContract) -> CompiledContract { let functions = vecmap(contract.functions, |mut func| { - let (optimized_bytecode, location_map) = - acvm::compiler::optimize(std::mem::take(&mut func.bytecode.functions[0])); - func.bytecode.functions[0] = optimized_bytecode; - func.debug.update_acir(location_map); + func.bytecode = optimize_program_internal(func.bytecode, &mut func.debug); func }); CompiledContract { functions, ..contract } } + +fn optimize_program_internal(mut program: Program, debug: &mut [DebugInfo]) -> Program { + let functions = std::mem::take(&mut program.functions); + + let optimized_functions = functions + .into_iter() + .enumerate() + .map(|(i, function)| { + let (optimized_circuit, location_map) = acvm::compiler::optimize(function); + debug[i].update_acir(location_map); + optimized_circuit + }) + .collect::>(); + + program.functions = optimized_functions; + program +} diff --git a/noir/noir-repo/tooling/nargo/src/ops/test.rs b/noir/noir-repo/tooling/nargo/src/ops/test.rs index 45b1a88f99c..b216fff827d 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/test.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/test.rs @@ -84,7 +84,7 @@ fn test_status_program_compile_fail(err: CompileError, test_function: &TestFunct /// passed/failed to determine the test status. 
fn test_status_program_compile_pass( test_function: &TestFunction, - debug: DebugInfo, + debug: Vec, circuit_execution: Result, ) -> TestStatus { let circuit_execution_err = match circuit_execution { diff --git a/noir/noir-repo/tooling/nargo/src/ops/transform.rs b/noir/noir-repo/tooling/nargo/src/ops/transform.rs index 274286a46e4..b4811bd5780 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/transform.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/transform.rs @@ -1,21 +1,17 @@ -use acvm::acir::circuit::ExpressionWidth; +use acvm::acir::circuit::{ExpressionWidth, Program}; use iter_extended::vecmap; use noirc_driver::{CompiledContract, CompiledProgram}; - -/// TODO(https://github.com/noir-lang/noir/issues/4428): Need to update how these passes are run to account for -/// multiple ACIR functions +use noirc_errors::debug_info::DebugInfo; pub fn transform_program( mut compiled_program: CompiledProgram, expression_width: ExpressionWidth, ) -> CompiledProgram { - let (optimized_circuit, location_map) = acvm::compiler::compile( - std::mem::take(&mut compiled_program.program.functions[0]), + compiled_program.program = transform_program_internal( + compiled_program.program, + &mut compiled_program.debug, expression_width, ); - - compiled_program.program.functions[0] = optimized_circuit; - compiled_program.debug.update_acir(location_map); compiled_program } @@ -24,14 +20,33 @@ pub fn transform_contract( expression_width: ExpressionWidth, ) -> CompiledContract { let functions = vecmap(contract.functions, |mut func| { - let (optimized_bytecode, location_map) = acvm::compiler::compile( - std::mem::take(&mut func.bytecode.functions[0]), - expression_width, - ); - func.bytecode.functions[0] = optimized_bytecode; - func.debug.update_acir(location_map); + func.bytecode = + transform_program_internal(func.bytecode, &mut func.debug, expression_width); + func }); CompiledContract { functions, ..contract } } + +fn transform_program_internal( + mut program: Program, + debug: &mut [DebugInfo], + expression_width: ExpressionWidth, +) -> Program { + let functions = std::mem::take(&mut program.functions); + + let optimized_functions = functions + .into_iter() + .enumerate() + .map(|(i, function)| { + let (optimized_circuit, location_map) = + acvm::compiler::compile(function, expression_width); + debug[i].update_acir(location_map); + optimized_circuit + }) + .collect::>(); + + program.functions = optimized_functions; + program +} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs index 5a9a33c0acd..7cb5cd7846b 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -232,7 +232,7 @@ pub(crate) fn debug_program( let initial_witness = compiled_program.abi.encode(inputs_map, None)?; let debug_artifact = DebugArtifact { - debug_symbols: vec![compiled_program.debug.clone()], + debug_symbols: compiled_program.debug.clone(), file_map: compiled_program.file_map.clone(), warnings: compiled_program.warnings.clone(), }; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index 697f6d7c1ea..a353065491f 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -149,7 +149,7 @@ pub(crate) fn execute_program( Ok(solved_witness_stack) => Ok(solved_witness_stack), Err(err) => { let debug_artifact = DebugArtifact { - debug_symbols: 
vec![compiled_program.debug.clone()], + debug_symbols: compiled_program.debug.clone(), file_map: compiled_program.file_map.clone(), warnings: compiled_program.warnings.clone(), }; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index e9609687ffb..67825362f92 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -97,17 +97,21 @@ pub(crate) fn run( if args.profile_info { for compiled_program in &compiled_programs { - let span_opcodes = compiled_program.debug.count_span_opcodes(); let debug_artifact = DebugArtifact::from(compiled_program.clone()); - print_span_opcodes(span_opcodes, &debug_artifact); + for function_debug in compiled_program.debug.iter() { + let span_opcodes = function_debug.count_span_opcodes(); + print_span_opcodes(span_opcodes, &debug_artifact); + } } for compiled_contract in &compiled_contracts { let debug_artifact = DebugArtifact::from(compiled_contract.clone()); let functions = &compiled_contract.functions; for contract_function in functions { - let span_opcodes = contract_function.debug.count_span_opcodes(); - print_span_opcodes(span_opcodes, &debug_artifact); + for function_debug in contract_function.debug.iter() { + let span_opcodes = function_debug.count_span_opcodes(); + print_span_opcodes(span_opcodes, &debug_artifact); + } } } } diff --git a/noir/noir-repo/tooling/nargo_fmt/src/rewrite/typ.rs b/noir/noir-repo/tooling/nargo_fmt/src/rewrite/typ.rs index 922337cdb74..980d02ee5dc 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/rewrite/typ.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/rewrite/typ.rs @@ -64,6 +64,7 @@ pub(crate) fn rewrite(visitor: &FmtVisitor, _shape: Shape, typ: UnresolvedType) | UnresolvedTypeData::Expression(_) | UnresolvedTypeData::String(_) | UnresolvedTypeData::FormatString(_, _) + | UnresolvedTypeData::Code | UnresolvedTypeData::TraitAsType(_, _) => visitor.slice(typ.span.unwrap()).into(), UnresolvedTypeData::Error => unreachable!(), } diff --git a/noir/noir-repo/tooling/noirc_abi/src/errors.rs b/noir/noir-repo/tooling/noirc_abi/src/errors.rs index 687fecfcc1d..4209a9e218b 100644 --- a/noir/noir-repo/tooling/noirc_abi/src/errors.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/errors.rs @@ -1,4 +1,7 @@ -use crate::{input_parser::InputValue, AbiParameter, AbiType}; +use crate::{ + input_parser::{InputTypecheckingError, InputValue}, + AbiType, +}; use acvm::acir::native_types::Witness; use thiserror::Error; @@ -38,8 +41,8 @@ impl From for InputParserError { pub enum AbiError { #[error("Received parameters not expected by ABI: {0:?}")] UnexpectedParams(Vec), - #[error("The parameter {} is expected to be a {:?} but found incompatible value {value:?}", .param.name, .param.typ)] - TypeMismatch { param: AbiParameter, value: InputValue }, + #[error("The value passed for parameter `{}` does not match the specified type:\n{0}", .0.path())] + TypeMismatch(#[from] InputTypecheckingError), #[error("ABI expects the parameter `{0}`, but this was not found")] MissingParam(String), #[error( diff --git a/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs b/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs index f66e069d487..4cf66820b8d 100644 --- a/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs @@ -1,6 +1,7 @@ use num_bigint::{BigInt, BigUint}; use num_traits::{Num, Zero}; -use std::collections::BTreeMap; +use 
std::collections::{BTreeMap, HashSet}; +use thiserror::Error; use acvm::FieldElement; use serde::Serialize; @@ -22,63 +23,165 @@ pub enum InputValue { Struct(BTreeMap), } +#[derive(Debug, Error)] +pub enum InputTypecheckingError { + #[error("Value {value:?} does not fall within range of allowable values for a {typ:?}")] + OutsideOfValidRange { path: String, typ: AbiType, value: InputValue }, + #[error("Type {typ:?} is expected to have length {expected_length} but value {value:?} has length {actual_length}")] + LengthMismatch { + path: String, + typ: AbiType, + value: InputValue, + expected_length: usize, + actual_length: usize, + }, + #[error("Could not find value for required field `{expected_field}`. Found values for fields {found_fields:?}")] + MissingField { path: String, expected_field: String, found_fields: Vec }, + #[error("Additional unexpected field was provided for type {typ:?}. Found field named `{extra_field}`")] + UnexpectedField { path: String, typ: AbiType, extra_field: String }, + #[error("Type {typ:?} and value {value:?} do not match")] + IncompatibleTypes { path: String, typ: AbiType, value: InputValue }, +} + +impl InputTypecheckingError { + pub(crate) fn path(&self) -> &str { + match self { + InputTypecheckingError::OutsideOfValidRange { path, .. } + | InputTypecheckingError::LengthMismatch { path, .. } + | InputTypecheckingError::MissingField { path, .. } + | InputTypecheckingError::UnexpectedField { path, .. } + | InputTypecheckingError::IncompatibleTypes { path, .. } => path, + } + } +} + impl InputValue { /// Checks whether the ABI type matches the InputValue type - /// and also their arity - pub fn matches_abi(&self, abi_param: &AbiType) -> bool { + pub(crate) fn find_type_mismatch( + &self, + abi_param: &AbiType, + path: String, + ) -> Result<(), InputTypecheckingError> { match (self, abi_param) { - (InputValue::Field(_), AbiType::Field) => true, + (InputValue::Field(_), AbiType::Field) => Ok(()), (InputValue::Field(field_element), AbiType::Integer { width, .. }) => { - field_element.num_bits() <= *width + if field_element.num_bits() <= *width { + Ok(()) + } else { + Err(InputTypecheckingError::OutsideOfValidRange { + path, + typ: abi_param.clone(), + value: self.clone(), + }) + } } (InputValue::Field(field_element), AbiType::Boolean) => { - field_element.is_one() || field_element.is_zero() + if field_element.is_one() || field_element.is_zero() { + Ok(()) + } else { + Err(InputTypecheckingError::OutsideOfValidRange { + path, + typ: abi_param.clone(), + value: self.clone(), + }) + } } (InputValue::Vec(array_elements), AbiType::Array { length, typ, .. }) => { if array_elements.len() != *length as usize { - return false; + return Err(InputTypecheckingError::LengthMismatch { + path, + typ: abi_param.clone(), + value: self.clone(), + expected_length: *length as usize, + actual_length: array_elements.len(), + }); } // Check that all of the array's elements' values match the ABI as well. 
- array_elements.iter().all(|input_value| input_value.matches_abi(typ)) + for (i, element) in array_elements.iter().enumerate() { + let mut path = path.clone(); + path.push_str(&format!("[{i}]")); + + element.find_type_mismatch(typ, path)?; + } + Ok(()) } (InputValue::String(string), AbiType::String { length }) => { - string.len() == *length as usize + if string.len() == *length as usize { + Ok(()) + } else { + Err(InputTypecheckingError::LengthMismatch { + path, + typ: abi_param.clone(), + value: self.clone(), + actual_length: string.len(), + expected_length: *length as usize, + }) + } } (InputValue::Struct(map), AbiType::Struct { fields, .. }) => { - if map.len() != fields.len() { - return false; + for (field_name, field_type) in fields { + if let Some(value) = map.get(field_name) { + let mut path = path.clone(); + path.push_str(&format!(".{field_name}")); + value.find_type_mismatch(field_type, path)?; + } else { + return Err(InputTypecheckingError::MissingField { + path, + expected_field: field_name.to_string(), + found_fields: map.keys().cloned().collect(), + }); + } } - let field_types = BTreeMap::from_iter(fields.iter().cloned()); + if map.len() > fields.len() { + let expected_fields: HashSet = + fields.iter().map(|(field, _)| field.to_string()).collect(); + let extra_field = map.keys().find(|&key| !expected_fields.contains(key)).cloned().expect("`map` is larger than the expected type's `fields` so it must contain an unexpected field"); + return Err(InputTypecheckingError::UnexpectedField { + path, + typ: abi_param.clone(), + extra_field: extra_field.to_string(), + }); + } - // Check that all of the struct's fields' values match the ABI as well. - map.iter().all(|(field_name, field_value)| { - if let Some(field_type) = field_types.get(field_name) { - field_value.matches_abi(field_type) - } else { - false - } - }) + Ok(()) } (InputValue::Vec(vec_elements), AbiType::Tuple { fields }) => { if vec_elements.len() != fields.len() { - return false; + return Err(InputTypecheckingError::LengthMismatch { + path, + typ: abi_param.clone(), + value: self.clone(), + actual_length: vec_elements.len(), + expected_length: fields.len(), + }); } - - vec_elements - .iter() - .zip(fields) - .all(|(input_value, abi_param)| input_value.matches_abi(abi_param)) + // Check that all of the array's elements' values match the ABI as well. + for (i, (element, expected_typ)) in vec_elements.iter().zip(fields).enumerate() { + let mut path = path.clone(); + path.push_str(&format!(".{i}")); + element.find_type_mismatch(expected_typ, path)?; + } + Ok(()) } // All other InputValue-AbiType combinations are fundamentally incompatible. - _ => false, + _ => Err(InputTypecheckingError::IncompatibleTypes { + path, + typ: abi_param.clone(), + value: self.clone(), + }), } } + + /// Checks whether the ABI type matches the InputValue type. + pub fn matches_abi(&self, abi_param: &AbiType) -> bool { + self.find_type_mismatch(abi_param, String::new()).is_ok() + } } /// The different formats that are supported when parsing diff --git a/noir/noir-repo/tooling/noirc_abi/src/lib.rs b/noir/noir-repo/tooling/noirc_abi/src/lib.rs index 89a60b0ed26..6ad13500bdd 100644 --- a/noir/noir-repo/tooling/noirc_abi/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/lib.rs @@ -307,15 +307,7 @@ impl Abi { .ok_or_else(|| AbiError::MissingParam(param_name.clone()))? 
.clone(); - if !value.matches_abi(&expected_type) { - let param = self - .parameters - .iter() - .find(|param| param.name == param_name) - .unwrap() - .clone(); - return Err(AbiError::TypeMismatch { param, value }); - } + value.find_type_mismatch(&expected_type, param_name.clone())?; Self::encode_value(value, &expected_type).map(|v| (param_name, v)) }) diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/errors.test.ts b/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/errors.test.ts index 429a2d446a3..0f75ff64a3e 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/errors.test.ts +++ b/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/errors.test.ts @@ -9,7 +9,7 @@ it('errors when an integer input overflows', async () => { const { abi, inputs } = await import('../shared/uint_overflow'); expect(() => abiEncode(abi, inputs)).to.throw( - 'The parameter foo is expected to be a Integer { sign: Unsigned, width: 32 } but found incompatible value Field(2³⁸)', + 'The value passed for parameter `foo` does not match the specified type:\nValue Field(2³⁸) does not fall within range of allowable values for a Integer { sign: Unsigned, width: 32 }', ); }); diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/test/node/errors.test.ts b/noir/noir-repo/tooling/noirc_abi_wasm/test/node/errors.test.ts index 0d007e64803..fba451b4a8c 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/test/node/errors.test.ts +++ b/noir/noir-repo/tooling/noirc_abi_wasm/test/node/errors.test.ts @@ -5,7 +5,7 @@ it('errors when an integer input overflows', async () => { const { abi, inputs } = await import('../shared/uint_overflow'); expect(() => abiEncode(abi, inputs)).to.throw( - 'The parameter foo is expected to be a Integer { sign: Unsigned, width: 32 } but found incompatible value Field(2³⁸)', + 'The value passed for parameter `foo` does not match the specified type:\nValue Field(2³⁸) does not fall within range of allowable values for a Integer { sign: Unsigned, width: 32 }', ); }); diff --git a/yarn-project/foundation/src/abi/abi.ts b/yarn-project/foundation/src/abi/abi.ts index 1a9fb2b8901..9f2d48800f8 100644 --- a/yarn-project/foundation/src/abi/abi.ts +++ b/yarn-project/foundation/src/abi/abi.ts @@ -240,6 +240,16 @@ export interface DebugInfo { locations: Record; } +/** + * The debug information for a given program (a collection of functions) + */ +export interface ProgramDebugInfo { + /** + * A list of debug information that matches with each function in a program + */ + debug_infos: Array; +} + /** * Maps a file ID to its metadata for debugging purposes. */ @@ -355,10 +365,13 @@ export function getFunctionDebugMetadata( functionArtifact: FunctionArtifact, ): FunctionDebugMetadata | undefined { if (functionArtifact.debugSymbols && contractArtifact.fileMap) { - const debugSymbols = JSON.parse( + const programDebugSymbols = JSON.parse( inflate(Buffer.from(functionArtifact.debugSymbols, 'base64'), { to: 'string', raw: true }), ); - return { debugSymbols, files: contractArtifact.fileMap }; + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/5813) + // We only support handling debug info for the contract function entry point. + // So for now we simply index into the first debug info. + return { debugSymbols: programDebugSymbols.debug_infos[0], files: contractArtifact.fileMap }; } return undefined; }
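The `getFunctionDebugMetadata` change above reads only `debug_infos[0]`, per the TODO about contract entry points, but the `ProgramDebugInfo` shape itself carries one `DebugInfo` per function. Below is a minimal, hedged TypeScript sketch of consuming the whole array; the interfaces are re-declared locally in simplified form (real code would import them from `abi.ts`), and the `locations` value type is narrowed for brevity.

```typescript
// Simplified local mirror of the shapes declared in abi.ts above; in the real
// interface, `locations` maps opcode locations to source code locations.
interface DebugInfo {
  locations: Record<string, unknown>;
}

interface ProgramDebugInfo {
  debug_infos: Array<DebugInfo>;
}

// Walk every function's debug info, not just the entry point, and report how
// many opcode locations carry source mappings in each one.
function countMappedOpcodes(program: ProgramDebugInfo): number[] {
  return program.debug_infos.map((info) => Object.keys(info.locations).length);
}

// Example usage with a two-function program.
const example: ProgramDebugInfo = {
  debug_infos: [{ locations: { '0': [] } }, { locations: {} }],
};
console.log(countMappedOpcodes(example)); // [1, 0]
```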