diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 00000000000..b1346a8792f --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,19 @@ +module.exports = { + root: true, + parser: "@typescript-eslint/parser", + plugins: ["@typescript-eslint", "prettier"], + extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], + rules: { + "comma-spacing": ["error", { before: false, after: true }], + "no-unused-vars": "off", + "@typescript-eslint/no-unused-vars": [ + "warn", // or "error" + { + argsIgnorePattern: "^_", + varsIgnorePattern: "^_", + caughtErrorsIgnorePattern: "^_", + }, + ], + "prettier/prettier": "error", + }, +}; diff --git a/.github/ISSUE_TEMPLATE/idea_action_plan.yml b/.github/ISSUE_TEMPLATE/idea_action_plan.yml new file mode 100644 index 00000000000..02fed1fc48b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/idea_action_plan.yml @@ -0,0 +1,54 @@ +name: Idea Action Plan +description: Outline the scope and steps for implementing an enhancement. Start with "Ideas" instead to request and discuss new features. +labels: ["enhancement"] +body: + - type: markdown + attributes: + value: | + ## Description + Thanks for taking the time to create the Issue, and welcome to the Noirot family! + - type: textarea + id: problem + attributes: + label: Problem + description: Describe what you feel is lacking. Supply code / step-by-step examples if applicable. + validations: + required: true + - type: textarea + id: solution + attributes: + label: Happy Case + description: Describe how you think it should work. Supply pseudocode / step-by-step examples if applicable. + validations: + required: true + - type: textarea + id: alternatives + attributes: + label: Alternatives Considered + description: Describe less-happy cases you have considered, if any. + - type: textarea + id: additional + attributes: + label: Additional Context + description: Supply further information if applicable. + - type: markdown + attributes: + value: | + ## Pull Request + - type: dropdown + id: pr-preference + attributes: + label: Would you like to submit a PR for this Issue? + description: Fellow contributors are happy to provide support where applicable. + multiple: false + options: + - "No" + - "Maybe" + - "Yes" + validations: + required: true + - type: textarea + id: pr-support + attributes: + label: Support Needs + description: Describe the support you would need from other contributors in order to create a PR for this Issue.
diff --git a/.github/workflows/abi_wasm.yml b/.github/workflows/abi_wasm.yml index 8f219a77977..fbbfeba2331 100644 --- a/.github/workflows/abi_wasm.yml +++ b/.github/workflows/abi_wasm.yml @@ -47,8 +47,8 @@ jobs: - name: Build noirc_abi_wasm run: | nix build -L .#noirc_abi_wasm - cp -r ./result/nodejs ./tooling/noirc_abi_wasm - cp -r ./result/web ./tooling/noirc_abi_wasm + cp -r ./result/noirc_abi_wasm/nodejs ./tooling/noirc_abi_wasm + cp -r ./result/noirc_abi_wasm/web ./tooling/noirc_abi_wasm - name: Export cache from nix store if: ${{ steps.cache.outputs.cache-hit != 'true' && github.event_name != 'merge_group' }} @@ -63,7 +63,7 @@ jobs: key: ${{ steps.cache.outputs.cache-primary-key }} - name: Dereference symlink - run: echo "UPLOAD_PATH=$(readlink -f result)" >> $GITHUB_ENV + run: echo "UPLOAD_PATH=$(readlink -f ./result/noirc_abi_wasm)" >> $GITHUB_ENV - name: Upload artifact uses: actions/upload-artifact@v3 @@ -75,7 +75,7 @@ jobs: test: runs-on: ubuntu-latest needs: [build] - + steps: - name: Checkout sources uses: actions/checkout@v3 @@ -85,7 +85,7 @@ jobs: with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm - + - name: Install Yarn dependencies uses: ./.github/actions/setup @@ -94,7 +94,7 @@ jobs: - name: Install Playwright uses: ./.github/actions/install-playwright - + - name: Run browser tests run: yarn workspace @noir-lang/noirc_abi test:browser diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index bdc95b40bd5..07ae535a454 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -1,4 +1,4 @@ -name: Clippy +name: Formatting on: pull_request: @@ -48,3 +48,18 @@ jobs: - name: Run `cargo fmt` run: cargo fmt --all --check + + eslint: + name: eslint + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Run `yarn lint` + run: yarn lint diff --git a/.github/workflows/publish-abi_wasm.yml b/.github/workflows/publish-abi_wasm.yml index 3ca7b77f747..1769b4d771c 100644 --- a/.github/workflows/publish-abi_wasm.yml +++ b/.github/workflows/publish-abi_wasm.yml @@ -36,7 +36,7 @@ jobs: nix build -L .#noirc_abi_wasm - name: Discover Build Output Path - run: echo "BUILD_OUTPUT_PATH=$(readlink -f ./result)" >> $GITHUB_ENV + run: echo "BUILD_OUTPUT_PATH=$(readlink -f ./result/noirc_abi_wasm)" >> $GITHUB_ENV - name: Copy Build Output to Temporary Directory run: | diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index f39f35775f7..9d3ff9423ea 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -95,6 +95,24 @@ jobs: overwrite: true tag: ${{ inputs.tag || 'nightly' }} # This will fail if `inputs.tag` is not a tag (e.g. 
testing a branch) + - name: Get formatted date + id: date + if: ${{ inputs.tag == '' && inputs.publish || github.event_name == 'schedule' }} + run: echo "date=$(date '+%Y-%m-%d')" >> $GITHUB_OUTPUT + + - name: Upload binaries to release with date tag + uses: svenstaro/upload-release-action@v2 + if: ${{ inputs.tag == '' && inputs.publish || github.event_name == 'schedule' }} + with: + repo_name: noir-lang/noir + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: ./nargo-${{ matrix.target }}.tar.gz + asset_name: nargo-${{ matrix.target }}.tar.gz + prerelease: true + make_latest: false + overwrite: true + tag: ${{ format('{0}-{1}', 'nightly', steps.date.outputs.date) }} + build-linux: runs-on: ubuntu-22.04 env: @@ -162,3 +180,21 @@ jobs: asset_name: nargo-${{ matrix.target }}.tar.gz overwrite: true tag: ${{ inputs.tag || 'nightly' }} # This will fail if `inputs.tag` is not a tag (e.g. testing a branch) + + - name: Get formatted date + id: date + if: ${{ inputs.tag == '' && inputs.publish || github.event_name == 'schedule' }} + run: echo "date=$(date '+%Y-%m-%d')" >> $GITHUB_OUTPUT + + - name: Upload binaries to release with date tag + uses: svenstaro/upload-release-action@v2 + if: ${{ inputs.tag == '' && inputs.publish || github.event_name == 'schedule' }} + with: + repo_name: noir-lang/noir + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: ./nargo-${{ matrix.target }}.tar.gz + asset_name: nargo-${{ matrix.target }}.tar.gz + prerelease: true + make_latest: false + overwrite: true + tag: ${{ format('{0}-{1}', 'nightly', steps.date.outputs.date) }} diff --git a/.github/workflows/test-integration.yml b/.github/workflows/test-integration.yml index 46606ace966..662b024d6b7 100644 --- a/.github/workflows/test-integration.yml +++ b/.github/workflows/test-integration.yml @@ -54,10 +54,10 @@ jobs: - name: Build noir_wasm package run: | - nix build -L .#wasm - echo "UPLOAD_PATH=$(readlink -f result)" >> $GITHUB_ENV - cp -r ./result/nodejs ./compiler/wasm - cp -r ./result/web ./compiler/wasm + nix build -L .#noir_wasm + echo "UPLOAD_PATH=$(readlink -f ./result/noir_wasm)" >> $GITHUB_ENV + cp -r ./result/noir_wasm/nodejs ./compiler/wasm + cp -r ./result/noir_wasm/web ./compiler/wasm - name: Upload `noir_wasm` artifact uses: actions/upload-artifact@v3 @@ -69,9 +69,9 @@ jobs: - name: Build noirc_abi_wasm package run: | nix build -L .#noirc_abi_wasm - echo "UPLOAD_PATH=$(readlink -f result)" >> $GITHUB_ENV - cp -r ./result/nodejs ./tooling/noirc_abi_wasm - cp -r ./result/web ./tooling/noirc_abi_wasm + echo "UPLOAD_PATH=$(readlink -f ./result/noirc_abi_wasm)" >> $GITHUB_ENV + cp -r ./result/noirc_abi_wasm/nodejs ./tooling/noirc_abi_wasm + cp -r ./result/noirc_abi_wasm/web ./tooling/noirc_abi_wasm - name: Upload `noirc_abi_wasm` artifact uses: actions/upload-artifact@v3 @@ -85,13 +85,14 @@ jobs: - name: Setup `integration-tests` run: | - yarn workspace @noir-lang/noir-source-resolver build + yarn workspace @noir-lang/source-resolver build + yarn workspace @noir-lang/noir_js build - name: Run `integration-tests` run: | yarn test:integration - - - name: Alert on nightly test failure + + - name: Alert on nightly test failure uses: JasonEtco/create-an-issue@v2 if: ${{ failure() && github.event_name == 'schedule' }} env: diff --git a/.github/workflows/track-acvm.yml b/.github/workflows/track-acvm.yml new file mode 100644 index 00000000000..6d02fdc2f1a --- /dev/null +++ b/.github/workflows/track-acvm.yml @@ -0,0 +1,29 @@ +name: Track ACVM for Integration Tests + +on: + pull_request: + paths: + - "Cargo.lock" + - 
"yarn.lock" + +jobs: + check_matching_version: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Check acvm version change + run: | + # Extract acvm version from the current Cargo.lock + CURRENT_VERSION=$(awk '/name = "acvm"/ {getline; print $3}' Cargo.lock | tr -d '"') + INTEGRATION_TEST_VERSION=$(yarn workspace @noir-lang/noir_js info @noir-lang/acvm_js --json | jq .children.Version | tr -d '"') + + echo "Current ACVM Version: $CURRENT_VERSION" + echo "Integration Test ACVM Version (Noir JS): $INTEGRATION_TEST_VERSION" + + if [ "$CURRENT_VERSION" != "$INTEGRATION_TEST_VERSION" ]; then + exit 1 + else + echo "ACVM version is a match." + fi diff --git a/.github/workflows/wasm.yml b/.github/workflows/wasm.yml index f7b36738482..71972d5e5c5 100644 --- a/.github/workflows/wasm.yml +++ b/.github/workflows/wasm.yml @@ -17,7 +17,7 @@ jobs: strategy: matrix: target: [x86_64-unknown-linux-gnu] - + steps: - name: Checkout Noir repo uses: actions/checkout@v4 @@ -81,7 +81,7 @@ jobs: - name: Build wasm package run: | - nix build -L .#wasm + nix build -L .#noir_wasm - name: Export cache from nix store if: ${{ steps.cache.outputs.cache-hit != 'true' && github.event_name != 'merge_group' }} @@ -96,7 +96,7 @@ jobs: key: ${{ steps.cache.outputs.cache-primary-key }} - name: Dereference symlink - run: echo "UPLOAD_PATH=$(readlink -f result)" >> $GITHUB_ENV + run: echo "UPLOAD_PATH=$(readlink -f ./result/noir_wasm)" >> $GITHUB_ENV - name: Upload artifact uses: actions/upload-artifact@v3 diff --git a/Cargo.lock b/Cargo.lock index 8cccc14d393..646d422f686 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -46,24 +46,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "acvm-backend-barretenberg" -version = "0.11.0" -dependencies = [ - "acvm", - "build-target", - "const_format", - "dirs", - "flate2", - "reqwest", - "serde", - "serde_json", - "tar", - "tempfile", - "test-binary", - "thiserror", -] - [[package]] name = "acvm_blackbox_solver" version = "0.26.1" @@ -402,6 +384,25 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "backend-interface" +version = "0.11.0" +dependencies = [ + "acvm", + "bb_abstraction_leaks", + "build-target", + "const_format", + "dirs", + "flate2", + "reqwest", + "serde", + "serde_json", + "tar", + "tempfile", + "test-binary", + "thiserror", +] + [[package]] name = "backtrace" version = "0.3.68" @@ -435,6 +436,15 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bb_abstraction_leaks" +version = "0.11.0" +dependencies = [ + "acvm", + "build-target", + "const_format", +] + [[package]] name = "bincode" version = "1.3.3" @@ -2186,10 +2196,11 @@ name = "nargo_cli" version = "0.12.0" dependencies = [ "acvm", - "acvm-backend-barretenberg", "assert_cmd", "assert_fs", "async-lsp", + "backend-interface", + "bb_abstraction_leaks", "build-data", "clap", "color-eyre", diff --git a/Cargo.toml b/Cargo.toml index dac1c15e0a5..2879823163e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,8 @@ members = [ "compiler/utils/arena", "compiler/utils/iter-extended", # Crates related to tooling built ontop of the Noir compiler - "tooling/acvm_backend_barretenberg", + "tooling/backend_interface", + "tooling/bb_abstraction_leaks", "tooling/nargo", "tooling/nargo_cli", "tooling/nargo_toml", @@ -41,6 
+42,7 @@ nargo_cli = { path = "tooling/nargo_cli" } nargo_toml = { path = "tooling/nargo_toml" } noir_lsp = { path = "tooling/lsp" } noirc_abi = { path = "tooling/noirc_abi" } +bb_abstraction_leaks = { path = "tooling/bb_abstraction_leaks" } noirc_driver = { path = "compiler/noirc_driver" } noirc_errors = { path = "compiler/noirc_errors" } noirc_evaluator = { path = "compiler/noirc_evaluator" } diff --git a/compiler/integration-tests/.eslintignore b/compiler/integration-tests/.eslintignore new file mode 100644 index 00000000000..3c3629e647f --- /dev/null +++ b/compiler/integration-tests/.eslintignore @@ -0,0 +1 @@ +node_modules diff --git a/compiler/integration-tests/.eslintrc.js b/compiler/integration-tests/.eslintrc.js index d17e4ef520c..33335c2a877 100644 --- a/compiler/integration-tests/.eslintrc.js +++ b/compiler/integration-tests/.eslintrc.js @@ -1,19 +1,3 @@ module.exports = { - root: true, - parser: "@typescript-eslint/parser", - plugins: ["@typescript-eslint", "prettier"], - extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], - rules: { - "comma-spacing": ["error", { before: false, after: true }], - // "no-unused-vars": "off", - "@typescript-eslint/no-unused-vars": [ - "warn", // or "error" - { - argsIgnorePattern: "^_", - varsIgnorePattern: "^_", - caughtErrorsIgnorePattern: "^_", - }, - ], - "prettier/prettier": "error", - } + extends: ["../../.eslintrc.js"], }; diff --git a/compiler/integration-tests/package.json b/compiler/integration-tests/package.json index 8ba0d2c725a..4272c6d24c4 100644 --- a/compiler/integration-tests/package.json +++ b/compiler/integration-tests/package.json @@ -7,14 +7,14 @@ "build": "echo Integration Test build step", "test": "yarn test:browser", "test:browser": "web-test-runner", - "test:integration:browser": "web-test-runner test//integration/browser/**/*.test.ts", - "test:integration:browser:watch": "web-test-runner test/integration/browser/**/*.test.ts --watch" + "test:integration:browser": "web-test-runner test/integration/browser/**/*.test.ts", + "test:integration:browser:watch": "web-test-runner test/integration/browser/**/*.test.ts --watch", + "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "^0.5.1", - "@noir-lang/acvm_js": "^0.26.0", + "@aztec/bb.js": "^0.6.7", + "@noir-lang/noir_js": "workspace:*", "@noir-lang/noir_wasm": "workspace:*", - "@noir-lang/noirc_abi": "workspace:*", "@noir-lang/source-resolver": "workspace:*", "@web/dev-server-esbuild": "^0.3.6", "@web/test-runner": "^0.15.3", diff --git a/compiler/integration-tests/test/index.d.ts b/compiler/integration-tests/test/index.d.ts new file mode 100644 index 00000000000..4db9e01cacd --- /dev/null +++ b/compiler/integration-tests/test/index.d.ts @@ -0,0 +1 @@ +declare module "@aztec/bb.js"; diff --git a/compiler/integration-tests/test/integration/browser/compile_prove_verify.test.ts b/compiler/integration-tests/test/integration/browser/compile_prove_verify.test.ts index e2a88e331d0..5b390d566a8 100644 --- a/compiler/integration-tests/test/integration/browser/compile_prove_verify.test.ts +++ b/compiler/integration-tests/test/integration/browser/compile_prove_verify.test.ts @@ -1,22 +1,19 @@ -import { expect } from '@esm-bundle/chai'; +import { expect } from "@esm-bundle/chai"; import { initializeResolver } from "@noir-lang/source-resolver"; import newCompiler, { - compile, - init_log_level as compilerLogLevel + compile, + init_log_level as compilerLogLevel, } from "@noir-lang/noir_wasm"; -import { decompressSync as gunzip } from 'fflate'; -import newABICoder, { abiEncode } from "@noir-lang/noirc_abi"; -import initACVM, { - executeCircuit, - WitnessMap, - compressWitness, -} from "@noir-lang/acvm_js"; +import { Barretenberg, RawBuffer, Crs } from "@aztec/bb.js"; +import { acvm, noirc } from "@noir-lang/noir_js"; +import { decompressSync as gunzip } from "fflate"; -// @ts-ignore -import { Barretenberg, RawBuffer, Crs } from '@aztec/bb.js'; +import * as TOML from "smol-toml"; -import * as TOML from 'smol-toml' +const { default: initACVM, executeCircuit, compressWitness } = acvm; +const { default: newABICoder, abiEncode } = noirc; +type WitnessMap = acvm.WitnessMap; await newCompiler(); await newABICoder(); @@ -25,133 +22,141 @@ await initACVM(); compilerLogLevel("DEBUG"); async function getFile(url: URL): Promise<string> { + const response = await fetch(url); - const response = await fetch(url) + if (!response.ok) throw new Error("Network response was not OK"); - if (!response.ok) throw new Error('Network response was not OK'); - - return await response.text(); + return await response.text(); } const CIRCUIT_SIZE = 2 ** 19; - const test_cases = [ - { - case: "tooling/nargo_cli/tests/execution_success/1_mul" - }, - { - case: "tooling/nargo_cli/tests/execution_success/double_verify_proof" - } + { + case: "tooling/nargo_cli/tests/execution_success/1_mul", + }, + { + case: "tooling/nargo_cli/tests/execution_success/double_verify_proof", + }, ]; const numberOfThreads = navigator.hardwareConcurrency || 1; -let suite = Mocha.Suite.create(mocha.suite, "Noir end to end test"); +const suite = Mocha.Suite.create(mocha.suite, "Noir end to end test"); -suite.timeout(60*20e3);//20mins +suite.timeout(60 * 20e3); //20mins test_cases.forEach((testInfo) => { - const test_name = testInfo.case.split("/").pop(); - const mochaTest = new Mocha.Test(`${test_name} (Compile, Execute, Prove, Verify)`, async () => { - - const base_relative_path = "../../../../.."; - const test_case = testInfo.case; - - const noir_source_url = new URL(`${base_relative_path}/${test_case}/src/main.nr`, import.meta.url); - const prover_toml_url = new
URL(`${base_relative_path}/${test_case}/Prover.toml`, import.meta.url); - - const noir_source = await getFile(noir_source_url); - const prover_toml = await getFile(prover_toml_url); - - expect(noir_source).to.be.a.string; - - initializeResolver((id: String) => { - console.log("Resolving:", id); - return noir_source; - }); - - const inputs = TOML.parse(prover_toml); - - expect(inputs, "Prover.toml").to.be.an('object'); - - let compile_output; - - try { - - compile_output = await compile({}); - - expect(await compile_output, "Compile output ").to.be.an('object'); - - } catch (e) { - expect(e, "Compilation Step").to.not.be.an('error'); - throw e; - } - - - let witnessMap: WitnessMap; - try { - - witnessMap = abiEncode(compile_output.abi, inputs, null); - - } catch (e) { - expect(e, "Abi Encoding Step").to.not.be.an('error'); - throw e; - } - - let solvedWitness: WitnessMap; - let compressedByteCode; - try { - compressedByteCode = Uint8Array.from(atob(compile_output.circuit), c => c.charCodeAt(0)); - - solvedWitness = await executeCircuit( - compressedByteCode, - witnessMap, - () => { - throw Error("unexpected oracle"); - } - ); - - } catch (e) { - expect(e, "Abi Encoding Step").to.not.be.an('error'); - throw e; - } - - try { - const compressedWitness = compressWitness(solvedWitness); - const acirUint8Array = gunzip(compressedByteCode); - const witnessUint8Array = gunzip(compressedWitness); - - const isRecursive = true; - const api = await Barretenberg.new(numberOfThreads); - await api.commonInitSlabAllocator(CIRCUIT_SIZE); - - // Plus 1 needed! - const crs = await Crs.new(CIRCUIT_SIZE + 1); - await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - - const acirComposer = await api.acirNewAcirComposer(CIRCUIT_SIZE); - - // This took ~6.5 minutes! - const proof = await api.acirCreateProof( - acirComposer, - acirUint8Array, - witnessUint8Array, - isRecursive - ); - - // And this took ~5 minutes! 
- const verified = await api.acirVerifyProof(acirComposer, proof, isRecursive); - - expect(verified).to.be.true; - - } catch (e) { - expect(e, "Proving and Verifying").to.not.be.an('error'); - throw e; - } - - }); - - suite.addTest(mochaTest); + const test_name = testInfo.case.split("/").pop(); + const mochaTest = new Mocha.Test( + `${test_name} (Compile, Execute, Prove, Verify)`, + async () => { + const base_relative_path = "../../../../.."; + const test_case = testInfo.case; + + const noir_source_url = new URL( + `${base_relative_path}/${test_case}/src/main.nr`, + import.meta.url + ); + const prover_toml_url = new URL( + `${base_relative_path}/${test_case}/Prover.toml`, + import.meta.url + ); + + const noir_source = await getFile(noir_source_url); + const prover_toml = await getFile(prover_toml_url); + + expect(noir_source).to.be.a.string; + + initializeResolver((id: string) => { + console.log("Resolving:", id); + return noir_source; + }); + + const inputs = TOML.parse(prover_toml); + + expect(inputs, "Prover.toml").to.be.an("object"); + + let compile_output; + + try { + compile_output = await compile({}); + + expect(await compile_output, "Compile output ").to.be.an("object"); + } catch (e) { + expect(e, "Compilation Step").to.not.be.an("error"); + throw e; + } + + let witnessMap: WitnessMap; + try { + witnessMap = abiEncode(compile_output.abi, inputs, null); + } catch (e) { + expect(e, "Abi Encoding Step").to.not.be.an("error"); + throw e; + } + + let solvedWitness: WitnessMap; + let compressedByteCode; + try { + compressedByteCode = Uint8Array.from( + atob(compile_output.circuit), + (c) => c.charCodeAt(0) + ); + + solvedWitness = await executeCircuit( + compressedByteCode, + witnessMap, + () => { + throw Error("unexpected oracle"); + } + ); + } catch (e) { + expect(e, "Abi Encoding Step").to.not.be.an("error"); + throw e; + } + + try { + const compressedWitness = compressWitness(solvedWitness); + const acirUint8Array = gunzip(compressedByteCode); + const witnessUint8Array = gunzip(compressedWitness); + + const isRecursive = true; + const api = await Barretenberg.new(numberOfThreads); + await api.commonInitSlabAllocator(CIRCUIT_SIZE); + + // Plus 1 needed! + const crs = await Crs.new(CIRCUIT_SIZE + 1); + await api.srsInitSrs( + new RawBuffer(crs.getG1Data()), + crs.numPoints, + new RawBuffer(crs.getG2Data()) + ); + + const acirComposer = await api.acirNewAcirComposer(CIRCUIT_SIZE); + + // This took ~6.5 minutes! + const proof = await api.acirCreateProof( + acirComposer, + acirUint8Array, + witnessUint8Array, + isRecursive + ); + + // And this took ~5 minutes! 
+ const verified = await api.acirVerifyProof( + acirComposer, + proof, + isRecursive + ); + + expect(verified).to.be.true; + } catch (e) { + expect(e, "Proving and Verifying").to.not.be.an("error"); + throw e; + } + } + ); + suite.addTest(mochaTest); }); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs index 53e86a00e75..22814a22889 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs @@ -1,29 +1,24 @@ pub(crate) mod brillig_black_box; pub(crate) mod brillig_block; +pub(crate) mod brillig_block_variables; pub(crate) mod brillig_directive; pub(crate) mod brillig_fn; pub(crate) mod brillig_slice_ops; +mod variable_liveness; use self::{brillig_block::BrilligBlock, brillig_fn::FunctionContext}; use super::brillig_ir::{artifact::BrilligArtifact, BrilligContext}; -use crate::ssa::ir::{function::Function, post_order::PostOrder}; -use fxhash::FxHashMap as HashMap; +use crate::ssa::ir::function::Function; /// Converting an SSA function into Brillig bytecode. pub(crate) fn convert_ssa_function(func: &Function, enable_debug_trace: bool) -> BrilligArtifact { - let mut reverse_post_order = Vec::new(); - reverse_post_order.extend_from_slice(PostOrder::with_function(func).as_slice()); - reverse_post_order.reverse(); - - let mut function_context = FunctionContext { - function_id: func.id(), - ssa_value_to_brillig_variable: HashMap::default(), - }; - let mut brillig_context = BrilligContext::new(enable_debug_trace); + let mut function_context = FunctionContext::new(func, &mut brillig_context); + brillig_context.enter_context(FunctionContext::function_id_to_function_label(func.id())); - for block in reverse_post_order { + + for block in function_context.blocks.clone() { BrilligBlock::compile(&mut function_context, &mut brillig_context, block, &func.dfg); } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index c54be4faa50..eb2eb1e5f24 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1,5 +1,6 @@ use crate::brillig::brillig_ir::{ - BrilligBinaryOp, BrilligContext, BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, + extract_heap_array, extract_register, extract_registers, BrilligBinaryOp, BrilligContext, + BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, }; use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::{ @@ -15,9 +16,11 @@ use crate::ssa::ir::{ use acvm::acir::brillig::{BinaryFieldOp, BinaryIntOp, HeapArray, RegisterIndex, RegisterOrMemory}; use acvm::brillig_vm::brillig::HeapVector; use acvm::FieldElement; +use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use iter_extended::vecmap; use super::brillig_black_box::convert_black_box_call; +use super::brillig_block_variables::BlockVariables; use super::brillig_fn::FunctionContext; /// Generate the compilation artifacts for compiling a function into brillig bytecode. @@ -27,6 +30,10 @@ pub(crate) struct BrilligBlock<'block> { pub(crate) block_id: BasicBlockId, /// Context for creating brillig opcodes pub(crate) brillig_context: &'block mut BrilligContext, + /// Tracks the available variables during the codegen of the block + pub(crate) variables: BlockVariables, + /// For each instruction, the set of values that are no longer used after it.
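+ /// Computed from the function's liveness analysis, so the block can free a value's registers right after its last use.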
+ pub(crate) last_uses: HashMap<InstructionId, HashSet<ValueId>>, } impl<'block> BrilligBlock<'block> { @@ -37,7 +44,21 @@ impl<'block> BrilligBlock<'block> { block_id: BasicBlockId, dfg: &DataFlowGraph, ) { - let mut brillig_block = BrilligBlock { function_context, block_id, brillig_context }; + let live_in = function_context.liveness.get_live_in(&block_id); + let variables = + BlockVariables::new(live_in.clone(), function_context.all_block_parameters()); + + brillig_context.set_allocated_registers( + variables + .get_available_variables(function_context) + .into_iter() + .flat_map(extract_registers) + .collect(), + ); + let last_uses = function_context.liveness.get_last_uses(&block_id).clone(); + + let mut brillig_block = + BrilligBlock { function_context, block_id, brillig_context, variables, last_uses }; brillig_block.convert_block(dfg); } @@ -59,6 +80,7 @@ impl<'block> BrilligBlock<'block> { // Process the block's terminator instruction let terminator_instruction = block.terminator().expect("block is expected to be constructed"); + self.convert_ssa_terminator(terminator_instruction, dfg); } @@ -110,27 +132,33 @@ impl<'block> BrilligBlock<'block> { self.create_block_label_for_current_function(*else_destination), ); } - TerminatorInstruction::Jmp { destination, arguments, call_stack: _ } => { - let target = &dfg[*destination]; - for (src, dest) in arguments.iter().zip(target.parameters()) { - // Destination variable might have already been created by another block that jumps to this target - let destination = self.function_context.get_or_create_variable( - self.brillig_context, + TerminatorInstruction::Jmp { + destination: destination_block, + arguments, + call_stack: _, + } => { + let target_block = &dfg[*destination_block]; + for (src, dest) in arguments.iter().zip(target_block.parameters()) { + // Destinations are block parameters so they should have been allocated previously. + let destination = self.variables.get_block_param( + self.function_context, + *destination_block, *dest, dfg, ); let source = self.convert_ssa_value(*src, dfg); self.pass_variable(source, destination); } - self.brillig_context - .jump_instruction(self.create_block_label_for_current_function(*destination)); + self.brillig_context.jump_instruction( + self.create_block_label_for_current_function(*destination_block), + ); } TerminatorInstruction::Return { return_values } => { let return_registers: Vec<_> = return_values .iter() .flat_map(|value_id| { let return_variable = self.convert_ssa_value(*value_id, dfg); - self.function_context.extract_registers(return_variable) + extract_registers(return_variable) }) .collect(); self.brillig_context.return_instruction(&return_registers); @@ -186,9 +214,9 @@ impl<'block> BrilligBlock<'block> { // Be a valid pointer to the array. // For slices, two registers are passed, the pointer to the data and a register holding the size of the slice. Type::Numeric(_) | Type::Array(..) | Type::Slice(..) | Type::Reference => { - // This parameter variable might have already been created by another block that jumps to this one.
- self.function_context.get_or_create_variable( - self.brillig_context, + self.variables.get_block_param( + self.function_context, + self.block_id, *param_id, dfg, ); @@ -207,7 +235,8 @@ impl<'block> BrilligBlock<'block> { match instruction { Instruction::Binary(binary) => { - let result_register = self.function_context.create_register_variable( + let result_register = self.variables.define_register_variable( + self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], dfg, @@ -228,7 +257,8 @@ impl<'block> BrilligBlock<'block> { } Instruction::Allocate => { let result_value = dfg.instruction_results(instruction_id)[0]; - let address_register = self.function_context.create_register_variable( + let address_register = self.variables.define_register_variable( + self.function_context, self.brillig_context, result_value, dfg, @@ -242,7 +272,8 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.store_variable_instruction(address_register, source_variable); } Instruction::Load { address } => { - let target_variable = self.function_context.create_variable( + let target_variable = self.variables.define_variable( + self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], dfg, @@ -254,7 +285,8 @@ impl<'block> BrilligBlock<'block> { } Instruction::Not(value) => { let condition_register = self.convert_ssa_register_value(*value, dfg); - let result_register = self.function_context.create_register_variable( + let result_register = self.variables.define_register_variable( + self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], dfg, @@ -343,7 +375,8 @@ impl<'block> BrilligBlock<'block> { ); } Value::Intrinsic(Intrinsic::ArrayLen) => { - let result_register = self.function_context.create_register_variable( + let result_register = self.variables.define_register_variable( + self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], dfg, @@ -354,8 +387,7 @@ impl<'block> BrilligBlock<'block> { // or an array in the case of an array. 
if let Type::Numeric(_) = dfg.type_of_value(param_id) { let len_variable = self.convert_ssa_value(arguments[0], dfg); - let len_register_index = - self.function_context.extract_register(len_variable); + let len_register_index = extract_register(len_variable); self.brillig_context.mov_instruction(result_register, len_register_index); } else { self.convert_ssa_array_len(arguments[0], result_register, dfg); @@ -383,14 +415,16 @@ impl<'block> BrilligBlock<'block> { let results = dfg.instruction_results(instruction_id); - let target_len_variable = self.function_context.get_or_create_variable( + let target_len_variable = self.variables.define_variable( + self.function_context, self.brillig_context, results[0], dfg, ); - let target_len = self.function_context.extract_register(target_len_variable); + let target_len = extract_register(target_len_variable); - let target_slice = self.function_context.create_variable( + let target_slice = self.variables.define_variable( + self.function_context, self.brillig_context, results[1], dfg, @@ -415,14 +449,16 @@ impl<'block> BrilligBlock<'block> { let results = dfg.instruction_results(instruction_id); - let target_len_variable = self.function_context.get_or_create_variable( + let target_len_variable = self.variables.define_variable( + self.function_context, self.brillig_context, results[0], dfg, ); - let target_len = self.function_context.extract_register(target_len_variable); + let target_len = extract_register(target_len_variable); - let target_slice = self.function_context.create_variable( + let target_slice = self.variables.define_variable( + self.function_context, self.brillig_context, results[1], dfg, @@ -450,7 +486,8 @@ impl<'block> BrilligBlock<'block> { }, Instruction::Truncate { value, .. } => { let result_ids = dfg.instruction_results(instruction_id); - let destination_register = self.function_context.create_register_variable( + let destination_register = self.variables.define_register_variable( + self.function_context, self.brillig_context, result_ids[0], dfg, @@ -460,7 +497,8 @@ impl<'block> BrilligBlock<'block> { } Instruction::Cast(value, target_type) => { let result_ids = dfg.instruction_results(instruction_id); - let destination_register = self.function_context.create_register_variable( + let destination_register = self.variables.define_register_variable( + self.function_context, self.brillig_context, result_ids[0], dfg, @@ -475,8 +513,12 @@ impl<'block> BrilligBlock<'block> { } Instruction::ArrayGet { array, index } => { let result_ids = dfg.instruction_results(instruction_id); - let destination_variable = - self.function_context.create_variable(self.brillig_context, result_ids[0], dfg); + let destination_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + result_ids[0], + dfg, + ); let array_variable = self.convert_ssa_value(*array, dfg); let array_pointer = match array_variable { @@ -498,8 +540,12 @@ impl<'block> BrilligBlock<'block> { let value_variable = self.convert_ssa_value(*value, dfg); let result_ids = dfg.instruction_results(instruction_id); - let destination_variable = - self.function_context.create_variable(self.brillig_context, result_ids[0], dfg); + let destination_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + result_ids[0], + dfg, + ); self.convert_ssa_array_set( source_variable, @@ -511,6 +557,14 @@ impl<'block> BrilligBlock<'block> { _ => todo!("ICE: Instruction not supported {instruction:?}"), }; + let dead_variables = self + 
.last_uses + .get(&instruction_id) + .expect("Last uses for instruction should have been computed"); + + for dead_variable in dead_variables { + self.variables.remove_variable(dead_variable); + } self.brillig_context.set_call_stack(CallStack::new()); } @@ -526,7 +580,7 @@ impl<'block> BrilligBlock<'block> { .iter() .flat_map(|argument_id| { let variable_to_pass = self.convert_ssa_value(*argument_id, dfg); - self.function_context.extract_registers(variable_to_pass) + extract_registers(variable_to_pass) }) .collect(); @@ -535,8 +589,14 @@ impl<'block> BrilligBlock<'block> { // Create label for the function that will be called let label_of_function_to_call = FunctionContext::function_id_to_function_label(func_id); - let saved_registers = - self.brillig_context.pre_call_save_registers_prep_args(&argument_registers); + let variables_to_save = self.variables.get_available_variables(self.function_context); + + let saved_registers = self + .brillig_context + .pre_call_save_registers_prep_args(&argument_registers, &variables_to_save); + + // We don't save and restore constants, so we dump them before an external call since the callee might use the registers where they are allocated. + self.variables.dump_constants(); // Call instruction, which will interpret above registers 0..num args self.brillig_context.add_external_call_instruction(label_of_function_to_call); @@ -546,15 +606,18 @@ impl<'block> BrilligBlock<'block> { // Allocate the registers for the variables where we are assigning the returns let variables_assigned_to = vecmap(result_ids, |result_id| { - self.function_context.create_variable(self.brillig_context, *result_id, dfg) + self.variables.define_variable( + self.function_context, + self.brillig_context, + *result_id, + dfg, + ) }); // Collect the registers that should have been returned let returned_registers: Vec<RegisterIndex> = variables_assigned_to .iter() - .flat_map(|returned_variable| { - self.function_context.extract_registers(*returned_variable) - }) + .flat_map(|returned_variable| extract_registers(*returned_variable)) .collect(); assert!( @@ -681,7 +744,8 @@ impl<'block> BrilligBlock<'block> { let results = dfg.instruction_results(instruction_id); match intrinsic { Value::Intrinsic(Intrinsic::SlicePushBack) => { - let target_len = match self.function_context.get_or_create_variable( + let target_len = match self.variables.define_variable( + self.function_context, self.brillig_context, results[0], dfg, @@ -690,8 +754,12 @@ _ => unreachable!("ICE: first value of a slice must be a register index"), }; - let target_variable = - self.function_context.create_variable(self.brillig_context, results[1], dfg); + let target_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + results[1], + dfg, + ); let target_vector = self.brillig_context.extract_heap_vector(target_variable); let item_values = vecmap(&arguments[2..element_size + 2], |arg| { self.convert_ssa_value(*arg, dfg) }); self.slice_push_back_operation(target_vector, source_vector, &item_values); } Value::Intrinsic(Intrinsic::SlicePushFront) => { - let target_len = match self.function_context.get_or_create_variable( + let target_len = match self.variables.define_variable( + self.function_context, self.brillig_context, results[0], dfg, @@ -712,8 +781,12 @@ _ => unreachable!("ICE: first value of a slice must be a register index"), }; - let target_variable = - self.function_context.create_variable(self.brillig_context,
results[1], dfg); + let target_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + results[1], + dfg, + ); let target_vector = self.brillig_context.extract_heap_vector(target_variable); let item_values = vecmap(&arguments[2..element_size + 2], |arg| { self.convert_ssa_value(*arg, dfg) @@ -724,7 +797,8 @@ impl<'block> BrilligBlock<'block> { self.slice_push_front_operation(target_vector, source_vector, &item_values); } Value::Intrinsic(Intrinsic::SlicePopBack) => { - let target_len = match self.function_context.get_or_create_variable( + let target_len = match self.variables.define_variable( + self.function_context, self.brillig_context, results[0], dfg, @@ -733,13 +807,22 @@ impl<'block> BrilligBlock<'block> { _ => unreachable!("ICE: first value of a slice must be a register index"), }; - let target_variable = - self.function_context.create_variable(self.brillig_context, results[1], dfg); + let target_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + results[1], + dfg, + ); let target_vector = self.brillig_context.extract_heap_vector(target_variable); let pop_variables = vecmap(&results[2..element_size + 2], |result| { - self.function_context.create_variable(self.brillig_context, *result, dfg) + self.variables.define_variable( + self.function_context, + self.brillig_context, + *result, + dfg, + ) }); self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); @@ -747,7 +830,8 @@ impl<'block> BrilligBlock<'block> { self.slice_pop_back_operation(target_vector, source_vector, &pop_variables); } Value::Intrinsic(Intrinsic::SlicePopFront) => { - let target_len = match self.function_context.get_or_create_variable( + let target_len = match self.variables.define_variable( + self.function_context, self.brillig_context, results[element_size], dfg, @@ -757,10 +841,16 @@ impl<'block> BrilligBlock<'block> { }; let pop_variables = vecmap(&results[0..element_size], |result| { - self.function_context.create_variable(self.brillig_context, *result, dfg) + self.variables.define_variable( + self.function_context, + self.brillig_context, + *result, + dfg, + ) }); - let target_variable = self.function_context.create_variable( + let target_variable = self.variables.define_variable( + self.function_context, self.brillig_context, results[element_size + 1], dfg, @@ -772,7 +862,8 @@ impl<'block> BrilligBlock<'block> { self.slice_pop_front_operation(target_vector, source_vector, &pop_variables); } Value::Intrinsic(Intrinsic::SliceInsert) => { - let target_len = match self.function_context.get_or_create_variable( + let target_len = match self.variables.define_variable( + self.function_context, self.brillig_context, results[0], dfg, @@ -782,8 +873,12 @@ impl<'block> BrilligBlock<'block> { }; let target_id = results[1]; - let target_variable = - self.function_context.create_variable(self.brillig_context, target_id, dfg); + let target_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + target_id, + dfg, + ); let target_vector = self.brillig_context.extract_heap_vector(target_variable); @@ -810,7 +905,8 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_register(converted_index); } Value::Intrinsic(Intrinsic::SliceRemove) => { - let target_len = match self.function_context.get_or_create_variable( + let target_len = match self.variables.define_variable( + self.function_context, self.brillig_context, results[0], dfg, @@ -821,8 +917,12 @@ impl<'block> 
BrilligBlock<'block> { let target_id = results[1]; - let target_variable = - self.function_context.create_variable(self.brillig_context, target_id, dfg); + let target_variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + target_id, + dfg, + ); let target_vector = self.brillig_context.extract_heap_vector(target_variable); // Remove if indexing in remove is changed to flattened indexing @@ -838,7 +938,12 @@ impl<'block> BrilligBlock<'block> { ); let removed_items = vecmap(&results[2..element_size + 2], |result| { - self.function_context.create_variable(self.brillig_context, *result, dfg) + self.variables.define_variable( + self.function_context, + self.brillig_context, + *result, + dfg, + ) }); self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); @@ -872,9 +977,8 @@ impl<'block> BrilligBlock<'block> { dfg: &DataFlowGraph, binary_op: BinaryIntOp, ) { - let source_len_variable = - self.function_context.get_or_create_variable(self.brillig_context, source_value, dfg); - let source_len = self.function_context.extract_register(source_len_variable); + let source_len_variable = self.convert_ssa_value(source_value, dfg); + let source_len = extract_register(source_len_variable); self.brillig_context.usize_op(source_len, target_len, binary_op, 1); } @@ -958,71 +1062,78 @@ impl<'block> BrilligBlock<'block> { /// Converts an SSA `ValueId` into a `RegisterOrMemory`. Initializes if necessary. fn convert_ssa_value(&mut self, value_id: ValueId, dfg: &DataFlowGraph) -> RegisterOrMemory { - let value = &dfg[dfg.resolve(value_id)]; + let value_id = dfg.resolve(value_id); + let value = &dfg[value_id]; match value { Value::Param { .. } | Value::Instruction { .. } => { // All block parameters and instruction results should have already been // converted to registers so we fetch from the cache. - self.function_context.get_variable(value_id, dfg) + self.variables.get_allocation(self.function_context, value_id, dfg) } Value::NumericConstant { constant, .. } => { // Constants might have been converted previously or not, so we get or create and // (re)initialize the value inside. - let new_variable = self.function_context.get_or_create_variable( - self.brillig_context, - value_id, - dfg, - ); - let register_index = self.function_context.extract_register(new_variable); + if let Some(variable) = self.variables.get_constant(value_id, dfg) { + variable + } else { + let new_variable = + self.variables.allocate_constant(self.brillig_context, value_id, dfg); + let register_index = extract_register(new_variable); - self.brillig_context.const_instruction(register_index, (*constant).into()); - new_variable + self.brillig_context.const_instruction(register_index, (*constant).into()); + new_variable + } } Value::Array { array, .. 
} => { - let new_variable = self.function_context.get_or_create_variable( - self.brillig_context, - value_id, - dfg, - ); + if let Some(variable) = self.variables.get_constant(value_id, dfg) { + variable + } else { + let new_variable = + self.variables.allocate_constant(self.brillig_context, value_id, dfg); - // Initialize the variable - let pointer = match new_variable { - RegisterOrMemory::HeapArray(heap_array) => { - self.brillig_context - .allocate_fixed_length_array(heap_array.pointer, array.len()); + // Initialize the variable + let pointer = match new_variable { + RegisterOrMemory::HeapArray(heap_array) => { + self.brillig_context + .allocate_fixed_length_array(heap_array.pointer, array.len()); - heap_array.pointer - } - RegisterOrMemory::HeapVector(heap_vector) => { - self.brillig_context - .const_instruction(heap_vector.size, array.len().into()); - self.brillig_context - .allocate_array_instruction(heap_vector.pointer, heap_vector.size); + heap_array.pointer + } + RegisterOrMemory::HeapVector(heap_vector) => { + self.brillig_context + .const_instruction(heap_vector.size, array.len().into()); + self.brillig_context + .allocate_array_instruction(heap_vector.pointer, heap_vector.size); - heap_vector.pointer + heap_vector.pointer + } + _ => unreachable!( + "ICE: Cannot initialize array value created as {new_variable:?}" + ), + }; + + // Write the items + + // Allocate a register for the iterator + let iterator_register = self.brillig_context.make_constant(0_usize.into()); + + for element_id in array.iter() { + let element_variable = self.convert_ssa_value(*element_id, dfg); + // Store the item in memory + self.store_variable_in_array(pointer, iterator_register, element_variable); + // Increment the iterator + self.brillig_context.usize_op_in_place( + iterator_register, + BinaryIntOp::Add, + 1, + ); } - _ => unreachable!( - "ICE: Cannot initialize array value created as {new_variable:?}" - ), - }; - - // Write the items - // Allocate a register for the iterator - let iterator_register = self.brillig_context.make_constant(0_usize.into()); + self.brillig_context.deallocate_register(iterator_register); - for element_id in array.iter() { - let element_variable = self.convert_ssa_value(*element_id, dfg); - // Store the item in memory - self.store_variable_in_array(pointer, iterator_register, element_variable); - // Increment the iterator - self.brillig_context.usize_op_in_place(iterator_register, BinaryIntOp::Add, 1); + new_variable } - - self.brillig_context.deallocate_register(iterator_register); - - new_variable } _ => { todo!("ICE: Cannot convert value {value:?}") @@ -1037,7 +1148,7 @@ impl<'block> BrilligBlock<'block> { dfg: &DataFlowGraph, ) -> RegisterIndex { let variable = self.convert_ssa_value(value_id, dfg); - self.function_context.extract_register(variable) + extract_register(variable) } fn allocate_external_call_result( @@ -1047,20 +1158,31 @@ impl<'block> BrilligBlock<'block> { ) -> RegisterOrMemory { let typ = dfg[result].get_type(); match typ { - Type::Numeric(_) => { - self.function_context.create_variable(self.brillig_context, result, dfg) - } + Type::Numeric(_) => self.variables.define_variable( + self.function_context, + self.brillig_context, + result, + dfg, + ), Type::Array(..) 
=> { - let variable = - self.function_context.create_variable(self.brillig_context, result, dfg); - let array = self.function_context.extract_heap_array(variable); + let variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + result, + dfg, + ); + let array = extract_heap_array(variable); self.brillig_context.allocate_fixed_length_array(array.pointer, array.size); variable } Type::Slice(_) => { - let variable = - self.function_context.create_variable(self.brillig_context, result, dfg); + let variable = self.variables.define_variable( + self.function_context, + self.brillig_context, + result, + dfg, + ); let vector = self.brillig_context.extract_heap_vector(variable); // Set the pointer to the current stack frame diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs new file mode 100644 index 00000000000..c1e6754ef50 --- /dev/null +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -0,0 +1,198 @@ +use acvm::brillig_vm::brillig::{HeapArray, HeapVector, RegisterIndex, RegisterOrMemory}; +use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; + +use crate::{ + brillig::brillig_ir::{extract_register, BrilligContext}, + ssa::ir::{ + basic_block::BasicBlockId, + dfg::DataFlowGraph, + types::{CompositeType, Type}, + value::ValueId, + }, +}; + +use super::brillig_fn::FunctionContext; + +#[derive(Debug, Default)] +pub(crate) struct BlockVariables { + available_variables: HashSet<ValueId>, + available_constants: HashMap<ValueId, RegisterOrMemory>, +} + +impl BlockVariables { + /// Creates a BlockVariables instance. It uses the variables that are live-in to the block and the globally available variables (block parameters). + pub(crate) fn new(live_in: HashSet<ValueId>, all_block_parameters: HashSet<ValueId>) -> Self { + BlockVariables { + available_variables: live_in + .into_iter() + .chain(all_block_parameters.into_iter()) + .collect(), + ..Default::default() + } + } + + /// Returns all non-constant variables that have not been removed at this point. + pub(crate) fn get_available_variables( + &self, + function_context: &mut FunctionContext, + ) -> Vec<RegisterOrMemory> { + self.available_variables + .iter() + .map(|value_id| { + function_context + .ssa_value_allocations + .get(value_id) + .unwrap_or_else(|| panic!("ICE: Value not found in cache {value_id}")) + }) + .cloned() + .collect() + } + + /// For a given non-constant SSA value id, defines the variable and returns the corresponding cached allocation. + pub(crate) fn define_variable( + &mut self, + function_context: &mut FunctionContext, + brillig_context: &mut BrilligContext, + value_id: ValueId, + dfg: &DataFlowGraph, + ) -> RegisterOrMemory { + let value_id = dfg.resolve(value_id); + let variable = allocate_value(value_id, brillig_context, dfg); + + if function_context.ssa_value_allocations.insert(value_id, variable).is_some() { + unreachable!("ICE: ValueId {value_id:?} was already in cache"); + } + + self.available_variables.insert(value_id); + + variable + } + + /// Defines a variable that fits in a single register and returns the allocated register.
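+ /// The variable is expected to fit in a single register; extract_register raises an ICE otherwise.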
+ pub(crate) fn define_register_variable( + &mut self, + function_context: &mut FunctionContext, + brillig_context: &mut BrilligContext, + value: ValueId, + dfg: &DataFlowGraph, + ) -> RegisterIndex { + let variable = self.define_variable(function_context, brillig_context, value, dfg); + extract_register(variable) + } + + /// Removes a variable so it can no longer be used within this block. + pub(crate) fn remove_variable(&mut self, value_id: &ValueId) { + self.available_variables.remove(value_id); + } + + /// For a given SSA value id, return the corresponding cached allocation. + pub(crate) fn get_allocation( + &mut self, + function_context: &FunctionContext, + value_id: ValueId, + dfg: &DataFlowGraph, + ) -> RegisterOrMemory { + let value_id = dfg.resolve(value_id); + if let Some(constant) = self.available_constants.get(&value_id) { + *constant + } else { + assert!( + self.available_variables.contains(&value_id), + "ICE: ValueId {:?} is not available", + value_id + ); + + *function_context + .ssa_value_allocations + .get(&value_id) + .unwrap_or_else(|| panic!("ICE: Value not found in cache {value_id}")) + } + } + + /// Creates a constant. Constants are a special case in SSA, since they are "defined" every time they are used. + /// We keep constants block-local. + pub(crate) fn allocate_constant( + &mut self, + brillig_context: &mut BrilligContext, + value_id: ValueId, + dfg: &DataFlowGraph, + ) -> RegisterOrMemory { + let value_id = dfg.resolve(value_id); + let constant = allocate_value(value_id, brillig_context, dfg); + self.available_constants.insert(value_id, constant); + constant + } + + /// Gets a constant. + pub(crate) fn get_constant( + &mut self, + value_id: ValueId, + dfg: &DataFlowGraph, + ) -> Option<RegisterOrMemory> { + let value_id = dfg.resolve(value_id); + self.available_constants.get(&value_id).cloned() + } + + /// Removes the allocations of all constants. Constants will need to be reallocated and reinitialized after this. + pub(crate) fn dump_constants(&mut self) { + self.available_constants.clear(); + } + + /// For a given block parameter, returns the allocation that was made at the function level. + pub(crate) fn get_block_param( + &mut self, + function_context: &FunctionContext, + block_id: BasicBlockId, + value_id: ValueId, + dfg: &DataFlowGraph, + ) -> RegisterOrMemory { + let value_id = dfg.resolve(value_id); + assert!( + function_context + .block_parameters + .get(&block_id) + .expect("Block not found") + .contains(&value_id), + "Value is not a block parameter" + ); + + *function_context.ssa_value_allocations.get(&value_id).expect("Block param not found") + } +} + +/// Computes the length of an array. This will match the indexes that SSA will issue +pub(crate) fn compute_array_length(item_typ: &CompositeType, elem_count: usize) -> usize { + item_typ.len() * elem_count +} + +/// For a given value_id, allocates the necessary registers to hold it.
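+/// Numeric and reference values get a single register, arrays get a pointer register, and slices get a pointer register plus a size register.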
+pub(crate) fn allocate_value( + value_id: ValueId, + brillig_context: &mut BrilligContext, + dfg: &DataFlowGraph, +) -> RegisterOrMemory { + let typ = dfg.type_of_value(value_id); + + match typ { + Type::Numeric(_) | Type::Reference => { + let register = brillig_context.allocate_register(); + RegisterOrMemory::RegisterIndex(register) + } + Type::Array(item_typ, elem_count) => { + let pointer_register = brillig_context.allocate_register(); + let size = compute_array_length(&item_typ, elem_count); + RegisterOrMemory::HeapArray(HeapArray { pointer: pointer_register, size }) + } + Type::Slice(_) => { + let pointer_register = brillig_context.allocate_register(); + let size_register = brillig_context.allocate_register(); + RegisterOrMemory::HeapVector(HeapVector { + pointer: pointer_register, + size: size_register, + }) + } + Type::Function => { + unreachable!("ICE: Function values should have been removed from the SSA") + } + } +} diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index 1ea16fd375e..ec72ceb2909 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -1,4 +1,4 @@ -use acvm::brillig_vm::brillig::{HeapArray, HeapVector, RegisterIndex, RegisterOrMemory}; +use acvm::brillig_vm::brillig::RegisterOrMemory; use iter_extended::vecmap; use crate::{ @@ -7,127 +7,62 @@ use crate::{ BrilligContext, }, ssa::ir::{ - dfg::DataFlowGraph, + basic_block::BasicBlockId, function::{Function, FunctionId}, - types::{CompositeType, Type}, + post_order::PostOrder, + types::Type, value::ValueId, }, }; -use fxhash::FxHashMap as HashMap; +use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; + +use super::{brillig_block_variables::allocate_value, variable_liveness::VariableLiveness}; pub(crate) struct FunctionContext { pub(crate) function_id: FunctionId, - /// Map from SSA values to register or memory. - pub(crate) ssa_value_to_brillig_variable: HashMap<ValueId, RegisterOrMemory>, + /// Map from SSA values to their allocation. Since values can only be defined once in SSA form, we insert them here when we allocate them at their definition. + pub(crate) ssa_value_allocations: HashMap<ValueId, RegisterOrMemory>, + /// Block parameters are pre-allocated at the function level. + pub(crate) block_parameters: HashMap<BasicBlockId, Vec<ValueId>>, + /// The block ids of the function in reverse post order. + pub(crate) blocks: Vec<BasicBlockId>, + /// Liveness information for each variable in the function. + pub(crate) liveness: VariableLiveness, } impl FunctionContext { - /// For a given SSA value id, create and cache the a corresponding variable. - /// This will allocate the needed registers for the variable.
- pub(crate) fn create_variable( - &mut self, - brillig_context: &mut BrilligContext, - value: ValueId, - dfg: &DataFlowGraph, - ) -> RegisterOrMemory { - let value = dfg.resolve(value); - let typ = dfg.type_of_value(value); - - let variable = match typ { - Type::Numeric(_) | Type::Reference => { - let register = brillig_context.allocate_register(); - RegisterOrMemory::RegisterIndex(register) - } - Type::Array(item_typ, elem_count) => { - let pointer_register = brillig_context.allocate_register(); - let size = compute_array_length(&item_typ, elem_count); - RegisterOrMemory::HeapArray(HeapArray { pointer: pointer_register, size }) - } - Type::Slice(_) => { - let pointer_register = brillig_context.allocate_register(); - let size_register = brillig_context.allocate_register(); - RegisterOrMemory::HeapVector(HeapVector { - pointer: pointer_register, - size: size_register, - }) - } - Type::Function => { - unreachable!("ICE: Function values should have been removed from the SSA") - } - }; - - // Cache the `ValueId` so that if we call get_variable, it will - // return the registers that have just been created. - // - // WARNING: This assumes that a registers won't be reused for a different value. - // If you overwrite the registers, then the cache will be invalid. - - if self.ssa_value_to_brillig_variable.insert(value, variable).is_some() { - unreachable!("ICE: ValueId {value:?} was already in cache"); - } - - variable - } - - /// For a given SSA value id, return the corresponding cached variable. - pub(crate) fn get_variable(&mut self, value: ValueId, dfg: &DataFlowGraph) -> RegisterOrMemory { - let value = dfg.resolve(value); - *self - .ssa_value_to_brillig_variable - .get(&value) - .unwrap_or_else(|| panic!("ICE: Value not found in cache {value}")) - } - - pub(crate) fn get_or_create_variable( - &mut self, - brillig_context: &mut BrilligContext, - value: ValueId, - dfg: &DataFlowGraph, - ) -> RegisterOrMemory { - let value = dfg.resolve(value); - if let Some(variable) = self.ssa_value_to_brillig_variable.get(&value) { - return *variable; + /// Creates a new function context. It will allocate parameters for all blocks and compute the liveness of every variable. + pub(crate) fn new(function: &Function, brillig_context: &mut BrilligContext) -> Self { + let id = function.id(); + + let mut reverse_post_order = Vec::new(); + reverse_post_order.extend_from_slice(PostOrder::with_function(function).as_slice()); + reverse_post_order.reverse(); + + let mut block_parameters = HashMap::default(); + let mut ssa_variable_to_register_or_memory = HashMap::default(); + + for &block_id in &reverse_post_order { + let block = &function.dfg[block_id]; + let parameters = block.parameters().to_vec(); + parameters.iter().for_each(|&value_id| { + let variable = allocate_value(value_id, brillig_context, &function.dfg); + ssa_variable_to_register_or_memory.insert(value_id, variable); + }); + block_parameters.insert(block_id, parameters); } - self.create_variable(brillig_context, value, dfg) - } - - /// Creates a variable that fits in a single register and returns the register. 
-    pub(crate) fn create_register_variable(
-        &mut self,
-        brillig_context: &mut BrilligContext,
-        value: ValueId,
-        dfg: &DataFlowGraph,
-    ) -> RegisterIndex {
-        let variable = self.create_variable(brillig_context, value, dfg);
-        self.extract_register(variable)
-    }
-
-    pub(crate) fn extract_register(&self, variable: RegisterOrMemory) -> RegisterIndex {
-        match variable {
-            RegisterOrMemory::RegisterIndex(register_index) => register_index,
-            _ => unreachable!("ICE: Expected register, got {variable:?}"),
+        Self {
+            function_id: id,
+            ssa_value_allocations: ssa_variable_to_register_or_memory,
+            block_parameters,
+            blocks: reverse_post_order,
+            liveness: VariableLiveness::from_function(function),
         }
     }
 
-    pub(crate) fn extract_heap_array(&self, variable: RegisterOrMemory) -> HeapArray {
-        match variable {
-            RegisterOrMemory::HeapArray(array) => array,
-            _ => unreachable!("ICE: Expected array, got {variable:?}"),
-        }
-    }
-
-    /// Collects the registers that a given variable is stored in.
-    pub(crate) fn extract_registers(&self, variable: RegisterOrMemory) -> Vec<RegisterIndex> {
-        match variable {
-            RegisterOrMemory::RegisterIndex(register_index) => vec![register_index],
-            RegisterOrMemory::HeapArray(array) => {
-                vec![array.pointer]
-            }
-            RegisterOrMemory::HeapVector(vector) => {
-                vec![vector.pointer, vector.size]
-            }
-        }
+    pub(crate) fn all_block_parameters(&self) -> HashSet<ValueId> {
+        self.block_parameters.values().flat_map(|parameters| parameters.iter()).cloned().collect()
     }
 
     /// Creates a function label from a given SSA function id.
@@ -175,8 +110,3 @@ impl FunctionContext {
             .collect()
     }
 }
-
-/// Computes the length of an array. This will match with the indexes that SSA will issue
-pub(crate) fn compute_array_length(item_typ: &CompositeType, elem_count: usize) -> usize {
-    item_typ.len() * elem_count
-}
diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs
index e46cc55c3ea..445f2769692 100644
--- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs
+++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs
@@ -317,27 +317,38 @@ mod tests {
     use acvm::brillig_vm::brillig::{RegisterIndex, RegisterOrMemory};
 
     use crate::brillig::brillig_gen::brillig_block::BrilligBlock;
+    use crate::brillig::brillig_gen::brillig_block_variables::BlockVariables;
     use crate::brillig::brillig_gen::brillig_fn::FunctionContext;
     use crate::brillig::brillig_ir::artifact::BrilligParameter;
     use crate::brillig::brillig_ir::tests::{create_and_run_vm, create_context};
     use crate::brillig::brillig_ir::BrilligContext;
+    use crate::ssa::function_builder::FunctionBuilder;
+    use crate::ssa::ir::function::RuntimeType;
     use crate::ssa::ir::map::Id;
-    use fxhash::FxHashMap as HashMap;
-
-    fn create_test_environment() -> (FunctionContext, BrilligContext) {
-        let function_context = FunctionContext {
-            function_id: Id::test_new(0),
-            ssa_value_to_brillig_variable: HashMap::default(),
-        };
-        let brillig_context = create_context();
-        (function_context, brillig_context)
+    use crate::ssa::ssa_gen::Ssa;
+
+    fn create_test_environment() -> (Ssa, FunctionContext, BrilligContext) {
+        let builder =
+            FunctionBuilder::new("main".to_string(), Id::test_new(0), RuntimeType::Brillig);
+        let ssa = builder.finish();
+        let mut brillig_context = create_context();
+
+        let function_context = FunctionContext::new(ssa.main(), &mut brillig_context);
+        (ssa, function_context, brillig_context)
     }
 
     fn create_brillig_block<'a>(
        function_context: &'a mut FunctionContext,
        brillig_context: &'a mut BrilligContext,
    ) -> BrilligBlock<'a> {
-        BrilligBlock { function_context, block_id: Id::test_new(0), brillig_context }
+        let variables = BlockVariables::default();
+        BrilligBlock {
+            function_context,
+            block_id: Id::test_new(0),
+            brillig_context,
+            variables,
+            last_uses: Default::default(),
+        }
     }
 
     #[test]
@@ -357,7 +368,7 @@ mod tests {
             BrilligParameter::Simple,
         ];
 
-        let (mut function_context, mut context) = create_test_environment();
+        let (_, mut function_context, mut context) = create_test_environment();
 
         // Allocate the parameters
         let array_pointer = context.allocate_register();
@@ -450,7 +461,7 @@ mod tests {
             BrilligParameter::Simple,
         ];
 
-        let (mut function_context, mut context) = create_test_environment();
+        let (_, mut function_context, mut context) = create_test_environment();
 
        // Allocate the parameters
         let array_pointer = context.allocate_register();
@@ -547,7 +558,7 @@ mod tests {
             BrilligParameter::Simple,
         ];
 
-        let (mut function_context, mut context) = create_test_environment();
+        let (_, mut function_context, mut context) = create_test_environment();
 
         // Allocate the parameters
         let array_pointer = context.allocate_register();
@@ -670,7 +681,7 @@ mod tests {
             BrilligParameter::Simple,
         ];
 
-        let (mut function_context, mut context) = create_test_environment();
+        let (_, mut function_context, mut context) = create_test_environment();
 
         // Allocate the parameters
         let array_pointer = context.allocate_register();
diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs
new file mode 100644
index 00000000000..eeea627a308
--- /dev/null
+++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs
@@ -0,0 +1,541 @@
+//! This module analyzes the liveness of variables (non-constant values) throughout a function.
+//! It uses the approach detailed in section 4.2 of this paper: https://inria.hal.science/inria-00558509v2/document
+use crate::ssa::ir::{
+    basic_block::{BasicBlock, BasicBlockId},
+    cfg::ControlFlowGraph,
+    dfg::DataFlowGraph,
+    dom::DominatorTree,
+    function::Function,
+    instruction::{Instruction, InstructionId},
+    post_order::PostOrder,
+    value::{Value, ValueId},
+};
+
+use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet};
+
+/// A back edge is an edge from a node to one of its ancestors. It denotes a loop in the CFG.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+struct BackEdge {
+    header: BasicBlockId,
+    start: BasicBlockId,
+}
+
+fn find_back_edges(
+    func: &Function,
+    cfg: &ControlFlowGraph,
+    post_order: &PostOrder,
+) -> HashSet<BackEdge> {
+    let mut tree = DominatorTree::with_cfg_and_post_order(cfg, post_order);
+    let mut back_edges = HashSet::default();
+
+    for block_id in func.reachable_blocks() {
+        let block = &func.dfg[block_id];
+        let successors = block.successors();
+        for successor_id in successors {
+            if tree.dominates(successor_id, block_id) {
+                back_edges.insert(BackEdge { start: block_id, header: successor_id });
+            }
+        }
+    }
+
+    back_edges
+}
+
+/// Collects the underlying variables inside a value id. There might be more than one, for example in constant arrays that are constructed with multiple vars.
+fn collect_variables_of_value(value_id: ValueId, dfg: &DataFlowGraph) -> Vec<ValueId> {
+    let value_id = dfg.resolve(value_id);
+    let value = &dfg[value_id];
+
+    match value {
+        Value::Instruction { .. } | Value::Param { .. } => {
+            vec![value_id]
+        }
+        // Literal arrays are constants, but might use variable values to initialise.
+        Value::Array { array, .. } => {
+            let mut value_ids = Vec::new();
+
+            array.iter().for_each(|item_id| {
+                let underlying_ids = collect_variables_of_value(*item_id, dfg);
+                value_ids.extend(underlying_ids);
+            });
+
+            value_ids
+        }
+        // Functions are not variables in a defunctionalized SSA. Only constant function values should appear.
+        Value::ForeignFunction(_)
+        | Value::Function(_)
+        | Value::Intrinsic(..)
+        // Constants are not treated as variables for the variable liveness analysis, since they are defined every time they are used.
+        | Value::NumericConstant { .. } => {
+            vec![]
+        }
+    }
+}
+
+fn variables_used_in_instruction(instruction: &Instruction, dfg: &DataFlowGraph) -> Vec<ValueId> {
+    let mut used = Vec::new();
+
+    instruction.for_each_value(|value_id| {
+        let underlying_ids = collect_variables_of_value(value_id, dfg);
+        used.extend(underlying_ids);
+    });
+
+    used
+}
+
+fn variables_used_in_block(block: &BasicBlock, dfg: &DataFlowGraph) -> Vec<ValueId> {
+    let mut used: Vec<ValueId> = block
+        .instructions()
+        .iter()
+        .flat_map(|instruction_id| {
+            let instruction = &dfg[*instruction_id];
+            variables_used_in_instruction(instruction, dfg)
+        })
+        .collect();
+
+    if let Some(terminator) = block.terminator() {
+        terminator.for_each_value(|value_id| {
+            used.extend(collect_variables_of_value(value_id, dfg));
+        });
+    }
+
+    used
+}
+
+type Variables = HashSet<ValueId>;
+
+fn compute_defined_variables(block: &BasicBlock, dfg: &DataFlowGraph) -> Variables {
+    let mut defined_vars = HashSet::default();
+
+    for parameter in block.parameters() {
+        defined_vars.insert(dfg.resolve(*parameter));
+    }
+
+    for instruction_id in block.instructions() {
+        let result_values = dfg.instruction_results(*instruction_id);
+        for result_value in result_values {
+            defined_vars.insert(dfg.resolve(*result_value));
+        }
+    }
+
+    defined_vars
+}
+
+fn compute_used_before_def(
+    block: &BasicBlock,
+    dfg: &DataFlowGraph,
+    defined_in_block: &Variables,
+) -> Variables {
+    variables_used_in_block(block, dfg)
+        .into_iter()
+        .filter(|id| !defined_in_block.contains(id))
+        .collect()
+}
+
+type LastUses = HashMap<InstructionId, Variables>;
+
+/// A struct representing the liveness of variables throughout a function.
+pub(crate) struct VariableLiveness {
+    cfg: ControlFlowGraph,
+    post_order: PostOrder,
+    /// The variables that are alive before the block starts executing
+    live_in: HashMap<BasicBlockId, Variables>,
+    /// The variables that stop being alive after each specific instruction
+    last_uses: HashMap<BasicBlockId, LastUses>,
+}
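The two sets cached by the struct above follow the standard backward-liveness equations: live_out(B) is the union of live_in(S) over all successors S of B, and live_in(B) = used_before_def(B) ∪ (live_out(B) − defined(B)). A minimal, self-contained sketch of one backward pass over an acyclic CFG follows; the Block type and integer variable ids are hypothetical toys, not the compiler's types.

use std::collections::{HashMap, HashSet};

// A toy block: the variables it defines, the variables it uses before
// defining them, and its successor block indices. Illustration only.
struct Block {
    defined: HashSet<u32>,
    used_before_def: HashSet<u32>,
    successors: Vec<usize>,
}

// Computes live_in for each block of an acyclic CFG. Blocks are given in
// reverse post-order (predecessors before successors), so iterating the
// slice backwards visits every successor before its predecessors.
fn live_ins(blocks: &[Block]) -> Vec<HashSet<u32>> {
    let mut live_in: Vec<HashSet<u32>> = vec![HashSet::new(); blocks.len()];
    for (id, block) in blocks.iter().enumerate().rev() {
        // live_out(B) = union of live_in(S) for each successor S
        let mut live_out = HashSet::new();
        for &succ in &block.successors {
            live_out.extend(live_in[succ].iter().copied());
        }
        // live_in(B) = used_before_def(B) union (live_out(B) - defined(B))
        let mut result = block.used_before_def.clone();
        result.extend(live_out.difference(&block.defined).copied());
        live_in[id] = result;
    }
    live_in
}

fn main() {
    // b0 defines variable 0 and jumps to b1; b1 uses variable 0.
    let blocks = vec![
        Block { defined: HashSet::from([0]), used_before_def: HashSet::new(), successors: vec![1] },
        Block { defined: HashSet::new(), used_before_def: HashSet::from([0]), successors: vec![] },
    ];
    let live = live_ins(&blocks);
    assert!(live[0].is_empty()); // variable 0 is defined inside b0 itself
    assert_eq!(live[1], HashSet::from([0])); // variable 0 must be alive entering b1
    println!("{live:?}");
}

A single backward pass only suffices without loops; for loops, the implementation below skips back edges in a first recursive pass and then extends every loop-body block with the loop header's live-ins in a second pass.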
+
+impl VariableLiveness {
+    /// Computes the liveness of variables throughout a function.
+    pub(crate) fn from_function(func: &Function) -> Self {
+        let cfg = ControlFlowGraph::with_function(func);
+        let post_order = PostOrder::with_function(func);
+
+        let mut instance =
+            Self { cfg, post_order, live_in: HashMap::default(), last_uses: HashMap::default() };
+
+        instance.compute_live_in_of_blocks(func);
+
+        instance.compute_last_uses(func);
+
+        instance
+    }
+
+    /// The set of values that are alive before the block starts executing
+    pub(crate) fn get_live_in(&self, block_id: &BasicBlockId) -> &Variables {
+        self.live_in.get(block_id).expect("Live ins should have been calculated")
+    }
+
+    /// The set of values that are alive after the block has finished executing
+    pub(crate) fn get_live_out(&self, block_id: &BasicBlockId) -> Variables {
+        let mut live_out = HashSet::default();
+        for successor_id in self.cfg.successors(*block_id) {
+            live_out.extend(self.get_live_in(&successor_id));
+        }
+        live_out
+    }
+
+    /// A map of instruction id to the set of values that die after the instruction has executed
+    pub(crate) fn get_last_uses(&self, block_id: &BasicBlockId) -> &LastUses {
+        self.last_uses.get(block_id).expect("Last uses should have been calculated")
+    }
+
+    fn compute_live_in_of_blocks(&mut self, func: &Function) {
+        let back_edges = find_back_edges(func, &self.cfg, &self.post_order);
+
+        // First pass, propagate up the live_ins skipping back edges
+        self.compute_live_in_recursive(func, func.entry_block(), &back_edges);
+
+        // Second pass, propagate header live_ins to the loop bodies
+        for back_edge in back_edges {
+            self.update_live_ins_within_loop(back_edge);
+        }
+    }
+
+    fn compute_live_in_recursive(
+        &mut self,
+        func: &Function,
+        block_id: BasicBlockId,
+        back_edges: &HashSet<BackEdge>,
+    ) {
+        let block = &func.dfg[block_id];
+
+        let defined = compute_defined_variables(block, &func.dfg);
+        let used_before_def = compute_used_before_def(block, &func.dfg, &defined);
+
+        let mut live_out = HashSet::default();
+
+        for successor_id in block.successors() {
+            if !back_edges.contains(&BackEdge { start: block_id, header: successor_id }) {
+                if !self.live_in.contains_key(&successor_id) {
+                    self.compute_live_in_recursive(func, successor_id, back_edges);
+                }
+                live_out.extend(
+                    self.live_in
+                        .get(&successor_id)
+                        .expect("Live ins for successor should have been calculated"),
+                );
+            }
+        }
+
+        // live_in[BlockId] = before_def[BlockId] union (live_out[BlockId] - killed[BlockId])
+        let passthrough_vars = live_out.difference(&defined).cloned().collect();
+        self.live_in.insert(block_id, used_before_def.union(&passthrough_vars).cloned().collect());
+    }
+
+    fn update_live_ins_within_loop(&mut self, back_edge: BackEdge) {
+        let header_live_ins = self
+            .live_in
+            .get(&back_edge.header)
+            .expect("Live ins should have been calculated")
+            .clone();
+
+        let body = self.compute_loop_body(back_edge);
+        for body_block_id in body {
+            self.live_in
+                .get_mut(&body_block_id)
+                .expect("Live ins should have been calculated")
+                .extend(&header_live_ins);
+        }
+    }
+
+    fn compute_loop_body(&self, edge: BackEdge) -> HashSet<BasicBlockId> {
+        let mut loop_blocks = HashSet::default();
+        loop_blocks.insert(edge.header);
+        loop_blocks.insert(edge.start);
+
+        let mut stack = vec![edge.start];
+
+        while let Some(block) = stack.pop() {
+            for predecessor in self.cfg.predecessors(block) {
+                if !loop_blocks.contains(&predecessor) {
+                    loop_blocks.insert(predecessor);
+                    stack.push(predecessor);
+                }
+            }
+        }
+
+        loop_blocks
+    }
+
+    fn compute_last_uses(&mut self, func: &Function) {
+        for block_id in func.reachable_blocks() {
+            let block = &func.dfg[block_id];
+            let live_out = self.get_live_out(&block_id);
+
+            let mut used_after: Variables = Default::default();
+            let mut block_last_uses: LastUses = Default::default();
+
+            // First, handle the terminator
+            if let Some(terminator_instruction) = block.terminator() {
+                terminator_instruction.for_each_value(|value_id| {
+                    let underlying_vars = collect_variables_of_value(value_id, &func.dfg);
+                    used_after.extend(underlying_vars);
+                });
+            }
+
+            // Then, handle the instructions in reverse order to find the last use
+            for instruction_id in block.instructions().iter().rev() {
+                let instruction = &func.dfg[*instruction_id];
+                let instruction_last_uses = variables_used_in_instruction(instruction, &func.dfg)
+                    .into_iter()
+                    .filter(|id| !used_after.contains(id) && !live_out.contains(id))
+                    .collect();
+
+                used_after.extend(&instruction_last_uses);
+                block_last_uses.insert(*instruction_id, instruction_last_uses);
+            }
+
+            self.last_uses.insert(block_id, block_last_uses);
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use fxhash::FxHashSet;
+
+    use crate::brillig::brillig_gen::variable_liveness::VariableLiveness;
+    use crate::ssa::function_builder::FunctionBuilder;
+    use crate::ssa::ir::function::RuntimeType;
+    use crate::ssa::ir::instruction::BinaryOp;
+    use crate::ssa::ir::map::Id;
+    use crate::ssa::ir::types::Type;
+
+    #[test]
+    fn simple_back_propagation() {
+        // brillig fn main f0 {
+        //   b0(v0: Field, v1: Field):
+        //     v3 = allocate
+        //     store Field 0 at v3
+        //     v4 = eq v0, Field 0
+        //     jmpif v4 then: b1, else: b2
+        //   b2():
+        //     v7 = add v0, Field 27
+        //     store v7 at v3
+        //     jmp b3()
+        //   b1():
+        //     v6 = add v1, Field 27
+        //     store v6 at v3
+        //     jmp b3()
+        //   b3():
+        //     v8 = load v3
+        //     return v8
+        // }
+
+        let main_id = Id::test_new(1);
+        let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Brillig);
+
+        let b1 = builder.insert_block();
+        let b2 = builder.insert_block();
+        let b3 = builder.insert_block();
+
+        let v0 = builder.add_parameter(Type::field());
+        let v1 = builder.add_parameter(Type::field());
+
+        let v3 = builder.insert_allocate();
+
+        let zero = builder.numeric_constant(0u128, Type::field());
+        builder.insert_store(v3, zero);
+
+        let v4 = builder.insert_binary(v0, BinaryOp::Eq, zero);
+
+        builder.terminate_with_jmpif(v4, b1, b2);
+
+        builder.switch_to_block(b2);
+
+        let twenty_seven = builder.numeric_constant(27u128, Type::field());
+        let v7 = builder.insert_binary(v0, BinaryOp::Add, twenty_seven);
+        builder.insert_store(v3, v7);
+
+        builder.terminate_with_jmp(b3, vec![]);
+
+        builder.switch_to_block(b1);
+
+        let v6 = builder.insert_binary(v1, BinaryOp::Add, twenty_seven);
+        builder.insert_store(v3, v6);
+
+        builder.terminate_with_jmp(b3, vec![]);
+
+        builder.switch_to_block(b3);
+
+        let v8 = builder.insert_load(v3, Type::field());
+
+        builder.terminate_with_return(vec![v8]);
+
+        let ssa = builder.finish();
+        let func = ssa.main();
+        let liveness = VariableLiveness::from_function(func);
+
+        assert!(liveness.get_live_in(&func.entry_block()).is_empty());
+        assert_eq!(liveness.get_live_in(&b2), &FxHashSet::from_iter([v3, v0].into_iter()));
+        assert_eq!(liveness.get_live_in(&b1), &FxHashSet::from_iter([v3, v1].into_iter()));
+        assert_eq!(liveness.get_live_in(&b3), &FxHashSet::from_iter([v3].into_iter()));
+
+        let block_1 = &func.dfg[b1];
+        let block_2 = &func.dfg[b2];
+        let block_3 = &func.dfg[b3];
+        assert_eq!(
+            liveness.get_last_uses(&b1).get(&block_1.instructions()[0]),
+            Some(&FxHashSet::from_iter([v1].into_iter()))
+        );
+        assert_eq!(
+            liveness.get_last_uses(&b2).get(&block_2.instructions()[0]),
+            Some(&FxHashSet::from_iter([v0].into_iter()))
+        );
+        assert_eq!(
+            liveness.get_last_uses(&b3).get(&block_3.instructions()[0]),
+            Some(&FxHashSet::from_iter([v3].into_iter()))
+        );
+    }
+
+    #[test]
+    fn propagation_with_nested_loops() {
+        // brillig fn main f0 {
+        //   b0(v0: Field, v1: Field):
+        //     v3 = allocate
+        //     store Field 0 at v3
+        //     jmp b1(Field 0)
+        //   b1(v4: Field):
+        //     v5 = lt v4, v0
+        //     jmpif v5 then: b2, else: b3
+        //   b3():
+        //     v17 = load v3
+        //     return v17
+        //   b2():
+        //     v6 = mul v4, v4
+        //     jmp b4(v0)
+        //   b4(v7: Field):
+        //     v8 = lt v7, v1
+        //     jmpif v8 then: b5, else: b6
+        //   b6():
+        //     v16 = add v4, Field 1
+        //     jmp b1(v16)
+        //   b5():
+        //     v10 = eq v7, Field 27
+        //     v11 = not v10
+        //     jmpif v11 then: b7, else: b8
+        //   b7():
+        //     v12 = load v3
+        //     v13 = add v12, v6
+        //     store v13 at v3
+        //     jmp b8()
+        //   b8():
+        //     v15 = add v7, Field 1
+        //     jmp b4(v15)
+        // }
+
+        let main_id = Id::test_new(1);
+        let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Brillig);
+
+        let b1 = builder.insert_block();
+        let b2 = builder.insert_block();
+        let b3 = builder.insert_block();
+        let b4 = builder.insert_block();
+        let b5 = builder.insert_block();
+        let b6 = builder.insert_block();
+        let b7 = builder.insert_block();
+        let b8 = builder.insert_block();
+
+        let v0 = builder.add_parameter(Type::field());
+        let v1 = builder.add_parameter(Type::field());
+
+        let v3 = builder.insert_allocate();
+
+        let zero = builder.numeric_constant(0u128, Type::field());
+        builder.insert_store(v3, zero);
+
+        builder.terminate_with_jmp(b1, vec![zero]);
+
+        builder.switch_to_block(b1);
+        let v4 = builder.add_block_parameter(b1, Type::field());
+
+        let v5 = builder.insert_binary(v4, BinaryOp::Lt, v0);
+
+        builder.terminate_with_jmpif(v5, b2, b3);
+
+        builder.switch_to_block(b2);
+
+        let v6 = builder.insert_binary(v4, BinaryOp::Mul, v4);
+
+        builder.terminate_with_jmp(b4, vec![v0]);
+
+        builder.switch_to_block(b4);
+
+        let v7 = builder.add_block_parameter(b4, Type::field());
+
+        let v8 = builder.insert_binary(v7, BinaryOp::Lt, v1);
+
+        builder.terminate_with_jmpif(v8, b5, b6);
+
+        builder.switch_to_block(b5);
+
+        let twenty_seven = builder.numeric_constant(27u128, Type::field());
+        let v10 = builder.insert_binary(v7, BinaryOp::Eq, twenty_seven);
+
+        let v11 = builder.insert_not(v10);
+
+        builder.terminate_with_jmpif(v11, b7, b8);
+
+        builder.switch_to_block(b7);
+
+        let v12 = builder.insert_load(v3, Type::field());
+
+        let v13 = builder.insert_binary(v12, BinaryOp::Add, v6);
+
+        builder.insert_store(v3, v13);
+
+        builder.terminate_with_jmp(b8, vec![]);
+
+        builder.switch_to_block(b8);
+
+        let one = builder.numeric_constant(1u128, Type::field());
+        let v15 = builder.insert_binary(v7, BinaryOp::Add, one);
+
+        builder.terminate_with_jmp(b4, vec![v15]);
+
+        builder.switch_to_block(b6);
+
+        let v16 = builder.insert_binary(v4, BinaryOp::Add, one);
+
+        builder.terminate_with_jmp(b1, vec![v16]);
+
+        builder.switch_to_block(b3);
+
+        let v17 = builder.insert_load(v3, Type::field());
+
+        builder.terminate_with_return(vec![v17]);
+
+        let ssa = builder.finish();
+        let func = ssa.main();
+
+        let liveness = VariableLiveness::from_function(func);
+
+        assert!(liveness.get_live_in(&func.entry_block()).is_empty());
+        assert_eq!(liveness.get_live_in(&b1), &FxHashSet::from_iter([v0, v1, v3].into_iter()));
+        assert_eq!(liveness.get_live_in(&b3), &FxHashSet::from_iter([v3].into_iter()));
+        assert_eq!(liveness.get_live_in(&b2), &FxHashSet::from_iter([v0, v1, v3, v4].into_iter()));
+        assert_eq!(
+            liveness.get_live_in(&b4),
+            &FxHashSet::from_iter([v0, v1, v3, v4, v6].into_iter())
+        );
+        assert_eq!(liveness.get_live_in(&b6), &FxHashSet::from_iter([v0, v1, v3, v4].into_iter()));
+        assert_eq!(
+            liveness.get_live_in(&b5),
+            &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7].into_iter())
+        );
+        assert_eq!(
+            liveness.get_live_in(&b7),
+            &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7].into_iter())
+        );
+        assert_eq!(
+            liveness.get_live_in(&b8),
+            &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7].into_iter())
+        );
+
+        let block_3 = &func.dfg[b3];
+        assert_eq!(
+            liveness.get_last_uses(&b3).get(&block_3.instructions()[0]),
+            Some(&FxHashSet::from_iter([v3].into_iter()))
+        );
+    }
+}
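The loop-body collection used by the second liveness pass is simple once back edges are known: starting from the back edge's source, walking predecessors can never escape the loop, because the header dominates every block in the body. A standalone sketch of that walk on a toy CFG (predecessor lists indexed by block id; hypothetical, not the compiler's CFG type):

use std::collections::HashSet;

// Given a back edge `start -> header`, every block on a path from the
// header to `start` belongs to the loop body.
fn loop_body(header: usize, start: usize, predecessors: &[Vec<usize>]) -> HashSet<usize> {
    let mut body = HashSet::from([header, start]);
    let mut stack = vec![start];
    while let Some(block) = stack.pop() {
        for &pred in &predecessors[block] {
            // The walk stops at the header because it is pre-inserted into
            // `body`, and it cannot leave the loop since the header
            // dominates every body block.
            if body.insert(pred) {
                stack.push(pred);
            }
        }
    }
    body
}

fn main() {
    // CFG: 0 -> 1, 1 -> 2, 2 -> 1 (back edge), 1 -> 3
    let preds = vec![vec![], vec![0, 2], vec![1], vec![1]];
    let body = loop_body(1, 2, &preds);
    assert_eq!(body, HashSet::from([1, 2]));
    println!("{body:?}");
}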
diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs
index 2b5ccaeb88c..d1ce1b551b2 100644
--- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs
+++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs
@@ -104,6 +104,10 @@ impl BrilligContext {
         }
     }
 
+    pub(crate) fn set_allocated_registers(&mut self, allocated_registers: Vec<RegisterIndex>) {
+        self.registers = BrilligRegistersContext::from_preallocated_registers(allocated_registers);
+    }
+
     /// Adds a brillig instruction to the brillig byte code
     pub(crate) fn push_opcode(&mut self, opcode: BrilligOpcode) {
         self.obj.push_opcode(opcode);
@@ -243,7 +247,7 @@ impl BrilligContext {
 
     /// This instruction will issue a loop that will iterate iteration_count times
     /// The body of the loop should be issued by the caller in the on_iteration closure.
-    fn loop_instruction<F>(&mut self, iteration_count: RegisterIndex, on_iteration: F)
+    pub(crate) fn loop_instruction<F>(&mut self, iteration_count: RegisterIndex, on_iteration: F)
     where
         F: FnOnce(&mut BrilligContext, RegisterIndex),
     {
@@ -721,13 +725,14 @@ impl BrilligContext {
     }
 
     /// Saves all of the registers that have been used up until this point.
-    fn save_all_used_registers(&mut self) -> Vec<RegisterIndex> {
+    fn save_registers_of_vars(&mut self, vars: &[RegisterOrMemory]) -> Vec<RegisterIndex> {
         // Save all of the used registers at this point in memory
         // because the function call will/may overwrite them.
         //
         // Note that here it is important that the stack pointer register is at register 0,
         // as after the first register save we add to the pointer.
-        let mut used_registers: Vec<_> = self.registers.used_registers_iter().collect();
+        let mut used_registers: Vec<_> =
+            vars.iter().flat_map(|var| extract_registers(*var)).collect();
 
         // Also dump the previous stack pointer
         used_registers.push(ReservedRegisters::previous_stack_pointer());
@@ -806,9 +811,10 @@ impl BrilligContext {
     pub(crate) fn pre_call_save_registers_prep_args(
         &mut self,
         arguments: &[RegisterIndex],
+        variables_to_save: &[RegisterOrMemory],
     ) -> Vec<RegisterIndex> {
         // Save all the registers we have used to the stack.
-        let saved_registers = self.save_all_used_registers();
+        let saved_registers = self.save_registers_of_vars(variables_to_save);
 
         // Move argument values to the front of the registers
         //
@@ -961,6 +967,33 @@ impl BrilligContext {
     }
 }
 
+pub(crate) fn extract_register(variable: RegisterOrMemory) -> RegisterIndex {
+    match variable {
+        RegisterOrMemory::RegisterIndex(register_index) => register_index,
+        _ => unreachable!("ICE: Expected register, got {variable:?}"),
+    }
+}
+
+pub(crate) fn extract_heap_array(variable: RegisterOrMemory) -> HeapArray {
+    match variable {
+        RegisterOrMemory::HeapArray(array) => array,
+        _ => unreachable!("ICE: Expected array, got {variable:?}"),
+    }
+}
+
+/// Collects the registers that a given variable is stored in.
+pub(crate) fn extract_registers(variable: RegisterOrMemory) -> Vec<RegisterIndex> {
+    match variable {
+        RegisterOrMemory::RegisterIndex(register_index) => vec![register_index],
+        RegisterOrMemory::HeapArray(array) => {
+            vec![array.pointer]
+        }
+        RegisterOrMemory::HeapVector(vector) => {
+            vec![vector.pointer, vector.size]
+        }
+    }
+}
+
 /// Type to encapsulate the binary operation types in Brillig
 #[derive(Clone)]
 pub(crate) enum BrilligBinaryOp {
diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/registers.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/registers.rs
index 2a04352694c..e7ab1492acb 100644
--- a/compiler/noirc_evaluator/src/brillig/brillig_ir/registers.rs
+++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/registers.rs
@@ -8,8 +8,6 @@ use super::ReservedRegisters;
 /// Each has a stack base pointer from which all stack allocations can be offset.
 pub(crate) struct BrilligRegistersContext {
     /// A free-list of registers that have been deallocated and can be used again.
-    /// TODO(AD): currently, register deallocation is only done with immediate values.
-    /// TODO(AD): See https://github.com/noir-lang/noir/issues/1720
     deallocated_registers: Vec<RegisterIndex>,
     /// A usize indicating the next un-used register.
     next_free_register_index: usize,
@@ -17,13 +15,35 @@ impl BrilligRegistersContext {
     /// Initial register allocation
-    pub(crate) fn new() -> BrilligRegistersContext {
-        BrilligRegistersContext {
+    pub(crate) fn new() -> Self {
+        Self {
             deallocated_registers: Vec::new(),
             next_free_register_index: ReservedRegisters::len(),
         }
     }
 
+    /// Creates a new register context from a set of registers allocated previously.
+    pub(crate) fn from_preallocated_registers(preallocated_registers: Vec<RegisterIndex>) -> Self {
+        let next_free_register_index = preallocated_registers.iter().fold(
+            ReservedRegisters::len(),
+            |free_register_index, preallocated_register| {
+                if preallocated_register.to_usize() < free_register_index {
+                    free_register_index
+                } else {
+                    preallocated_register.to_usize() + 1
+                }
+            },
+        );
+        let mut deallocated_registers = Vec::new();
+        for i in ReservedRegisters::len()..next_free_register_index {
+            if !preallocated_registers.contains(&RegisterIndex::from(i)) {
+                deallocated_registers.push(RegisterIndex::from(i));
+            }
+        }
+
+        Self { deallocated_registers, next_free_register_index }
+    }
+
     /// Ensures a register is allocated.
     pub(crate) fn ensure_register_is_allocated(&mut self, register: RegisterIndex) {
         let index = register.to_usize();
@@ -36,14 +56,6 @@ impl BrilligRegistersContext {
         }
     }
 
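The logic in from_preallocated_registers above is easy to lose in the fold: the next free index becomes one past the highest pre-allocated register, and every non-reserved index below it that is not pre-allocated goes straight onto the free list. A standalone sketch of the same computation with plain usize indices (toy code, not the compiler's API):

// `reserved` is the count of reserved registers at the bottom of the range.
fn rebuild_free_list(preallocated: &[usize], reserved: usize) -> (Vec<usize>, usize) {
    // The next free index is one past the highest preallocated register,
    // but never below the reserved range.
    let next_free = preallocated
        .iter()
        .fold(reserved, |acc, &reg| if reg < acc { acc } else { reg + 1 });

    // Every index in a gap below `next_free` that is not preallocated is
    // immediately reusable.
    let free: Vec<usize> =
        (reserved..next_free).filter(|i| !preallocated.contains(i)).collect();

    (free, next_free)
}

fn main() {
    // Registers 3 and 5 survive a call; registers 0..3 are reserved.
    let (free, next) = rebuild_free_list(&[3, 5], 3);
    assert_eq!(free, vec![4]); // 4 sits in a gap and can be reused
    assert_eq!(next, 6);
    println!("free: {free:?}, next: {next}");
}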
-    /// Lazily iterate over the used registers,
-    /// counting to next_free_register_index while excluding deallocated and reserved registers.
-    pub(crate) fn used_registers_iter(&self) -> impl Iterator<Item = RegisterIndex> + '_ {
-        (ReservedRegisters::NUM_RESERVED_REGISTERS..self.next_free_register_index)
-            .map(RegisterIndex::from)
-            .filter(|&index| !self.deallocated_registers.contains(&index))
-    }
-
     /// Creates a new register.
     pub(crate) fn allocate_register(&mut self) -> RegisterIndex {
         // If we have a register in our free list of deallocated registers,
diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs
index 92bbe21b20d..c9e9d95f4da 100644
--- a/compiler/noirc_evaluator/src/ssa.rs
+++ b/compiler/noirc_evaluator/src/ssa.rs
@@ -25,7 +25,7 @@ use self::{abi_gen::gen_abi, acir_gen::GeneratedAcir, ssa_gen::Ssa};
 
 pub mod abi_gen;
 mod acir_gen;
-mod function_builder;
+pub(super) mod function_builder;
 pub mod ir;
 mod opt;
 pub mod ssa_gen;
diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
index 1a58386c139..d5eef00d461 100644
--- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
+++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
@@ -1059,10 +1059,7 @@ impl Context {
                 // Casting into a Field as a no-op
                 Ok(variable)
             }
-            NumericType::Unsigned { bit_size } => {
-                if incoming_type.is_signed() {
-                    todo!("Cast from unsigned to signed")
-                }
+            NumericType::Unsigned { bit_size } | NumericType::Signed { bit_size } => {
                 let max_bit_size = incoming_type.bit_size();
                 if max_bit_size <= *bit_size {
                     // Incoming variable already fits into target bit size - this is a no-op
@@ -1070,7 +1067,6 @@
                 }
                 self.acir_context.truncate_var(variable, *bit_size, max_bit_size)
             }
-            NumericType::Signed { .. } => todo!("Cast into signed"),
         }
     }
 
diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs
index e7f8131bc35..bba7f40d721 100644
--- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs
+++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs
@@ -244,6 +244,8 @@ impl<'a> FunctionContext<'a> {
     fn insert_shift_left(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId {
         let base = self.builder.field_constant(FieldElement::from(2_u128));
         let pow = self.pow(base, rhs);
+        let typ = self.builder.current_function.dfg.type_of_value(lhs);
+        let pow = self.builder.insert_cast(pow, typ);
         self.builder.insert_binary(lhs, BinaryOp::Mul, pow)
     }
 
diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs
index 9b695eb3e59..285431d2040 100644
--- a/compiler/noirc_frontend/src/ast/expression.rs
+++ b/compiler/noirc_frontend/src/ast/expression.rs
@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::fmt::Display;
 
 use crate::token::{Attributes, Token};
@@ -688,10 +689,12 @@ impl Display for FunctionDefinition {
 }
 
 impl FunctionReturnType {
-    pub fn get_type(&self) -> &UnresolvedTypeData {
+    pub fn get_type(&self) -> Cow<UnresolvedType> {
         match self {
-            FunctionReturnType::Default(_span) => &UnresolvedTypeData::Unit,
-            FunctionReturnType::Ty(typ) => &typ.typ,
+            FunctionReturnType::Default(span) => {
+                Cow::Owned(UnresolvedType { typ: UnresolvedTypeData::Unit, span: Some(*span) })
+            }
+            FunctionReturnType::Ty(typ) => Cow::Borrowed(typ),
         }
     }
 }
diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs
index 11321d673a7..407dafc0e38 100644
--- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs
+++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs
@@ -9,13 +9,14 @@ use crate::hir::resolution::{
     import::{resolve_imports, ImportDirective},
     path_resolver::StandardPathResolver,
 };
-use crate::hir::type_check::{type_check_func, TypeChecker};
+use crate::hir::type_check::{type_check_func, TypeCheckError, TypeChecker};
 use crate::hir::Context;
+use crate::hir_def::traits::{TraitConstant, TraitFunction, TraitType};
 use crate::node_interner::{FuncId, NodeInterner, StmtId, StructId, TraitId, TypeAliasId};
 use crate::{
-    ExpressionKind, FunctionReturnType, Generics, Ident, LetStatement, Literal, NoirFunction,
-    NoirStruct, NoirTrait, NoirTypeAlias, ParsedModule, Shared, StructType, TraitItem,
-    TraitItemType, Type, TypeBinding, UnresolvedGenerics, UnresolvedType,
+    ExpressionKind, Generics, Ident, LetStatement, Literal, NoirFunction, NoirStruct, NoirTrait,
+    NoirTypeAlias, ParsedModule, Shared, StructType, TraitItem, Type, TypeBinding,
+    TypeVariableKind, UnresolvedGenerics, UnresolvedType,
 };
 use fm::FileId;
 use iter_extended::vecmap;
@@ -244,8 +245,8 @@ impl DefCollector {
 
             // Type check all of the functions in the crate
             type_check_functions(&mut context.def_interner, file_func_ids, errors);
-            type_check_functions(&mut context.def_interner, file_trait_impls_ids, errors);
             type_check_functions(&mut context.def_interner, file_method_ids, errors);
+            type_check_functions(&mut context.def_interner, file_trait_impls_ids, errors);
         }
     }
 
@@ -458,7 +459,7 @@ fn resolve_trait_types(
     _crate_id: CrateId,
     _unresolved_trait: &UnresolvedTrait,
     _errors: &mut [FileDiagnostic],
-) -> Vec<TraitItemType> {
+) -> Vec<TraitType> {
     // TODO
     vec![]
 }
@@ -467,17 +468,18 @@ fn resolve_trait_constants(
     _crate_id: CrateId,
     _unresolved_trait: &UnresolvedTrait,
     _errors: &mut [FileDiagnostic],
-) -> Vec<TraitItemType> {
+) -> Vec<TraitConstant> {
     // TODO
     vec![]
 }
 
 fn resolve_trait_methods(
     context: &mut Context,
+    trait_id: TraitId,
     crate_id: CrateId,
     unresolved_trait: &UnresolvedTrait,
     errors: &mut Vec<FileDiagnostic>,
-) -> Vec<TraitItemType> {
+) -> Vec<TraitFunction> {
     let interner = &mut context.def_interner;
     let def_maps = &mut context.def_maps;
 
@@ -499,19 +501,23 @@ fn resolve_trait_methods(
             body: _,
         } = item
         {
+            let the_trait = interner.get_trait(trait_id);
+            let self_type = Type::TypeVariable(
+                the_trait.borrow().self_type_typevar.clone(),
+                TypeVariableKind::Normal,
+            );
+
             let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file);
+            resolver.set_self_type(Some(self_type));
+
             let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone()));
-            let resolved_return_type = match return_type {
-                FunctionReturnType::Default(_) => None,
-                FunctionReturnType::Ty(unresolved_type) => {
-                    Some(resolver.resolve_type(unresolved_type.clone()))
-                }
-            };
+            let resolved_return_type = resolver.resolve_type(return_type.get_type().into_owned());
+
             let name = name.clone();
             // TODO
             let generics: Generics = vec![];
             let span: Span = name.span();
-            let f = TraitItemType::Function {
+            let f = TraitFunction {
                 name,
                 generics,
                 arguments,
@@ -552,16 +558,16 @@ fn resolve_traits(
         context.def_interner.push_empty_trait(*trait_id, unresolved_trait);
     }
     for (trait_id, unresolved_trait) in traits {
-        let mut items: Vec<TraitItemType> = vec![];
         // Resolve order
-        // 1. Trait Types ( Trait contants can have a trait type, therefore types before constants)
-        items.append(&mut resolve_trait_types(context, crate_id, &unresolved_trait, errors));
-        // 2. Trait Constants ( Trait's methods can use trait types & constants, threfore they should be after)
-        items.append(&mut resolve_trait_constants(context, crate_id, &unresolved_trait, errors));
+        // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants)
+        let _ = resolve_trait_types(context, crate_id, &unresolved_trait, errors);
+        // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after)
+        let _ = resolve_trait_constants(context, crate_id, &unresolved_trait, errors);
         // 3. Trait Methods
-        items.append(&mut resolve_trait_methods(context, crate_id, &unresolved_trait, errors));
+        let methods = resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait, errors);
+
         context.def_interner.update_trait(trait_id, |trait_def| {
-            trait_def.set_items(items);
+            trait_def.set_methods(methods);
         });
     }
 }
@@ -690,6 +696,12 @@ fn resolve_trait_impls(
         errors,
     );
 
+    let mut new_resolver =
+        Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id);
+    new_resolver.set_self_type(Some(self_type.clone()));
+
+    check_methods_signatures(&mut new_resolver, &impl_methods, trait_id, errors);
+
     let trait_definition_ident = &trait_impl.trait_impl_ident;
     let key = (self_type.clone(), trait_id);
     if let Some(prev_trait_impl_ident) = interner.get_previous_trait_implementation(&key) {
@@ -709,6 +721,88 @@ fn resolve_trait_impls(
 
     methods
 }
+
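The signature-checking helper that follows instantiates the impl method's function type, walks the expected and actual parameter types pairwise, and reports a per-parameter mismatch (or a wrong-arity error) with 1-based indices. A simplified, self-contained sketch of that shape, using a toy Ty enum rather than the compiler's unification machinery:

// Toy types for illustration; the real check goes through Type::unify.
#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Field,
    Bool,
    Unit,
}

#[derive(Debug)]
enum SigError {
    WrongParamCount { expected: usize, actual: usize },
    ParamMismatch { index: usize, expected: Ty, actual: Ty },
}

// Compares a trait method signature against an impl method signature,
// reporting 1-based parameter indices as the diagnostics below do.
fn check_signature(trait_params: &[Ty], impl_params: &[Ty]) -> Vec<SigError> {
    if trait_params.len() != impl_params.len() {
        return vec![SigError::WrongParamCount {
            expected: trait_params.len(),
            actual: impl_params.len(),
        }];
    }
    trait_params
        .iter()
        .zip(impl_params)
        .enumerate()
        .filter(|(_, (expected, actual))| expected != actual)
        .map(|(index, (expected, actual))| SigError::ParamMismatch {
            index: index + 1,
            expected: expected.clone(),
            actual: actual.clone(),
        })
        .collect()
}

fn main() {
    let errors = check_signature(&[Ty::Field, Ty::Bool], &[Ty::Field, Ty::Unit]);
    println!("{errors:?}"); // parameter #2 mismatch
}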
+// TODO(vitkov): Move this out of here and into type_check
+fn check_methods_signatures(
+    resolver: &mut Resolver,
+    impl_methods: &Vec<(FileId, FuncId)>,
+    trait_id: TraitId,
+    errors: &mut Vec<FileDiagnostic>,
+) {
+    let the_trait_shared = resolver.interner.get_trait(trait_id);
+    let the_trait = the_trait_shared.borrow();
+
+    let self_type = resolver.get_self_type().expect("trait impl must have a Self type");
+
+    // Temporarily bind the trait's Self type to self_type so we can type check
+    let _ = the_trait.self_type_typevar.borrow_mut().bind_to(self_type.clone(), the_trait.span);
+
+    for (file_id, func_id) in impl_methods {
+        let meta = resolver.interner.function_meta(func_id);
+        let func_name = resolver.interner.function_name(func_id).to_owned();
+
+        let mut typecheck_errors = Vec::new();
+
+        // `method` is None in the case where the impl block has a method that's not part of the trait.
+        // If that's the case, a `MethodNotInTrait` error has already been thrown, and we can ignore
+        // the impl method, since there's nothing in the trait to match its signature against.
+        if let Some(method) =
+            the_trait.methods.iter().find(|method| method.name.0.contents == func_name)
+        {
+            let function_typ = meta.typ.instantiate(resolver.interner);
+
+            if let Type::Function(params, _, _) = function_typ.0 {
+                if method.arguments.len() == params.len() {
+                    // Check the parameters of the impl method against the parameters of the trait method
+                    for (parameter_index, ((expected, actual), (hir_pattern, _, _))) in
+                        method.arguments.iter().zip(&params).zip(&meta.parameters.0).enumerate()
+                    {
+                        expected.unify(actual, &mut typecheck_errors, || {
+                            TypeCheckError::TraitMethodParameterTypeMismatch {
+                                method_name: func_name.to_string(),
+                                expected_typ: expected.to_string(),
+                                actual_typ: actual.to_string(),
+                                parameter_span: hir_pattern.span(),
+                                parameter_index: parameter_index + 1,
+                            }
+                        });
+                    }
+                } else {
+                    errors.push(
+                        DefCollectorErrorKind::MismatchTraitImplementationNumParameters {
+                            actual_num_parameters: meta.parameters.0.len(),
+                            expected_num_parameters: method.arguments.len(),
+                            trait_name: the_trait.name.to_string(),
+                            method_name: func_name.to_string(),
+                            span: meta.location.span,
+                        }
+                        .into_file_diagnostic(*file_id),
+                    );
+                }
+            }
+
+            // Check that impl method return type matches trait return type:
+            let resolved_return_type =
+                resolver.resolve_type(meta.return_type.get_type().into_owned());
+
+            method.return_type.unify(&resolved_return_type, &mut typecheck_errors, || {
+                let ret_type_span =
+                    meta.return_type.get_type().span.expect("return type must always have a span");
+
+                TypeCheckError::TypeMismatch {
+                    expected_typ: method.return_type.to_string(),
+                    expr_typ: meta.return_type().to_string(),
+                    expr_span: ret_type_span,
+                }
+            });
+
+            extend_errors(errors, *file_id, typecheck_errors);
+        }
+    }
+
+    the_trait.self_type_typevar.borrow_mut().unbind(the_trait.self_type_typevar_id);
+}
+
 fn resolve_free_functions(
     interner: &mut NodeInterner,
     crate_id: CrateId,
diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs
index 813c222319e..a72e30ea97e 100644
--- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs
+++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs
@@ -1,3 +1,5 @@
+use std::collections::HashSet;
+
 use fm::FileId;
 use noirc_errors::{FileDiagnostic, Location};
 
@@ -212,6 +214,9 @@ impl<'a> ModCollector<'a> {
                 }
             }
 
+            // set of function ids that have a corresponding method in the trait
+            let mut func_ids_in_trait = HashSet::new();
+
             for item in &trait_def.items {
                 // TODO(Maddiaa): Investigate trait implementations with attributes see: https://github.com/noir-lang/noir/issues/2629
                 if let TraitItem::Function {
@@ -223,13 +228,19 @@ impl<'a> ModCollector<'a> {
                     body,
                 } = item
                 {
-                    let is_implemented = unresolved_functions
+                    // List of functions in the impl block with the same name as the method
+                    // `matching_fns.len() == 0` => missing method impl
+                    // `matching_fns.len() > 1` => duplicate definition (collect_functions will throw a Duplicate error)
+                    let matching_fns: Vec<_> = unresolved_functions
                         .functions
                         .iter()
-                        .any(|(_, _, func_impl)| func_impl.name() == name.0.contents);
-                    if !is_implemented {
+                        .filter(|(_, _, func_impl)| func_impl.name() == name.0.contents)
+                        .collect();
+
+                    if matching_fns.is_empty() {
                         match body {
                             Some(body) => {
+                                // if there's a default implementation for the method, use it
                                 let method_name = name.0.contents.clone();
                                 let func_id = context.def_interner.push_empty_fn();
                                 context.def_interner.push_function_definition(method_name, func_id);
@@ -241,10 +252,11 @@ impl<'a> ModCollector<'a> {
                                     where_clause,
                                     return_type,
                                 ));
+                                func_ids_in_trait.insert(func_id);
                                 unresolved_functions.push_fn(self.module_id, func_id, impl_method);
                             }
                             None => {
-                                let error = DefCollectorErrorKind::TraitMissedMethodImplementation {
+                                let error = DefCollectorErrorKind::TraitMissingMethod {
                                     trait_name: trait_def.name.clone(),
                                     method_name: name.clone(),
                                     trait_impl_span: trait_impl.object_type_span,
@@ -252,9 +264,26 @@ impl<'a> ModCollector<'a> {
                                 errors.push(error.into_file_diagnostic(self.file_id));
                             }
                         }
+                    } else {
+                        for (_, func_id, _) in &matching_fns {
+                            func_ids_in_trait.insert(*func_id);
+                        }
                     }
                 }
             }
+
+            // Emit MethodNotInTrait error for methods in the impl block that
+            // don't have a corresponding method signature defined in the trait
+            for (_, func_id, func) in &unresolved_functions.functions {
+                if !func_ids_in_trait.contains(func_id) {
+                    let error = DefCollectorErrorKind::MethodNotInTrait {
+                        trait_name: trait_def.name.clone(),
+                        impl_method: func.name_ident().clone(),
+                    };
+                    errors.push(error.into_file_diagnostic(self.file_id));
+                }
+            }
+
             unresolved_functions
         }
diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs
index ec5de088574..afd0599ef4e 100644
--- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs
+++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs
@@ -1,6 +1,5 @@
 use crate::hir::resolution::import::PathResolutionError;
 use crate::Ident;
-use crate::UnresolvedType;
 use noirc_errors::CustomDiagnostic as Diagnostic;
 use noirc_errors::FileDiagnostic;
 
@@ -34,21 +33,13 @@ pub enum DefCollectorErrorKind {
     NonStructTraitImpl { trait_ident: Ident, span: Span },
     #[error("Cannot `impl` a type defined outside the current crate")]
     ForeignImpl { span: Span, type_name: String },
-    #[error("Mismatch signature of trait")]
-    MismatchTraitImlementationParameter {
-        trait_name: String,
-        impl_method: String,
-        parameter: Ident,
-        expected_type: UnresolvedType,
-    },
-    #[error("Mismatch return type of trait implementation")]
-    MismatchTraitImplementationReturnType { trait_name: String, impl_ident: Ident },
     #[error("Mismatch number of parameters in of trait implementation")]
     MismatchTraitImplementationNumParameters {
         actual_num_parameters: usize,
         expected_num_parameters: usize,
         trait_name: String,
-        impl_ident: Ident,
+        method_name: String,
+        span: Span,
     },
     #[error("Method is not defined in trait")]
     MethodNotInTrait { trait_name: Ident, impl_method: Ident },
@@ -57,7 +48,7 @@ pub enum DefCollectorErrorKind {
     #[error("Trait not found")]
     TraitNotFound { trait_ident: Ident },
     #[error("Missing Trait method implementation")]
-    TraitMissedMethodImplementation { trait_name: Ident, method_name: Ident, trait_impl_span: Span },
+    TraitMissingMethod { trait_name: Ident, method_name: Ident, trait_impl_span: Span },
 }
 
 impl DefCollectorErrorKind {
@@ -133,51 +124,25 @@ impl From<DefCollectorErrorKind> for Diagnostic {
                 "".to_string(),
                 trait_ident.span(),
             ),
-            DefCollectorErrorKind::MismatchTraitImplementationReturnType {
-                trait_name,
-                impl_ident,
-            } => {
-                let span = impl_ident.span();
-                let method_name = impl_ident.0.contents;
-                Diagnostic::simple_error(
-                    format!("Mismatch return type of method with name {method_name} that implements trait {trait_name}"),
-                    "".to_string(),
-                    span,
-                )
-            }
             DefCollectorErrorKind::MismatchTraitImplementationNumParameters {
                 expected_num_parameters,
                 actual_num_parameters,
                 trait_name,
-                impl_ident,
-            } => {
-                let method_name = impl_ident.0.contents.clone();
-                let primary_message = format!(
"Mismatch - expected {expected_num_parameters} arguments, but got {actual_num_parameters} of trait `{trait_name}` implementation `{method_name}`"); - Diagnostic::simple_error(primary_message, "".to_string(), impl_ident.span()) - } - DefCollectorErrorKind::MismatchTraitImlementationParameter { - trait_name, - impl_method, - parameter, - expected_type, + method_name, + span, } => { let primary_message = format!( - "Mismatch signature of method {impl_method} that implements trait {trait_name}" - ); - let secondary_message = - format!("`{}: {}` expected", parameter.0.contents, expected_type,); - let span = parameter.span(); - Diagnostic::simple_error(primary_message, secondary_message, span) + "Method `{method_name}` of trait `{trait_name}` needs {expected_num_parameters} parameters, but has {actual_num_parameters}"); + Diagnostic::simple_error(primary_message, "".to_string(), span) } DefCollectorErrorKind::MethodNotInTrait { trait_name, impl_method } => { let trait_name = trait_name.0.contents; let impl_method_span = impl_method.span(); let impl_method_name = impl_method.0.contents; - let primary_message = format!("method with name {impl_method_name} is not part of trait {trait_name}, therefore it can't be implemented"); + let primary_message = format!("Method with name `{impl_method_name}` is not part of trait `{trait_name}`, therefore it can't be implemented"); Diagnostic::simple_error(primary_message, "".to_owned(), impl_method_span) } - DefCollectorErrorKind::TraitMissedMethodImplementation { + DefCollectorErrorKind::TraitMissingMethod { trait_name, method_name, trait_impl_span, @@ -185,7 +150,7 @@ impl From for Diagnostic { let trait_name = trait_name.0.contents; let impl_method_name = method_name.0.contents; let primary_message = format!( - "method `{impl_method_name}` from trait `{trait_name}` is not implemented" + "Method `{impl_method_name}` from trait `{trait_name}` is not implemented" ); Diagnostic::simple_error( primary_message, diff --git a/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs b/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs index 9d7dfc458eb..e3a9735f936 100644 --- a/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs +++ b/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs @@ -10,6 +10,7 @@ use crate::{ ParsedModule, Path, PathKind, Pattern, Statement, UnresolvedType, UnresolvedTypeData, Visibility, }; +use crate::{PrefixExpression, UnaryOp}; use noirc_errors::FileDiagnostic; // @@ -55,8 +56,12 @@ fn call(func: Expression, arguments: Vec) -> Expression { expression(ExpressionKind::Call(Box::new(CallExpression { func: Box::new(func), arguments }))) } -fn mutable(pattern: &str) -> Pattern { - Pattern::Mutable(Box::new(Pattern::Identifier(ident(pattern))), Span::default()) +fn pattern(name: &str) -> Pattern { + Pattern::Identifier(ident(name)) +} + +fn mutable(name: &str) -> Pattern { + Pattern::Mutable(Box::new(pattern(name)), Span::default()) } fn mutable_assignment(name: &str, assigned_to: Expression) -> Statement { @@ -67,6 +72,21 @@ fn mutable_assignment(name: &str, assigned_to: Expression) -> Statement { }) } +fn mutable_reference(variable_name: &str) -> Expression { + expression(ExpressionKind::Prefix(Box::new(PrefixExpression { + operator: UnaryOp::MutableReference, + rhs: variable(variable_name), + }))) +} + +fn assignment(name: &str, assigned_to: Expression) -> Statement { + Statement::Let(LetStatement { + pattern: pattern(name), + r#type: make_type(UnresolvedTypeData::Unspecified), + expression: assigned_to, + }) +} + fn 
 fn member_access(lhs: &str, rhs: &str) -> Expression {
     expression(ExpressionKind::MemberAccess(Box::new(MemberAccessExpression {
         lhs: variable(lhs),
@@ -141,7 +161,9 @@ pub(crate) fn transform(
 
     // Covers all functions in the ast
     for submodule in ast.submodules.iter_mut().filter(|submodule| submodule.is_contract) {
-        if transform_module(&mut submodule.contents.functions) {
+        let storage_defined = check_for_storage_definition(&submodule.contents);
+
+        if transform_module(&mut submodule.contents.functions, storage_defined) {
             check_for_aztec_dependency(crate_id, context, errors);
             include_relevant_imports(&mut submodule.contents);
         }
@@ -183,27 +205,37 @@ fn check_for_aztec_dependency(
     }
 }
 
+// Check to see if the user has defined a storage struct
+fn check_for_storage_definition(module: &ParsedModule) -> bool {
+    module.types.iter().any(|function| function.name.0.contents == "Storage")
+}
+
 /// Determines if the function is annotated with `aztec(private)` or `aztec(public)`
 /// If it is, it calls the `transform` function which will perform the required transformations.
 /// Returns true if an annotated function is found, false otherwise
-fn transform_module(functions: &mut [NoirFunction]) -> bool {
+fn transform_module(functions: &mut [NoirFunction], storage_defined: bool) -> bool {
     let mut has_annotated_functions = false;
     for func in functions.iter_mut() {
         for secondary_attribute in func.def.attributes.secondary.clone() {
             if let SecondaryAttribute::Custom(custom_attribute) = secondary_attribute {
                 match custom_attribute.as_str() {
                     "aztec(private)" => {
-                        transform_function("Private", func);
+                        transform_function("Private", func, storage_defined);
                         has_annotated_functions = true;
                     }
                     "aztec(public)" => {
-                        transform_function("Public", func);
+                        transform_function("Public", func, storage_defined);
                         has_annotated_functions = true;
                     }
                     _ => continue,
                 }
             }
        }
+        // Add the storage struct to the beginning of the function if it is unconstrained in an aztec contract
+        if storage_defined && func.def.is_unconstrained {
+            transform_unconstrained(func);
+            has_annotated_functions = true;
+        }
     }
     has_annotated_functions
 }
@@ -212,11 +244,17 @@ fn transform_module(functions: &mut [NoirFunction]) -> bool {
 /// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs
 /// - Hashes all of the function input variables
 /// - This instantiates a helper function
-fn transform_function(ty: &str, func: &mut NoirFunction) {
+fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) {
     let context_name = format!("{}Context", ty);
     let inputs_name = format!("{}ContextInputs", ty);
     let return_type_name = format!("{}CircuitPublicInputs", ty);
 
+    // Add access to the storage struct
+    if storage_defined {
+        let storage_def = abstract_storage(&ty.to_lowercase(), false);
+        func.def.body.0.insert(0, storage_def);
+    }
+
     // Insert the context creation as the first action
     let create_context = create_context(&context_name, &func.def.parameters);
     func.def.body.0.splice(0..0, (create_context).iter().cloned());
@@ -247,6 +285,18 @@ fn transform_function(ty: &str, func: &mut NoirFunction) {
     }
 }
 
+/// Transform Unconstrained
+///
+/// Inserts the following code at the beginning of an unconstrained function
+/// ```noir
+/// let storage = Storage::init(Context::none());
+/// ```
+///
+/// This will allow developers to access their contract's storage struct in unconstrained functions
+fn transform_unconstrained(func: &mut NoirFunction) {
+    func.def.body.0.insert(0, abstract_storage("Unconstrained", true));
+}
+
 /// Helper function that returns what the private context would look like in the ast
 /// This should make it available to be consumed within aztec private annotated functions.
 ///
@@ -413,6 +463,51 @@ fn abstract_return_values(func: &NoirFunction) -> Option<Statement> {
     }
 }
 
+/// Abstract storage
+///
+/// For private functions:
+/// ```noir
+/// #[aztec(private)]
+/// fn lol() {
+///     let storage = Storage::init(Context::private(context));
+/// }
+/// ```
+///
+/// For public functions:
+/// ```noir
+/// #[aztec(public)]
+/// fn lol() {
+///     let storage = Storage::init(Context::public(context));
+/// }
+/// ```
+///
+/// For unconstrained functions:
+/// ```noir
+/// unconstrained fn lol() {
+///     let storage = Storage::init(Context::none());
+/// }
+fn abstract_storage(typ: &str, unconstrained: bool) -> Statement {
+    let init_context_call = if unconstrained {
+        call(
+            variable_path(chained_path!("aztec", "context", "Context", "none")), // Path
+            vec![],                                                              // args
+        )
+    } else {
+        call(
+            variable_path(chained_path!("aztec", "context", "Context", typ)), // Path
+            vec![mutable_reference("context")],                               // args
+        )
+    };
+
+    assignment(
+        "storage", // Assigned to
+        call(
+            variable_path(chained_path!("Storage", "init")), // Path
+            vec![init_context_call],                         // args
+        ),
+    )
+}
+
 /// Context Return Values
 ///
 /// Creates an instance to the context return values
diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs
index 411e91f2cf4..ddc2d38b2f7 100644
--- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs
+++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs
@@ -17,6 +17,7 @@ use crate::hir_def::expr::{
     HirIfExpression, HirIndexExpression, HirInfixExpression, HirLambda, HirLiteral,
     HirMemberAccess, HirMethodCallExpression, HirPrefixExpression,
 };
+use crate::hir_def::traits::Trait;
 use crate::token::PrimaryAttribute;
 use regex::Regex;
 use std::collections::{BTreeMap, HashSet};
@@ -35,9 +36,9 @@ use crate::{
 };
 use crate::{
     ArrayLiteral, ContractFunctionType, Distinctness, Generics, LValue, NoirStruct, NoirTypeAlias,
-    Path, Pattern, Shared, StructType, Trait, Type, TypeAliasType, TypeBinding, TypeVariable,
-    UnaryOp, UnresolvedGenerics, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression,
-    Visibility, ERROR_IDENT,
+    Path, Pattern, Shared, StructType, Type, TypeAliasType, TypeBinding, TypeVariable, UnaryOp,
+    UnresolvedGenerics, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, Visibility,
+    ERROR_IDENT,
 };
 use fm::FileId;
 use iter_extended::vecmap;
@@ -76,7 +77,7 @@ pub struct Resolver<'a> {
     scopes: ScopeForest,
     path_resolver: &'a dyn PathResolver,
     def_maps: &'a BTreeMap<CrateId, CrateDefMap>,
-    interner: &'a mut NodeInterner,
+    pub interner: &'a mut NodeInterner,
     errors: Vec<ResolverError>,
     file: FileId,
 
@@ -127,6 +128,10 @@ impl<'a> Resolver<'a> {
         self.self_type = self_type;
     }
 
+    pub fn get_self_type(&mut self) -> Option<&Type> {
+        self.self_type.as_ref()
+    }
+
     fn push_err(&mut self, err: ResolverError) {
         self.errors.push(err);
     }
diff --git a/compiler/noirc_frontend/src/hir/type_check/errors.rs b/compiler/noirc_frontend/src/hir/type_check/errors.rs
index 3190c7a24a2..ece3a4c61ef 100644
--- a/compiler/noirc_frontend/src/hir/type_check/errors.rs
+++ b/compiler/noirc_frontend/src/hir/type_check/errors.rs
@@ -100,6 +100,14 @@ pub enum TypeCheckError {
     ResolverError(ResolverError),
     #[error("Unused expression result of type {expr_type}")]
     UnusedResultError { expr_type: Type, expr_span: Span },
is not the same as {actual_typ:?}")] + TraitMethodParameterTypeMismatch { + method_name: String, + expected_typ: String, + actual_typ: String, + parameter_span: Span, + parameter_index: usize, + }, } impl TypeCheckError { @@ -133,6 +141,13 @@ impl From for Diagnostic { expr_span, ) } + TypeCheckError::TraitMethodParameterTypeMismatch { method_name, expected_typ, actual_typ, parameter_index, parameter_span } => { + Diagnostic::simple_error( + format!("Parameter #{parameter_index} of method `{method_name}` must be of type {expected_typ}, not {actual_typ}"), + String::new(), + parameter_span, + ) + } TypeCheckError::NonHomogeneousArray { first_span, first_type, diff --git a/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/compiler/noirc_frontend/src/hir/type_check/stmt.rs index 0407f79049f..5db23c3f4f2 100644 --- a/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ b/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -297,6 +297,10 @@ impl<'interner> TypeChecker<'interner> { HirExpression::Prefix(_) => self .errors .push(TypeCheckError::InvalidUnaryOp { kind: annotated_type.to_string(), span }), + HirExpression::Infix(expr) => { + self.lint_overflowing_uint(&expr.lhs, annotated_type); + self.lint_overflowing_uint(&expr.rhs, annotated_type); + } _ => {} } } diff --git a/compiler/noirc_frontend/src/hir_def/mod.rs b/compiler/noirc_frontend/src/hir_def/mod.rs index 3dc2407486d..206fc3ddda5 100644 --- a/compiler/noirc_frontend/src/hir_def/mod.rs +++ b/compiler/noirc_frontend/src/hir_def/mod.rs @@ -18,4 +18,5 @@ pub mod expr; pub mod function; pub mod stmt; +pub mod traits; pub mod types; diff --git a/compiler/noirc_frontend/src/hir_def/stmt.rs b/compiler/noirc_frontend/src/hir_def/stmt.rs index 0dcb7192be2..d7f0d2e466f 100644 --- a/compiler/noirc_frontend/src/hir_def/stmt.rs +++ b/compiler/noirc_frontend/src/hir_def/stmt.rs @@ -79,6 +79,15 @@ impl HirPattern { other => panic!("Tried to iterate over the fields of '{other:?}', which has none"), } } + + pub fn span(&self) -> Span { + match self { + HirPattern::Identifier(ident) => ident.location.span, + HirPattern::Mutable(_, span) + | HirPattern::Tuple(_, span) + | HirPattern::Struct(_, _, span) => *span, + } + } } /// Represents an Ast form that can be assigned to. These diff --git a/compiler/noirc_frontend/src/hir_def/traits.rs b/compiler/noirc_frontend/src/hir_def/traits.rs new file mode 100644 index 00000000000..4176e4fc89b --- /dev/null +++ b/compiler/noirc_frontend/src/hir_def/traits.rs @@ -0,0 +1,122 @@ +use crate::{ + node_interner::{FuncId, TraitId}, + Generics, Ident, Type, TypeVariable, TypeVariableId, +}; +use noirc_errors::Span; + +#[derive(Debug, PartialEq, Eq)] +pub struct TraitFunction { + pub name: Ident, + pub generics: Generics, + pub arguments: Vec, + pub return_type: Type, + pub span: Span, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct TraitConstant { + pub name: Ident, + pub ty: Type, + pub span: Span, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct TraitType { + pub name: Ident, + pub ty: Type, + pub span: Span, +} + +/// Represents a trait in the type system. Each instance of this struct +/// will be shared across all Type::Trait variants that represent +/// the same trait. +#[derive(Debug)] +pub struct Trait { + /// A unique id representing this trait type. Used to check if two + /// struct traits are equal. 
+ pub id: TraitId, + + pub methods: Vec<TraitFunction>, + pub constants: Vec<TraitConstant>, + pub types: Vec<TraitType>, + + pub name: Ident, + pub generics: Generics, + pub span: Span, + + /// When resolving the types of Trait elements, all references to `Self` resolve + /// to this TypeVariable. Then when we check if the types of trait impl elements + /// match the definition in the trait, we bind this TypeVariable to whatever + /// the correct Self type is for that particular impl block. + pub self_type_typevar_id: TypeVariableId, + pub self_type_typevar: TypeVariable, +} + +pub struct TraitImpl { + pub ident: Ident, + pub typ: Type, + pub trait_id: TraitId, + pub methods: Vec<FuncId>, // methods[i] is the implementation of trait.methods[i] for Type typ +} + +#[derive(Debug, Clone)] +pub struct TraitConstraint { + pub typ: Type, + pub trait_id: Option<TraitId>, + // pub trait_generics: Generics, TODO +} + +impl std::hash::Hash for Trait { + fn hash<H: std::hash::Hasher>(&self, state: &mut H) { + self.id.hash(state); + } +} + +impl PartialEq for Trait { + fn eq(&self, other: &Self) -> bool { + self.id == other.id + } +} + +impl Trait { + pub fn new( + id: TraitId, + name: Ident, + span: Span, + generics: Generics, + self_type_typevar_id: TypeVariableId, + self_type_typevar: TypeVariable, + ) -> Trait { + Trait { + id, + name, + span, + methods: Vec::new(), + constants: Vec::new(), + types: Vec::new(), + generics, + self_type_typevar_id, + self_type_typevar, + } + } + + pub fn set_methods(&mut self, methods: Vec<TraitFunction>) { + self.methods = methods; + } } + +impl std::fmt::Display for Trait { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.name) + } +} + +impl TraitFunction { + pub fn get_type(&self) -> Type { + Type::Function( + self.arguments.clone(), + Box::new(self.return_type.clone()), + Box::new(Type::Unit), + ) + } +}
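The `self_type_typevar` fields above describe a bind/check/unbind cycle for `Self`. Below is a minimal, self-contained model of that cycle; `Shared`, `TypeBinding`, and `TypeVariableId` are simplified stand-ins (the real `Bound` variant holds a full `Type`, and `unbind` mirrors the helper this diff adds to `TypeBinding` in types.rs).

```rust
use std::{cell::RefCell, rc::Rc};

// Stand-in for the compiler's `Shared<T>` alias.
type Shared<T> = Rc<RefCell<T>>;

#[derive(Clone, Copy, Debug)]
struct TypeVariableId(usize);

#[derive(Debug)]
enum TypeBinding {
    Bound(&'static str), // the real compiler binds a full `Type`
    Unbound(TypeVariableId),
}

impl TypeBinding {
    // Mirrors the `unbind` helper added in the types.rs hunk below.
    fn unbind(&mut self, id: TypeVariableId) {
        *self = TypeBinding::Unbound(id);
    }
}

fn main() {
    // One `Self` type variable is shared by every signature in the trait.
    let id = TypeVariableId(0);
    let self_type_typevar: Shared<TypeBinding> =
        Rc::new(RefCell::new(TypeBinding::Unbound(id)));

    // Checking `impl MyTrait for Foo`: bind `Self` to `Foo`, then compare the
    // impl's method signatures against the trait's definitions...
    *self_type_typevar.borrow_mut() = TypeBinding::Bound("Foo");
    assert!(matches!(&*self_type_typevar.borrow(), TypeBinding::Bound(s) if *s == "Foo"));

    // ...then release the binding so the next impl block can bind its own Self.
    self_type_typevar.borrow_mut().unbind(id);
    assert!(matches!(&*self_type_typevar.borrow(), TypeBinding::Unbound(_)));
}
```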
diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 8372f7a0355..eb837ec5f55 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -12,7 +12,7 @@ use iter_extended::vecmap; use noirc_errors::Span; use noirc_printable_type::PrintableType; -use crate::{node_interner::StructId, node_interner::TraitId, Ident, Signedness}; +use crate::{node_interner::StructId, Ident, Signedness}; use super::expr::{HirCallExpression, HirExpression, HirIdent}; @@ -123,39 +123,6 @@ pub struct StructType { pub span: Span, } -#[derive(Debug, PartialEq, Eq)] -pub enum TraitItemType { - /// A function declaration in a trait. - Function { - name: Ident, - generics: Generics, - arguments: Vec<Type>, - return_type: Option<Type>, - span: Span, - }, - - /// A constant declaration in a trait. - Constant { name: Ident, ty: Type, span: Span }, - - /// A type declaration in a trait. - Type { name: Ident, ty: Type, span: Span }, -} -/// Represents a trait type in the type system. Each instance of this -/// rust struct will be shared across all Type::Trait variants that represent -/// the same trait type. -#[derive(Debug, Eq)] -pub struct Trait { - /// A unique id representing this trait type. Used to check if two - /// struct traits are equal. - pub id: TraitId, - - pub items: Vec<TraitItemType>, - - pub name: Ident, - pub generics: Generics, - pub span: Span, -} - /// Corresponds to generic lists such as `<T, U>` in the source /// program. The `TypeVariableId` portion is used to match two /// type variables to check for equality, while the `TypeVariable` is @@ -174,40 +141,6 @@ impl PartialEq for StructType { } } -impl std::hash::Hash for Trait { - fn hash<H: std::hash::Hasher>(&self, state: &mut H) { - self.id.hash(state); - } -} - -impl PartialEq for Trait { - fn eq(&self, other: &Self) -> bool { - self.id == other.id - } -} - -impl Trait { - pub fn new( - id: TraitId, - name: Ident, - span: Span, - items: Vec<TraitItemType>, - generics: Generics, - ) -> Trait { - Trait { id, name, span, items, generics } - } - - pub fn set_items(&mut self, items: Vec<TraitItemType>) { - self.items = items; - } -} - -impl std::fmt::Display for Trait { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.name) - } -} - impl StructType { pub fn new( id: StructId, @@ -462,6 +395,10 @@ impl TypeBinding { } } } + + pub fn unbind(&mut self, id: TypeVariableId) { + *self = TypeBinding::Unbound(id); + } } /// A unique ID used to differentiate different type variables diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index e0e404b48d8..10a007bdbdb 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -13,11 +13,12 @@ use crate::hir::def_collector::dc_crate::{ use crate::hir::def_map::{LocalModuleId, ModuleId}; use crate::hir::StorageSlot; use crate::hir_def::stmt::HirLetStatement; -use crate::hir_def::types::{StructType, Trait, Type}; +use crate::hir_def::types::{StructType, Type}; use crate::hir_def::{ expr::HirExpression, function::{FuncMeta, HirFunction}, stmt::HirStatement, + traits::Trait, }; use crate::token::Attributes; use crate::{ @@ -349,13 +350,15 @@ impl NodeInterner { } pub fn push_empty_trait(&mut self, type_id: TraitId, typ: &UnresolvedTrait) { + let self_type_typevar_id = self.next_type_variable_id(); + let self_type_typevar = Shared::new(TypeBinding::Unbound(self_type_typevar_id)); + self.traits.insert( type_id, Shared::new(Trait::new( type_id, typ.trait_def.name.clone(), typ.trait_def.span, - Vec::new(), vecmap(&typ.trait_def.generics, |_| { // Temporary type variable ids before the trait is resolved to its actual ids.
// This lets us record how many arguments the type expects so that other types @@ -364,6 +367,8 @@ impl NodeInterner { let id = TypeVariableId(0); (id, Shared::new(TypeBinding::Unbound(id))) }), + self_type_typevar_id, + self_type_typevar, )), ); } diff --git a/compiler/source-resolver/.eslintignore b/compiler/source-resolver/.eslintignore new file mode 100644 index 00000000000..3c3629e647f --- /dev/null +++ b/compiler/source-resolver/.eslintignore @@ -0,0 +1 @@ +node_modules diff --git a/compiler/source-resolver/.eslintrc.js b/compiler/source-resolver/.eslintrc.js new file mode 100644 index 00000000000..33335c2a877 --- /dev/null +++ b/compiler/source-resolver/.eslintrc.js @@ -0,0 +1,3 @@ +module.exports = { + extends: ["../../.eslintrc.js"], +}; diff --git a/compiler/source-resolver/package.json b/compiler/source-resolver/package.json index 0014a5fa738..7295343b163 100644 --- a/compiler/source-resolver/package.json +++ b/compiler/source-resolver/package.json @@ -22,7 +22,8 @@ "build:web": "tsc -p tsconfig.esm.json", "build": "npm run clean-modules && npm run build:node && npm run build:web && npm run generate-types", "test": "ava", - "generate-types": "tsc src/*.ts --declaration --emitDeclarationOnly --outDir types" + "generate-types": "tsc src/*.ts --declaration --emitDeclarationOnly --outDir types", + "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "devDependencies": { "@types/node": "^20.5.7", diff --git a/compiler/source-resolver/src/index.ts b/compiler/source-resolver/src/index.ts index 7d8e254f7db..e7e1db64b75 100644 --- a/compiler/source-resolver/src/index.ts +++ b/compiler/source-resolver/src/index.ts @@ -1,31 +1,37 @@ -let resolveFunction: Function | null = null; +let resolveFunction: ((source_id: string) => string) | null = null; -export let read_file = function (source_id: any): string { +export const read_file = function (source_id: string): string { + if (resolveFunction) { + const result = resolveFunction(source_id); - if (resolveFunction) { - - const result = resolveFunction(source_id); - - if (typeof result === "string") { - return result; - } else { - throw new Error("Noir source resolver function MUST return String synchronously. Are you trying to return anything else, eg. `Promise`?"); - } + if (typeof result === "string") { + return result; } else { - throw new Error('Not yet initialized. Use initializeResolver(() => string)'); + throw new Error( + "Noir source resolver function MUST return String synchronously. Are you trying to return anything else, eg. `Promise`?" + ); } - + } else { + throw new Error( + "Not yet initialized. 
Use initializeResolver(() => string)" ); } }; -function initialize(noir_resolver: (source_id: String) => string): (source_id: String) => string { - - if (typeof noir_resolver === "function") { - return noir_resolver; - } else { - throw new Error("Provided Noir Resolver is not a function, hint: use function(module_id) => NoirSource as second parameter"); - } +function initialize( + noir_resolver: (source_id: string) => string +): (source_id: string) => string { + if (typeof noir_resolver === "function") { + return noir_resolver; + } else { + throw new Error( + "Provided Noir Resolver is not a function, hint: use function(module_id) => NoirSource as second parameter" + ); + } } -export function initializeResolver(resolver: (source_id: String) => string): void { - resolveFunction = initialize(resolver); -} \ No newline at end of file +export function initializeResolver( + resolver: (source_id: string) => string +): void { + resolveFunction = initialize(resolver); +} diff --git a/compiler/source-resolver/src/index_node.ts b/compiler/source-resolver/src/index_node.ts index 396af74c058..016e118d044 100644 --- a/compiler/source-resolver/src/index_node.ts +++ b/compiler/source-resolver/src/index_node.ts @@ -1,20 +1,17 @@ /// <reference types="node" /> -import { initializeResolver, read_file } from './index.js'; +import { initializeResolver, read_file } from "./index.js"; -initializeResolver((source_id: String) => { - let fileContent = ""; - try { - const fs = require("fs"); - fileContent = - fs.readFileSync(source_id, { encoding: "utf8" }) as string - ; - } catch (e) { - console.log(e); - } - return fileContent; +initializeResolver((source_id: string) => { + let fileContent = ""; + try { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const fs = require("fs"); + fileContent = fs.readFileSync(source_id, { encoding: "utf8" }) as string; + } catch (e) { + console.log(e); + } + return fileContent; }); export { initializeResolver, read_file }; - - diff --git a/compiler/source-resolver/types/index.d.ts b/compiler/source-resolver/types/index.d.ts index 22d9a98fa1f..ef4e98c87e1 100644 --- a/compiler/source-resolver/types/index.d.ts +++ b/compiler/source-resolver/types/index.d.ts @@ -1,2 +1,4 @@ -export declare let read_file: (source_id: any) => string; +export declare const read_file: (source_id: string) => string; +export declare function initializeResolver( + resolver: (source_id: string) => string +): void; diff --git a/compiler/source-resolver/types/index_node.d.ts b/compiler/source-resolver/types/index_node.d.ts index 3a109e47e73..2a645e138bb 100644 --- a/compiler/source-resolver/types/index_node.d.ts +++ b/compiler/source-resolver/types/index_node.d.ts @@ -1,2 +1,2 @@ -import { initializeResolver, read_file } from './index.js'; +import { initializeResolver, read_file } from "./index.js"; export { initializeResolver, read_file }; diff --git a/compiler/wasm/.eslintignore b/compiler/wasm/.eslintignore new file mode 100644 index 00000000000..3c3629e647f --- /dev/null +++ b/compiler/wasm/.eslintignore @@ -0,0 +1 @@ +node_modules diff --git a/compiler/wasm/.eslintrc.js b/compiler/wasm/.eslintrc.js new file mode 100644 index 00000000000..33335c2a877 --- /dev/null +++ b/compiler/wasm/.eslintrc.js @@ -0,0 +1,3 @@ +module.exports = { + extends: ["../../.eslintrc.js"], +}; diff --git a/compiler/wasm/README.md index c5e8d54a836..0b2d92b0815 100644 --- a/compiler/wasm/README.md +++
b/compiler/wasm/README.md @@ -9,11 +9,11 @@ The package also handles dependency management like how Nargo (Noir's CLI tool) Outside of the [noir repo](https://github.com/noir-lang/noir), this package can be built using the command below: ```bash -nix build -L github:noir-lang/noir/master#wasm +nix build -L github:noir-lang/noir/master#noir_wasm ``` If you are within the noir repo and would like to build local changes, you can use: ```bash -nix build -L #wasm +nix build -L #noir_wasm ``` diff --git a/compiler/wasm/installPhase.sh b/compiler/wasm/installPhase.sh index a71b3f8cd0e..e5be98a3339 100755 --- a/compiler/wasm/installPhase.sh +++ b/compiler/wasm/installPhase.sh @@ -1,8 +1,10 @@ #!/usr/bin/env bash export self_path=$(dirname "$(readlink -f "$0")") -mkdir -p $out -cp $self_path/README.md $out/ -cp $self_path/package.json $out/ -cp -r $self_path/nodejs $out/ -cp -r $self_path/web $out/ +export out_path=$out/noir_wasm + +mkdir -p $out_path +cp $self_path/README.md $out_path/ +cp $self_path/package.json $out_path/ +cp -r $self_path/nodejs $out_path/ +cp -r $self_path/web $out_path/ diff --git a/compiler/wasm/package.json b/compiler/wasm/package.json index 2645ec4035c..4c2b2181d2a 100644 --- a/compiler/wasm/package.json +++ b/compiler/wasm/package.json @@ -22,7 +22,8 @@ "build": "bash ./build.sh", "test": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\" }' mocha", "test:node": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\" }' mocha", - "test:browser": "web-test-runner" + "test:browser": "web-test-runner", + "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "peerDependencies": { "@noir-lang/source-resolver": "workspace:*" diff --git a/compiler/wasm/test/browser/index.test.ts b/compiler/wasm/test/browser/index.test.ts index 9b1649bf0c3..6f79a7ab60c 100644 --- a/compiler/wasm/test/browser/index.test.ts +++ b/compiler/wasm/test/browser/index.test.ts @@ -1,6 +1,10 @@ import { expect } from "@esm-bundle/chai"; import initNoirWasm from "@noir-lang/noir_wasm"; -import { compileNoirSource, nargoArtifactPath, noirSourcePath } from "../shared"; +import { + compileNoirSource, + nargoArtifactPath, + noirSourcePath, +} from "../shared"; beforeEach(async () => { await initNoirWasm(); @@ -13,7 +17,7 @@ } async function getFileContent(path: string): Promise<string> { } async function getSource(): Promise<string> { - return getFileContent(noirSourcePath) + return getFileContent(noirSourcePath); } async function getPrecompiledSource(): Promise<string> { @@ -29,7 +33,6 @@ describe("noir wasm compilation", () => { const cliCircuitBase64 = await getPrecompiledSource(); - expect(wasmCircuitBase64).to.equal(cliCircuitBase64); }).timeout(20e3); // 20 seconds }); diff --git a/compiler/wasm/test/index.d.ts b/compiler/wasm/test/index.d.ts index b64901ebda2..35743f263cf 100644 --- a/compiler/wasm/test/index.d.ts +++ b/compiler/wasm/test/index.d.ts @@ -1 +1 @@ -declare module '@noir-lang/source-resolver'; \ No newline at end of file +declare module "@noir-lang/source-resolver"; diff --git a/compiler/wasm/test/node/index.test.ts b/compiler/wasm/test/node/index.test.ts index f823db35944..c180fd6db39 100644 --- a/compiler/wasm/test/node/index.test.ts +++ b/compiler/wasm/test/node/index.test.ts @@ -34,7 +34,7 @@ describe("noir wasm compilation", () => { console.log( "Compilation is a match?
", - wasmCircuitBase64 === cliCircuitBase64, + wasmCircuitBase64 === cliCircuitBase64 ); expect(wasmCircuitBase64).to.equal(cliCircuitBase64); diff --git a/compiler/wasm/test/shared.ts b/compiler/wasm/test/shared.ts index 24a09cc1a04..6e5ad5969f3 100644 --- a/compiler/wasm/test/shared.ts +++ b/compiler/wasm/test/shared.ts @@ -5,10 +5,10 @@ export const noirSourcePath = "../../noir-script/src/main.nr"; export const nargoArtifactPath = "../../noir-script/target/noir_wasm_testing.json"; -export async function compileNoirSource(noir_source: string): Promise { +export async function compileNoirSource(noir_source: string): Promise { console.log("Compiling Noir source..."); - initializeResolver((id: String) => { + initializeResolver((id: string) => { console.log(`Resolving source ${id}`); const source = noir_source; diff --git a/flake.lock b/flake.lock index 659daec5d3a..e779473f0ec 100644 --- a/flake.lock +++ b/flake.lock @@ -1,28 +1,5 @@ { "nodes": { - "barretenberg": { - "inputs": { - "flake-utils": [ - "flake-utils" - ], - "nixpkgs": [ - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1688820427, - "narHash": "sha256-w7yMeYp50KrlTn23TTKfYmLOQL4uIgw0wSX67v2tvvc=", - "owner": "AztecProtocol", - "repo": "barretenberg", - "rev": "fdd46f77531a6fcc9d9b24a698c56590d54d487e", - "type": "github" - }, - "original": { - "owner": "AztecProtocol", - "repo": "barretenberg", - "type": "github" - } - }, "crane": { "inputs": { "flake-compat": [ @@ -34,9 +11,7 @@ "nixpkgs": [ "nixpkgs" ], - "rust-overlay": [ - "rust-overlay" - ] + "rust-overlay": "rust-overlay" }, "locked": { "lastModified": 1681177078, @@ -52,6 +27,27 @@ "type": "github" } }, + "fenix": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ], + "rust-analyzer-src": "rust-analyzer-src" + }, + "locked": { + "lastModified": 1694499657, + "narHash": "sha256-u/fZtLtN7VcDrMMVrdsFy93PEkaiK+tNpJT9on4SGdU=", + "owner": "nix-community", + "repo": "fenix", + "rev": "2895ff377cbb3cb6f5dd92066734b0447cb04e20", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "fenix", + "type": "github" + } + }, "flake-compat": { "flake": false, "locked": { @@ -104,29 +100,47 @@ }, "root": { "inputs": { - "barretenberg": "barretenberg", "crane": "crane", + "fenix": "fenix", "flake-compat": "flake-compat", "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs", - "rust-overlay": "rust-overlay" + "nixpkgs": "nixpkgs" + } + }, + "rust-analyzer-src": { + "flake": false, + "locked": { + "lastModified": 1694421477, + "narHash": "sha256-df6YZzR57VFzkOPwIohJfC0fRwgq6yUPbMJkKAtQyAE=", + "owner": "rust-lang", + "repo": "rust-analyzer", + "rev": "cc6c8209cbaf7df55013977cf5cc8488d6b7ff1c", + "type": "github" + }, + "original": { + "owner": "rust-lang", + "ref": "nightly", + "repo": "rust-analyzer", + "type": "github" } }, "rust-overlay": { "inputs": { "flake-utils": [ + "crane", "flake-utils" ], "nixpkgs": [ + "crane", "nixpkgs" ] }, "locked": { - "lastModified": 1681352318, - "narHash": "sha256-+kwy7bTsuW8GYrRqWRQ8T5hg6duZb5IJiHlKo1J+v9g=", + "lastModified": 1694484610, + "narHash": "sha256-aeSDkp7fkAqtVjW3QUn7vq7BKNlFul/BiGgdv7rK+mA=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "aeaa11c65a5c5cebaa51652353ab3c497b9a7bbf", + "rev": "c5b977a7e6a295697fa1f9c42174fd6313b38df4", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index f4c986f75da..d4d1f4c54fa 100644 --- a/flake.nix +++ b/flake.nix @@ -1,6 +1,8 @@ { description = "Build the Noir programming language"; + # All of these inputs (a.k.a. 
dependencies) need to align with inputs we + # use so they use the `inputs.*.follows` syntax to reference our inputs inputs = { nixpkgs = { url = "github:NixOS/nixpkgs/nixos-22.11"; @@ -15,131 +17,71 @@ flake = false; }; - rust-overlay = { - url = "github:oxalica/rust-overlay"; - # All of these inputs (a.k.a. dependencies) need to align with inputs we - # use so they use the `inputs.*.follows` syntax to reference our inputs + fenix = { + url = "github:nix-community/fenix"; inputs = { nixpkgs.follows = "nixpkgs"; - flake-utils.follows = "flake-utils"; }; }; crane = { url = "github:ipetkov/crane"; - # All of these inputs (a.k.a. dependencies) need to align with inputs we - # use so they use the `inputs.*.follows` syntax to reference our inputs inputs = { nixpkgs.follows = "nixpkgs"; flake-utils.follows = "flake-utils"; flake-compat.follows = "flake-compat"; - rust-overlay.follows = "rust-overlay"; - }; - }; - - barretenberg = { - url = "github:AztecProtocol/barretenberg"; - # All of these inputs (a.k.a. dependencies) need to align with inputs we - # use so they use the `inputs.*.follows` syntax to reference our inputs - inputs = { - nixpkgs.follows = "nixpkgs"; - flake-utils.follows = "flake-utils"; }; }; }; outputs = - { self, nixpkgs, crane, flake-utils, rust-overlay, barretenberg, ... }: + { self, nixpkgs, crane, flake-utils, fenix, ... }: flake-utils.lib.eachDefaultSystem (system: let pkgs = import nixpkgs { inherit system; - overlays = [ - rust-overlay.overlays.default - barretenberg.overlays.default - ]; }; - rustToolchain = pkgs.rust-bin.stable."1.66.0".default.override { - # We include rust-src to ensure rust-analyzer works. - # See https://discourse.nixos.org/t/rust-src-not-found-and-other-misadventures-of-developing-rust-on-nixos/11570/4 - extensions = [ "rust-src" ]; - targets = [ "wasm32-unknown-unknown" ] - ++ pkgs.lib.optional (pkgs.hostPlatform.isx86_64 && pkgs.hostPlatform.isLinux) "x86_64-unknown-linux-gnu" - ++ pkgs.lib.optional (pkgs.hostPlatform.isAarch64 && pkgs.hostPlatform.isLinux) "aarch64-unknown-linux-gnu" - ++ pkgs.lib.optional (pkgs.hostPlatform.isx86_64 && pkgs.hostPlatform.isDarwin) "x86_64-apple-darwin" - ++ pkgs.lib.optional (pkgs.hostPlatform.isAarch64 && pkgs.hostPlatform.isDarwin) "aarch64-apple-darwin"; + rustToolchain = fenix.packages.${system}.fromToolchainFile { + file = ./rust-toolchain.toml; + sha256 = "sha256-Zk2rxv6vwKFkTTidgjPm6gDsseVmmljVt201H7zuDkk="; }; craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; - sharedEnvironment = { - # We enable backtraces on any failure for help with debugging - RUST_BACKTRACE = "1"; - - BARRETENBERG_ARCHIVE = builtins.fetchurl { - url = "https://github.com/AztecProtocol/barretenberg/releases/download/barretenberg-v0.4.5/acvm_backend.wasm.tar.gz"; - sha256 = "sha256:0z24yhvxc0dr13xj7y4xs9p42lzxwpazrmsrdpcgynfajkk6vqy4"; - }; - }; - - nativeEnvironment = sharedEnvironment // { - # rust-bindgen needs to know the location of libclang - LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib"; - }; - - wasmEnvironment = sharedEnvironment // { - # We set the environment variable because barretenberg must be compiled in a special way for wasm - BARRETENBERG_BIN_DIR = "${pkgs.barretenberg-wasm}/bin"; - }; - - testEnvironment = sharedEnvironment // { }; - # The `self.rev` property is only available when the working tree is not dirty GIT_COMMIT = if (self ? rev) then self.rev else "unknown"; GIT_DIRTY = if (self ? 
rev) then "false" else "true"; - # We use `include_str!` macro to embed the solidity verifier template so we need to create a special - # source filter to include .sol files in addition to usual rust/cargo source files. - solidityFilter = path: _type: builtins.match ".*sol$" path != null; - # We use `.bytecode` and `.tr` files to test interactions with `bb` so we add a source filter to include these. - bytecodeFilter = path: _type: builtins.match ".*bytecode$" path != null; - witnessFilter = path: _type: builtins.match ".*tr$" path != null; - # We use `.nr` and `.toml` files in tests so we need to create a special source - # filter to include those files in addition to usual rust/cargo source files - noirFilter = path: _type: builtins.match ".*nr$" path != null; - tomlFilter = path: _type: builtins.match ".*toml$" path != null; - sourceFilter = path: type: - (solidityFilter path type) || (bytecodeFilter path type)|| (witnessFilter path type) || (noirFilter path type) || (tomlFilter path type) || (craneLib.filterCargoSources path type); - - # As per https://discourse.nixos.org/t/gcc11stdenv-and-clang/17734/7 since it seems that aarch64-linux uses - # gcc9 instead of gcc11 for the C++ stdlib, while all other targets we support provide the correct libstdc++ - stdenv = - if (pkgs.stdenv.targetPlatform.isGnu && pkgs.stdenv.targetPlatform.isAarch64) then - pkgs.overrideCC pkgs.llvmPackages.stdenv (pkgs.llvmPackages.clang.override { gccForLibs = pkgs.gcc11.cc; }) - else - pkgs.llvmPackages.stdenv; - - extraBuildInputs = pkgs.lib.optionals pkgs.stdenv.isDarwin [ + extraBuildInputs = [ ] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [ # Need libiconv and apple Security on Darwin. See https://github.com/ipetkov/crane/issues/156 pkgs.libiconv pkgs.darwin.apple_sdk.frameworks.Security - ] ++ [ - # Need to install various packages used by the `bb` binary. - pkgs.curl - stdenv.cc.cc.lib - pkgs.gcc.cc.lib - pkgs.gzip ]; - sharedArgs = { + environment = { + # We enable backtraces on any failure for help with debugging + RUST_BACKTRACE = "1"; + + # We download the Wasm version of `acvm_backend` in the barretenberg releases for the ACVM `blackbox_solver` + BARRETENBERG_ARCHIVE = pkgs.fetchurl { + url = "https://github.com/AztecProtocol/barretenberg/releases/download/barretenberg-v0.4.5/acvm_backend.wasm.tar.gz"; + sha256 = "sha256-xONt5pTKWf/YbVnX/NXl/VNBbtKd+CP7CLkB1jf0RHw="; + }; + }; + + # Configuration shared between builds + config = { # x-release-please-start-version version = "0.12.0"; # x-release-please-end src = pkgs.lib.cleanSourceWith { src = craneLib.path ./.; - filter = sourceFilter; + # Custom filter with various file extensions that we rely upon to build packages + # Currently: `.nr`, `.sol`, `.sh`, `.json`, `.md` + filter = path: type: + (builtins.match ".*\.(nr|sol|sh|json|md)$" path != null) || (craneLib.filterCargoSources path type); }; # TODO(#1198): It'd be nice to include these flags when running `cargo clippy` in a devShell. 
@@ -149,131 +91,91 @@ cargoTestExtraArgs = "--workspace"; }; - # Combine the environment and other configuration needed for crane to build our Rust packages - nativeArgs = nativeEnvironment // sharedArgs // { - pname = "noir-native"; - - # Use our custom stdenv to build and test our Rust project - inherit stdenv; - - nativeBuildInputs = [ - # This provides the pkg-config tool to find barretenberg & other native libraries - pkgs.pkg-config - # This provides the `lld` linker to cargo - pkgs.llvmPackages.bintools - ] ++ pkgs.lib.optionals stdenv.isLinux [ - # This is linux specific and used to patch the rpath and interpreter of the bb binary - pkgs.patchelf - ]; + # Combine the environment and other configuration needed for Crane to build our Rust packages + nativeConfig = environment // config // { + nativeBuildInputs = [ ]; - buildInputs = [ - ] ++ extraBuildInputs; + buildInputs = [ ] ++ extraBuildInputs; }; - # Combine the environmnet with cargo args needed to build wasm package - noirWasmArgs = sharedEnvironment // sharedArgs // { - pname = "noir_wasm"; - - src = ./.; + # Combine the environment and other configuration needed for Crane to build our Wasm packages + wasmConfig = environment // config // { + CARGO_TARGET_DIR = "./target"; - cargoExtraArgs = "--package noir_wasm --target wasm32-unknown-unknown"; + nativeBuildInputs = with pkgs; [ + which + git + jq + rustToolchain + wasm-bindgen-cli + binaryen + toml2json + ]; buildInputs = [ ] ++ extraBuildInputs; - - doCheck = false; }; - # Combine the environment with cargo args needed to build wasm package - noirc_abi_WasmArgs = sharedEnvironment // sharedArgs // { + # Build *just* the cargo dependencies, so we can reuse all of that work between runs + native-cargo-artifacts = craneLib.buildDepsOnly (nativeConfig // { + pname = "nargo"; + }); + noir-wasm-cargo-artifacts = craneLib.buildDepsOnly (wasmConfig // { + pname = "noir_wasm"; + }); + noirc-abi-wasm-cargo-artifacts = craneLib.buildDepsOnly (wasmConfig // { pname = "noirc_abi_wasm"; + }); - src = ./.; + nargo = craneLib.buildPackage (nativeConfig // { + pname = "nargo"; - cargoExtraArgs = "--package noirc_abi_wasm --target wasm32-unknown-unknown"; + inherit GIT_COMMIT GIT_DIRTY; - buildInputs = [ ] ++ extraBuildInputs; + cargoArtifacts = native-cargo-artifacts; + # We don't want to run tests because they don't work in the Nix sandbox doCheck = false; - }; - - # Conditionally download the binary based on whether it is linux or mac - bb_binary = let - platformSpecificUrl = if stdenv.hostPlatform.isLinux then - "https://github.com/AztecProtocol/barretenberg/releases/download/barretenberg-v0.4.3/bb-ubuntu.tar.gz" - else if stdenv.hostPlatform.isDarwin then - "https://github.com/AztecProtocol/barretenberg/releases/download/barretenberg-v0.4.3/barretenberg-x86_64-apple-darwin.tar.gz" - else - throw "Unsupported platform"; - - platformSpecificHash = if stdenv.hostPlatform.isLinux then - "sha256:0rcsjws87f4v28cw9734c10pg7c49apigf4lg3m0ji5vbhhmfnhr" - else if stdenv.hostPlatform.isDarwin then - "sha256:0pnsd56z0vkai7m0advawfgcvq9jbnpqm7lk98n5flqj583x3w35" - else - throw "Unsupported platform"; - in builtins.fetchurl { - url = platformSpecificUrl; - sha256 = platformSpecificHash; - }; + }); + + noir_wasm = craneLib.buildPackage (wasmConfig // rec { + pname = "noir_wasm"; + + inherit GIT_COMMIT GIT_DIRTY; + + cargoArtifacts = noir-wasm-cargo-artifacts; - # The `port` is parameterized to support parallel test runs without colliding static servers - testArgs = port: testEnvironment // {
BB_BINARY_PATH = "/tmp/backend_binary"; - - BB_BINARY_URL = "http://0.0.0.0:${toString port}/${builtins.baseNameOf bb_binary}"; - - # We provide `barretenberg-transcript00` from the overlay to the tests as a URL hosted via a static server - # This is necessary because the Nix sandbox has no network access and downloading during tests would fail - BARRETENBERG_TRANSCRIPT_URL = "http://0.0.0.0:${toString port}/${builtins.baseNameOf pkgs.barretenberg-transcript00}"; - - # This copies the `barretenberg-transcript00` from the Nix store into this sandbox - # which avoids exposing the entire Nix store to the static server it starts - # The static server is moved to the background and killed after checks are completed - # - # We also set the NARGO_BACKEND_CACHE_DIR environment variable to the $TMP directory so we can successfully cache - # the transcript; which isn't possible with the default path because the Nix sandbox disabled $HOME - preCheck = '' - echo "Extracting bb binary" - mkdir extracted - tar -xf ${bb_binary} -C extracted - - # Conditionally patch the binary for Linux - ${if stdenv.hostPlatform.isLinux then '' - - cp extracted/cpp/build/bin/bb /tmp/backend_binary - - echo "Patching bb binary for Linux" - patchelf --set-rpath "${stdenv.cc.cc.lib}/lib:${pkgs.gcc.cc.lib}/lib" /tmp/backend_binary - patchelf --set-interpreter ${stdenv.cc.libc}/lib/ld-linux-x86-64.so.2 /tmp/backend_binary - '' else if stdenv.hostPlatform.isDarwin then '' - cp extracted/bb /tmp/backend_binary - '' else - throw "Unsupported platform" - } - - export NARGO_BACKEND_CACHE_DIR=$TMP - cp ${pkgs.barretenberg-transcript00} . - echo "Starting simple static server" - ${pkgs.simple-http-server}/bin/simple-http-server --port ${toString port} --silent & - HTTP_SERVER_PID=$! + cargoExtraArgs = "--package ${pname} --target wasm32-unknown-unknown"; + + buildPhaseCargoCommand = '' + bash compiler/wasm/buildPhaseCargoCommand.sh release ''; - postCheck = '' - kill $HTTP_SERVER_PID + installPhase = '' + bash compiler/wasm/installPhase.sh ''; - }; - # Build *just* the cargo dependencies, so we can reuse all of that work between runs - native-cargo-artifacts = craneLib.buildDepsOnly nativeArgs; - noir-wasm-cargo-artifacts = craneLib.buildDepsOnly noirWasmArgs; - noirc-abi-wasm-cargo-artifacts = craneLib.buildDepsOnly noirc_abi_WasmArgs; + # We don't want to run tests because they don't work in the Nix sandbox + doCheck = false; + }); + + noirc_abi_wasm = craneLib.buildPackage (wasmConfig // rec { + pname = "noirc_abi_wasm"; - noir-native = craneLib.buildPackage (nativeArgs // { inherit GIT_COMMIT GIT_DIRTY; - cargoArtifacts = native-cargo-artifacts; + cargoArtifacts = noirc-abi-wasm-cargo-artifacts; - # We don't want to run checks or tests when just building the project + cargoExtraArgs = "--package ${pname} --target wasm32-unknown-unknown"; + + buildPhaseCargoCommand = '' + bash tooling/noirc_abi_wasm/buildPhaseCargoCommand.sh release + ''; + + installPhase = '' + bash tooling/noirc_abi_wasm/installPhase.sh + ''; + + # We don't want to run tests because they don't work in the Nix sandbox doCheck = false; }); @@ -284,22 +186,20 @@ }; }; in - rec { + { + # We use `checks` to run `cargo clippy` and `cargo fmt` since we disable checks in the primary derivations checks = { - cargo-clippy = craneLib.cargoClippy (nativeArgs // { - inherit GIT_COMMIT GIT_DIRTY; - - cargoArtifacts = native-cargo-artifacts; - }); + cargo-clippy = craneLib.cargoClippy (nativeConfig // { + pname = "noir"; - cargo-fmt = craneLib.cargoFmt (nativeArgs // { 
inherit GIT_COMMIT GIT_DIRTY; cargoArtifacts = native-cargo-artifacts; - doCheck = true; }); - cargo-test = craneLib.cargoTest (nativeArgs // (testArgs 8000) // { + cargo-fmt = craneLib.cargoFmt (nativeConfig // { + pname = "noir"; + inherit GIT_COMMIT GIT_DIRTY; cargoArtifacts = native-cargo-artifacts; @@ -307,9 +207,16 @@ }; packages = { - default = noir-native; + default = nargo; - inherit noir-native; + # Nix flakes cannot build more than one derivation in one command (see https://github.com/NixOS/nix/issues/5591) + # so we use `symlinkJoin` to build everything as the "all" package. + all = pkgs.symlinkJoin { name = "all"; paths = [ nargo noir_wasm noirc_abi_wasm ]; }; + + # We also export individual packages to enable `nix build .#nargo -L`, etc. + inherit nargo; + inherit noir_wasm; + inherit noirc_abi_wasm; # We expose the `*-cargo-artifacts` derivations so we can cache our cargo dependencies in CI inherit native-cargo-artifacts; @@ -317,111 +224,37 @@ inherit noirc-abi-wasm-cargo-artifacts; }; - # TODO(#1197): Look into installable apps with Nix flakes - # apps.default = flake-utils.lib.mkApp { drv = nargo; }; - - # Setup the environment to match the stdenv from `nix build` & `nix flake check`, and - # combine it with the environment settings, the inputs from our checks derivations, + # Setup the environment to match the environment settings, the inputs from our checks derivations, # and extra tooling via `nativeBuildInputs` - devShells.default = pkgs.mkShell.override { inherit stdenv; } (nativeEnvironment // wasmEnvironment // testEnvironment // { - inputsFrom = builtins.attrValues checks; + devShells.default = pkgs.mkShell (environment // { + inputsFrom = [ + nargo + noir_wasm + noirc_abi_wasm + ]; + # Additional tools that weren't included as `nativeBuildInputs` of any of the derivations in `inputsFrom` nativeBuildInputs = with pkgs; [ + # Rust toolchain + rustToolchain + # Other tools + starship + yarn + nodejs-18_x + # Used by the `bb` binary curl gzip - which - starship - git + # This ensures the right lldb is in the environment for running rust-lldb + llvmPackages.lldb + # Nix tools nil nixpkgs-fmt - toml2json - llvmPackages.lldb # This ensures the right lldb is in the environment for running rust-lldb - wasm-bindgen-cli - jq - binaryen - yarn - # rust-bin.stable."1.66.1".default - rustToolchain - rust-analyzer - rustup - nodejs-18_x ]; shellHook = '' eval "$(starship init bash)" ''; }); - - # TODO: This fails with a "section too large" error on MacOS so we should limit to linux targets - # or fix the failure - packages.wasm = craneLib.buildPackage (noirWasmArgs // { - - inherit GIT_COMMIT; - inherit GIT_DIRTY; - doCheck = false; - - cargoArtifacts = noir-wasm-cargo-artifacts; - - COMMIT_SHORT = builtins.substring 0 7 GIT_COMMIT; - VERSION_APPENDIX = if GIT_DIRTY == "true" then "-dirty" else ""; - PKG_PATH = "./pkg"; - CARGO_TARGET_DIR = "./target"; - - nativeBuildInputs = with pkgs; [ - which - git - jq - rustToolchain - wasm-bindgen-cli - binaryen - toml2json - ]; - - buildPhaseCargoCommand = '' - bash compiler/wasm/buildPhaseCargoCommand.sh release - ''; - - installPhase = '' - bash compiler/wasm/installPhase.sh - ''; - - }); - - # TODO: This fails with a "section too large" error on MacOS so we should limit to linux targets - # or fix the failure - packages.noirc_abi_wasm = craneLib.buildPackage (noirc_abi_WasmArgs // { - - inherit GIT_COMMIT; - inherit GIT_DIRTY; - doCheck = false; - - cargoArtifacts = noirc-abi-wasm-cargo-artifacts; - - COMMIT_SHORT = 
builtins.substring 0 7 GIT_COMMIT; - VERSION_APPENDIX = if GIT_DIRTY == "true" then "-dirty" else ""; - PKG_PATH = "./pkg"; - CARGO_TARGET_DIR = "./target"; - - nativeBuildInputs = with pkgs; [ - which - git - jq - rustToolchain - wasm-bindgen-cli - binaryen - toml2json - ]; - - buildPhaseCargoCommand = '' - bash tooling/noirc_abi_wasm/buildPhaseCargoCommand.sh release - ''; - - installPhase = '' - bash tooling/noirc_abi_wasm/installPhase.sh - ''; - - }); - }); } diff --git a/package.json b/package.json index 873ced5f3d1..abfa7f270ba 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,8 @@ "scripts": { "build": "yarn workspaces foreach run build", "test": "yarn workspaces foreach run test", - "test:integration": "yarn workspace integration-tests test" + "test:integration": "yarn workspace integration-tests test", + "lint": "yarn workspaces foreach run lint" }, "devDependencies": { "@typescript-eslint/eslint-plugin": "^5.59.5", diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 00000000000..870d0089548 --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,5 @@ +[toolchain] +channel = "1.66.1" +components = [ "rust-src" ] +targets = [ "wasm32-unknown-unknown", "aarch64-apple-darwin" ] +profile = "default" diff --git a/tooling/acvm_backend_barretenberg/.gitignore b/tooling/acvm_backend_barretenberg/.gitignore deleted file mode 100644 index 106a4f552a0..00000000000 --- a/tooling/acvm_backend_barretenberg/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!src/witness.tr diff --git a/tooling/acvm_backend_barretenberg/CHANGELOG.md b/tooling/backend_interface/CHANGELOG.md similarity index 100% rename from tooling/acvm_backend_barretenberg/CHANGELOG.md rename to tooling/backend_interface/CHANGELOG.md diff --git a/tooling/acvm_backend_barretenberg/Cargo.toml b/tooling/backend_interface/Cargo.toml similarity index 78% rename from tooling/acvm_backend_barretenberg/Cargo.toml rename to tooling/backend_interface/Cargo.toml index a86fa90b0be..127770c5722 100644 --- a/tooling/acvm_backend_barretenberg/Cargo.toml +++ b/tooling/backend_interface/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "acvm-backend-barretenberg" -description = "An ACVM backend which allows proving/verifying ACIR circuits against Aztec Lab's Barretenberg library." +name = "backend-interface" +description = "The definition of the backend CLI interface which Nargo uses for proving/verifying ACIR circuits." 
version = "0.11.0" authors.workspace = true edition.workspace = true @@ -15,6 +15,7 @@ dirs.workspace = true thiserror.workspace = true serde.workspace = true serde_json.workspace = true +bb_abstraction_leaks.workspace = true tempfile = "3.6.0" diff --git a/tooling/acvm_backend_barretenberg/src/cli/contract.rs b/tooling/backend_interface/src/cli/contract.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/src/cli/contract.rs rename to tooling/backend_interface/src/cli/contract.rs diff --git a/tooling/acvm_backend_barretenberg/src/cli/gates.rs b/tooling/backend_interface/src/cli/gates.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/src/cli/gates.rs rename to tooling/backend_interface/src/cli/gates.rs diff --git a/tooling/acvm_backend_barretenberg/src/cli/info.rs b/tooling/backend_interface/src/cli/info.rs similarity index 56% rename from tooling/acvm_backend_barretenberg/src/cli/info.rs rename to tooling/backend_interface/src/cli/info.rs index 5d9e662b6b7..d015284e704 100644 --- a/tooling/acvm_backend_barretenberg/src/cli/info.rs +++ b/tooling/backend_interface/src/cli/info.rs @@ -1,10 +1,9 @@ -use acvm::acir::circuit::opcodes::Opcode; use acvm::Language; use serde::Deserialize; use std::collections::HashSet; use std::path::{Path, PathBuf}; -use crate::BackendError; +use crate::{BackendError, BackendOpcodeSupport}; pub(crate) struct InfoCommand { pub(crate) crs_path: PathBuf, @@ -23,11 +22,20 @@ struct LanguageResponse { width: Option, } +impl BackendOpcodeSupport { + fn new(info: InfoResponse) -> Self { + let opcodes: HashSet = info.opcodes_supported.into_iter().collect(); + let black_box_functions: HashSet = + info.black_box_functions_supported.into_iter().collect(); + Self { opcodes, black_box_functions } + } +} + impl InfoCommand { pub(crate) fn run( self, binary_path: &Path, - ) -> Result<(Language, Box bool>), BackendError> { + ) -> Result<(Language, BackendOpcodeSupport), BackendError> { let mut command = std::process::Command::new(binary_path); command.arg("info").arg("-c").arg(self.crs_path).arg("-o").arg("-"); @@ -49,24 +57,7 @@ impl InfoCommand { _ => panic!("Unknown langauge"), }; - let opcodes_set: HashSet = backend_info.opcodes_supported.into_iter().collect(); - let black_box_functions_set: HashSet = - backend_info.black_box_functions_supported.into_iter().collect(); - - let is_opcode_supported = move |opcode: &Opcode| -> bool { - match opcode { - Opcode::Arithmetic(_) => opcodes_set.contains("arithmetic"), - Opcode::Directive(_) => opcodes_set.contains("directive"), - Opcode::Brillig(_) => opcodes_set.contains("brillig"), - Opcode::MemoryInit { .. } => opcodes_set.contains("memory_init"), - Opcode::MemoryOp { .. 
} => opcodes_set.contains("memory_op"), - Opcode::BlackBoxFuncCall(func) => { - black_box_functions_set.contains(func.get_black_box_func().name()) - } - } - }; - - Ok((language, Box::new(is_opcode_supported))) + Ok((language, BackendOpcodeSupport::new(backend_info))) } } @@ -79,10 +70,10 @@ fn info_command() -> Result<(), BackendError> { let backend = crate::get_mock_backend()?; let crs_path = backend.backend_directory(); - let (language, is_opcode_supported) = InfoCommand { crs_path }.run(backend.binary_path())?; + let (language, opcode_support) = InfoCommand { crs_path }.run(backend.binary_path())?; assert!(matches!(language, Language::PLONKCSat { width: 3 })); - assert!(is_opcode_supported(&Opcode::Arithmetic(Expression::default()))); + assert!(opcode_support.is_opcode_supported(&Opcode::Arithmetic(Expression::default()))); Ok(()) } diff --git a/tooling/acvm_backend_barretenberg/src/cli/mod.rs b/tooling/backend_interface/src/cli/mod.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/src/cli/mod.rs rename to tooling/backend_interface/src/cli/mod.rs diff --git a/tooling/acvm_backend_barretenberg/src/cli/prove.rs b/tooling/backend_interface/src/cli/prove.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/src/cli/prove.rs rename to tooling/backend_interface/src/cli/prove.rs diff --git a/tooling/acvm_backend_barretenberg/src/cli/verify.rs b/tooling/backend_interface/src/cli/verify.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/src/cli/verify.rs rename to tooling/backend_interface/src/cli/verify.rs diff --git a/tooling/acvm_backend_barretenberg/src/cli/write_vk.rs b/tooling/backend_interface/src/cli/write_vk.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/src/cli/write_vk.rs rename to tooling/backend_interface/src/cli/write_vk.rs diff --git a/tooling/acvm_backend_barretenberg/src/download.rs b/tooling/backend_interface/src/download.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/src/download.rs rename to tooling/backend_interface/src/download.rs diff --git a/tooling/acvm_backend_barretenberg/src/lib.rs b/tooling/backend_interface/src/lib.rs similarity index 73% rename from tooling/acvm_backend_barretenberg/src/lib.rs rename to tooling/backend_interface/src/lib.rs index e3fc4865d6d..6d18b6b6ead 100644 --- a/tooling/acvm_backend_barretenberg/src/lib.rs +++ b/tooling/backend_interface/src/lib.rs @@ -1,17 +1,18 @@ #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] -use std::path::PathBuf; +use std::{collections::HashSet, path::PathBuf}; mod cli; mod download; mod proof_system; mod smart_contract; +use acvm::acir::circuit::Opcode; +use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; pub use download::download_backend; const BACKENDS_DIR: &str = ".nargo/backends"; -pub const ACVM_BACKEND_BARRETENBERG: &str = "acvm-backend-barretenberg"; pub fn backends_directory() -> PathBuf { let home_directory = dirs::home_dir().unwrap(); @@ -64,6 +65,10 @@ impl Backend { Backend { name, binary_path } } + pub fn name(&self) -> &str { + &self.name + } + fn binary_path(&self) -> &PathBuf { &self.binary_path } @@ -76,7 +81,7 @@ impl Backend { if self.name == ACVM_BACKEND_BARRETENBERG { // If we're trying to use barretenberg, automatically go and install it. 
let bb_url = std::env::var("BB_BINARY_URL") - .unwrap_or_else(|_| env!("BB_BINARY_URL").to_string()); + .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); download_backend(&bb_url, binary_path)?; return Ok(binary_path); } @@ -96,6 +101,26 @@ } } +pub struct BackendOpcodeSupport { + opcodes: HashSet<String>, + black_box_functions: HashSet<String>, +} + +impl BackendOpcodeSupport { + pub fn is_opcode_supported(&self, opcode: &Opcode) -> bool { + match opcode { + Opcode::Arithmetic(_) => self.opcodes.contains("arithmetic"), + Opcode::Directive(_) => self.opcodes.contains("directive"), + Opcode::Brillig(_) => self.opcodes.contains("brillig"), + Opcode::MemoryInit { .. } => self.opcodes.contains("memory_init"), + Opcode::MemoryOp { .. } => self.opcodes.contains("memory_op"), + Opcode::BlackBoxFuncCall(func) => { + self.black_box_functions.contains(func.get_black_box_func().name()) + } + } + } +} + #[cfg(test)] mod backend { use crate::{Backend, BackendError};
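Call sites that still need a plain `Fn(&Opcode) -> bool` (as the nargo_cli hunks later in this diff do) can wrap the struct in a closure. A self-contained sketch, with a reduced stand-in `Opcode` and assuming the same opcode-name strings the backend's `info` command reports:

```rust
use std::collections::HashSet;

// Reduced stand-in for `acvm::acir::circuit::Opcode`; the real variants carry
// data, and the full match lives in `is_opcode_supported` above.
enum Opcode {
    Arithmetic,
    Brillig,
}

struct BackendOpcodeSupport {
    opcodes: HashSet<String>,
}

impl BackendOpcodeSupport {
    fn is_opcode_supported(&self, opcode: &Opcode) -> bool {
        match opcode {
            Opcode::Arithmetic => self.opcodes.contains("arithmetic"),
            Opcode::Brillig => self.opcodes.contains("brillig"),
        }
    }
}

fn main() {
    // The backend reports supported opcodes as plain strings.
    let support = BackendOpcodeSupport {
        opcodes: ["arithmetic".to_string()].into_iter().collect(),
    };
    // Callers that previously received a boxed closure rebuild one on the spot:
    let is_opcode_supported = |opcode: &Opcode| support.is_opcode_supported(opcode);
    assert!(is_opcode_supported(&Opcode::Arithmetic));
    assert!(!is_opcode_supported(&Opcode::Brillig));
}
```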
diff --git a/tooling/acvm_backend_barretenberg/src/proof_system.rs b/tooling/backend_interface/src/proof_system.rs similarity index 72% rename from tooling/acvm_backend_barretenberg/src/proof_system.rs rename to tooling/backend_interface/src/proof_system.rs index a2700171abf..ffdb7531ed1 100644 --- a/tooling/acvm_backend_barretenberg/src/proof_system.rs +++ b/tooling/backend_interface/src/proof_system.rs @@ -2,14 +2,13 @@ use std::fs::File; use std::io::Write; use std::path::Path; -use acvm::acir::circuit::Opcode; use acvm::acir::{circuit::Circuit, native_types::WitnessMap}; use acvm::FieldElement; use acvm::Language; use tempfile::tempdir; use crate::cli::{GatesCommand, InfoCommand, ProveCommand, VerifyCommand, WriteVkCommand}; -use crate::{Backend, BackendError}; +use crate::{Backend, BackendError, BackendOpcodeSupport}; impl Backend { pub fn get_exact_circuit_size(&self, circuit: &Circuit) -> Result<u32, BackendError> { @@ -27,9 +26,7 @@ impl Backend { .run(binary_path) } - pub fn get_backend_info( - &self, - ) -> Result<(Language, Box<dyn Fn(&Opcode) -> bool>), BackendError> { + pub fn get_backend_info(&self) -> Result<(Language, BackendOpcodeSupport), BackendError> { let binary_path = self.assert_binary_exists()?; InfoCommand { crs_path: self.crs_directory() }.run(binary_path) } @@ -66,15 +63,10 @@ } .run(binary_path)?; - // Barretenberg return the proof prepended with the public inputs. - // - // This is not how the API expects the proof to be formatted, - // so we remove the public inputs from the proof. - // - // TODO: As noted in the verification procedure, this is an abstraction leak - // TODO: and will need modifications to barretenberg - let proof = - remove_public_inputs(circuit.public_inputs().0.len(), &proof_with_public_inputs); + let proof = bb_abstraction_leaks::remove_public_inputs( + circuit.public_inputs().0.len(), + &proof_with_public_inputs, + ); Ok(proof) } @@ -95,12 +87,10 @@ let flattened_public_inputs: Vec<FieldElement> = public_inputs.into_iter().map(|(_, el)| el).collect(); - // Barretenberg expects the proof to be prepended with the public inputs. - // - // TODO: This is an abstraction leak and barretenberg's API should accept the public inputs - // TODO: separately and then prepend them internally - let proof_with_public_inputs = - prepend_public_inputs(proof.to_vec(), flattened_public_inputs.to_vec()); + let proof_with_public_inputs = bb_abstraction_leaks::prepend_public_inputs( + proof.to_vec(), + flattened_public_inputs.to_vec(), + ); // Create a temporary file for the proof let proof_path = temp_directory.join("proof").with_extension("proof"); @@ -142,26 +132,6 @@ pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String { } } -/// Removes the public inputs which are prepended to a proof by Barretenberg. -fn remove_public_inputs(num_pub_inputs: usize, proof: &[u8]) -> Vec<u8> { - // Barretenberg prepends the public inputs onto the proof so we need to remove - // the first `num_pub_inputs` field elements. - let num_bytes_to_remove = num_pub_inputs * (FieldElement::max_num_bytes() as usize); - proof[num_bytes_to_remove..].to_vec() -} - -/// Prepends a set of public inputs to a proof. -fn prepend_public_inputs(proof: Vec<u8>, public_inputs: Vec<FieldElement>) -> Vec<u8> { - if public_inputs.is_empty() { - return proof; - } - - let public_inputs_bytes = - public_inputs.into_iter().flat_map(|assignment| assignment.to_be_bytes()); - - public_inputs_bytes.chain(proof).collect() -} - // TODO: See nargo/src/artifacts/mod.rs // TODO: This method should live in ACVM and be the default method for serializing/deserializing circuits pub(super) fn serialize_circuit(circuit: &Circuit) -> Vec<u8> { diff --git a/tooling/acvm_backend_barretenberg/src/smart_contract.rs b/tooling/backend_interface/src/smart_contract.rs similarity index 79% rename from tooling/acvm_backend_barretenberg/src/smart_contract.rs rename to tooling/backend_interface/src/smart_contract.rs index e5018c69bd9..5f56557cad4 100644 --- a/tooling/acvm_backend_barretenberg/src/smart_contract.rs +++ b/tooling/backend_interface/src/smart_contract.rs @@ -6,9 +6,6 @@ use crate::{ use acvm::acir::circuit::Circuit; use tempfile::tempdir; -/// Embed the Solidity verifier file -const ULTRA_VERIFIER_CONTRACT: &str = include_str!("contract.sol"); - impl Backend { pub fn eth_contract(&self, circuit: &Circuit) -> Result<String, BackendError> { let binary_path = self.assert_binary_exists()?; @@ -32,11 +29,7 @@ } .run(binary_path)?; - let verification_key_library = - ContractCommand { crs_path: self.crs_directory(), vk_path }.run(binary_path)?; - - drop(temp_directory); - Ok(format!("{verification_key_library}{ULTRA_VERIFIER_CONTRACT}")) + ContractCommand { crs_path: self.crs_directory(), vk_path }.run(binary_path) } } @@ -67,9 +60,7 @@ mod tests { let contract = get_mock_backend()?.eth_contract(&circuit)?; - assert!(contract.contains("contract BaseUltraVerifier")); - assert!(contract.contains("contract UltraVerifier")); - assert!(contract.contains("library UltraVerificationKey")); + assert!(contract.contains("contract VerifierContract")); Ok(()) } diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/Cargo.lock b/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock similarity index 100% rename from tooling/acvm_backend_barretenberg/test-binaries/mock_backend/Cargo.lock rename to tooling/backend_interface/test-binaries/mock_backend/Cargo.lock diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/Cargo.toml b/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml similarity index 100% rename from tooling/acvm_backend_barretenberg/test-binaries/mock_backend/Cargo.toml
rename to tooling/backend_interface/test-binaries/mock_backend/Cargo.toml diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/contract_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs similarity index 72% rename from tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/contract_cmd.rs rename to tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs index fb8daf784f1..7ee41121d61 100644 --- a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/contract_cmd.rs +++ b/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs @@ -17,9 +17,5 @@ pub(crate) struct ContractCommand { pub(crate) fn run(args: ContractCommand) { assert!(args.vk_path.is_file(), "Could not find vk file at provided path"); - std::io::stdout() - .write_all( - b"contract BaseUltraVerifier contract UltraVerifier library UltraVerificationKey", - ) - .unwrap(); + std::io::stdout().write_all(b"contract VerifierContract {}").unwrap(); } diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/gates_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/gates_cmd.rs rename to tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/info_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/info_cmd.rs rename to tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/main.rs b/tooling/backend_interface/test-binaries/mock_backend/src/main.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/main.rs rename to tooling/backend_interface/test-binaries/mock_backend/src/main.rs diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/prove_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/prove_cmd.rs rename to tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/verify_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/verify_cmd.rs rename to tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/write_vk_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/write_vk_cmd.rs rename to tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs diff --git a/tooling/bb_abstraction_leaks/Cargo.toml b/tooling/bb_abstraction_leaks/Cargo.toml new file mode 100644 index 00000000000..86164ba11c3 --- /dev/null +++ b/tooling/bb_abstraction_leaks/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "bb_abstraction_leaks" +description = "A crate which encapsulates knowledge about Barretenberg which is currently leaking into Nargo" +version = "0.11.0" 
+authors.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +acvm.workspace = true + +[build-dependencies] +build-target = "0.4.0" +const_format = "0.2.30" diff --git a/tooling/acvm_backend_barretenberg/build.rs b/tooling/bb_abstraction_leaks/build.rs similarity index 100% rename from tooling/acvm_backend_barretenberg/build.rs rename to tooling/bb_abstraction_leaks/build.rs diff --git a/tooling/acvm_backend_barretenberg/src/contract.sol b/tooling/bb_abstraction_leaks/src/contract.sol similarity index 100% rename from tooling/acvm_backend_barretenberg/src/contract.sol rename to tooling/bb_abstraction_leaks/src/contract.sol diff --git a/tooling/bb_abstraction_leaks/src/lib.rs b/tooling/bb_abstraction_leaks/src/lib.rs new file mode 100644 index 00000000000..799e14c36f0 --- /dev/null +++ b/tooling/bb_abstraction_leaks/src/lib.rs @@ -0,0 +1,34 @@ +#![warn(unused_crate_dependencies, unused_extern_crates)] +#![warn(unreachable_pub)] + +use acvm::FieldElement; + +pub const ACVM_BACKEND_BARRETENBERG: &str = "acvm-backend-barretenberg"; +pub const BB_DOWNLOAD_URL: &str = env!("BB_BINARY_URL"); + +/// Embed the Solidity verifier file +const ULTRA_VERIFIER_CONTRACT: &str = include_str!("contract.sol"); + +pub fn complete_barretenberg_verifier_contract(contract: String) -> String { + format!("{contract}{ULTRA_VERIFIER_CONTRACT}") +} + +/// Removes the public inputs which are prepended to a proof by Barretenberg. +pub fn remove_public_inputs(num_pub_inputs: usize, proof: &[u8]) -> Vec<u8> { + // Barretenberg prepends the public inputs onto the proof so we need to remove + // the first `num_pub_inputs` field elements. + let num_bytes_to_remove = num_pub_inputs * (FieldElement::max_num_bytes() as usize); + proof[num_bytes_to_remove..].to_vec() +} + +/// Prepends a set of public inputs to a proof.
+pub fn prepend_public_inputs(proof: Vec<u8>, public_inputs: Vec<FieldElement>) -> Vec<u8> { + if public_inputs.is_empty() { + return proof; + } + + let public_inputs_bytes = + public_inputs.into_iter().flat_map(|assignment| assignment.to_be_bytes()); + + public_inputs_bytes.chain(proof).collect() +}
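A quick round-trip check of these two helpers, as a self-contained sketch: plain 32-byte arrays stand in for `FieldElement`, assuming `FieldElement::max_num_bytes()` is 32 (as it is for the field Barretenberg uses).

```rust
// Prepending public inputs and then stripping them should recover the proof.
const FIELD_SIZE: usize = 32; // assumed FieldElement::max_num_bytes()

fn prepend_public_inputs(proof: Vec<u8>, public_inputs: Vec<[u8; FIELD_SIZE]>) -> Vec<u8> {
    if public_inputs.is_empty() {
        return proof;
    }
    // Each public input occupies one fixed-width slot in front of the proof.
    public_inputs.into_iter().flatten().chain(proof).collect()
}

fn remove_public_inputs(num_pub_inputs: usize, proof: &[u8]) -> Vec<u8> {
    proof[num_pub_inputs * FIELD_SIZE..].to_vec()
}

fn main() {
    let proof = vec![0xAB_u8; 64];
    let public_inputs = vec![[1_u8; FIELD_SIZE], [2_u8; FIELD_SIZE]];

    let with_inputs = prepend_public_inputs(proof.clone(), public_inputs);
    assert_eq!(with_inputs.len(), 2 * FIELD_SIZE + proof.len());
    assert_eq!(remove_public_inputs(2, &with_inputs), proof);
}
```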
&workspace { let circuit_build_path = workspace.package_build_path(package); @@ -56,7 +57,7 @@ pub(crate) fn run( circuit_build_path, &args.compile_options, np_language, - &is_opcode_supported, + &|opcode| opcode_support.is_opcode_supported(opcode), )?; let contract_dir = workspace.contracts_directory_path(package); @@ -91,7 +92,12 @@ fn smart_contract_for_package( } }; - let smart_contract_string = backend.eth_contract(&preprocessed_program.bytecode)?; + let mut smart_contract_string = backend.eth_contract(&preprocessed_program.bytecode)?; + + if backend.name() == ACVM_BACKEND_BARRETENBERG { + smart_contract_string = + bb_abstraction_leaks::complete_barretenberg_verifier_contract(smart_contract_string); + } Ok(smart_contract_string) } diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index c769cb68ba5..3ccd99558a7 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -2,6 +2,7 @@ use std::path::Path; use acvm::acir::circuit::Opcode; use acvm::Language; +use backend_interface::BackendOpcodeSupport; use fm::FileManager; use iter_extended::vecmap; use nargo::artifacts::contract::PreprocessedContract; @@ -69,8 +70,14 @@ pub(crate) fn run( .cloned() .partition(|package| package.is_binary()); - let (compiled_programs, compiled_contracts) = - compile_workspace(backend, &binary_packages, &contract_packages, &args.compile_options)?; + let (np_language, opcode_support) = backend.get_backend_info()?; + let (compiled_programs, compiled_contracts) = compile_workspace( + &binary_packages, + &contract_packages, + np_language, + &opcode_support, + &args.compile_options, + )?; // Save build artifacts to disk. for (package, program) in binary_packages.into_iter().zip(compiled_programs) { @@ -84,12 +91,13 @@ pub(crate) fn run( } pub(super) fn compile_workspace( - backend: &Backend, binary_packages: &[Package], contract_packages: &[Package], + np_language: Language, + opcode_support: &BackendOpcodeSupport, compile_options: &CompileOptions, ) -> Result<(Vec<CompiledProgram>, Vec<CompiledContract>), CliError> { - let (np_language, is_opcode_supported) = backend.get_backend_info()?; + let is_opcode_supported = |opcode: &_| opcode_support.is_opcode_supported(opcode); // Compile all of the packages in parallel.
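// (Reviewer note on the refactor visible above: `compile_workspace` no longer takes `backend: &Backend`; each command calls `backend.get_backend_info()` once and passes `np_language` and `opcode_support` in, so compilation itself stays backend-agnostic and the backend is queried once per command rather than once per compile.)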
let program_results: Vec<(FileManager, CompilationResult<CompiledProgram>)> = binary_packages diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index 8c434f8fe21..b21d2a9b1e0 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -54,10 +54,12 @@ pub(crate) fn run( let workspace = resolve_workspace_from_toml(&toml_path, selection)?; let target_dir = &workspace.target_directory_path(); - let (np_language, is_opcode_supported) = backend.get_backend_info()?; + let (np_language, opcode_support) = backend.get_backend_info()?; for package in &workspace { let compiled_program = - compile_bin_package(package, &args.compile_options, np_language, &is_opcode_supported)?; + compile_bin_package(package, &args.compile_options, np_language, &|opcode| { + opcode_support.is_opcode_supported(opcode) + })?; let (return_value, solved_witness) = execute_program_and_decode(compiled_program, package, &args.prover_name)?; diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index ffa522d25b4..8018308ee54 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,5 +1,5 @@ use acvm::Language; -use acvm_backend_barretenberg::BackendError; +use backend_interface::BackendError; use clap::Args; use iter_extended::vecmap; use nargo::package::Package; @@ -55,10 +55,15 @@ pub(crate) fn run( .cloned() .partition(|package| package.is_binary()); - let (compiled_programs, compiled_contracts) = - compile_workspace(backend, &binary_packages, &contract_packages, &args.compile_options)?; + let (np_language, opcode_support) = backend.get_backend_info()?; + let (compiled_programs, compiled_contracts) = compile_workspace( + &binary_packages, + &contract_packages, + np_language, + &opcode_support, + &args.compile_options, + )?; - let (np_language, _) = backend.get_backend_info()?; let program_info = binary_packages .into_par_iter() .zip(compiled_programs) diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs index 03146d3919c..cbe6deca1c2 100644 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -63,7 +63,7 @@ pub(crate) fn run( let workspace = resolve_workspace_from_toml(&toml_path, selection)?; let proof_dir = workspace.proofs_directory_path(); - let (np_language, is_opcode_supported) = backend.get_backend_info()?; + let (np_language, opcode_support) = backend.get_backend_info()?; for package in &workspace { let circuit_build_path = workspace.package_build_path(package); @@ -77,7 +77,7 @@ pub(crate) fn run( args.verify, &args.compile_options, np_language, - &is_opcode_supported, + &|opcode| opcode_support.is_opcode_supported(opcode), )?; } diff --git a/tooling/nargo_cli/src/cli/verify_cmd.rs b/tooling/nargo_cli/src/cli/verify_cmd.rs index 452d58ff667..8d10d70ddd0 100644 --- a/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -50,7 +50,7 @@ pub(crate) fn run( let workspace = resolve_workspace_from_toml(&toml_path, selection)?; let proofs_dir = workspace.proofs_directory_path(); - let (np_language, is_opcode_supported) = backend.get_backend_info()?; + let (np_language, opcode_support) = backend.get_backend_info()?; for package in &workspace { let circuit_build_path = workspace.package_build_path(package); @@ -64,7 +64,7 @@ pub(crate) fn run( &args.verifier_name, &args.compile_options, np_language, - &is_opcode_supported, + &|opcode|
opcode_support.is_opcode_supported(opcode), )?; } diff --git a/tooling/nargo_cli/src/errors.rs b/tooling/nargo_cli/src/errors.rs index 205f68f624e..49fa02d281d 100644 --- a/tooling/nargo_cli/src/errors.rs +++ b/tooling/nargo_cli/src/errors.rs @@ -70,7 +70,7 @@ pub(crate) enum CliError { /// Error related to communication with backend. #[error(transparent)] - BackendCommunicationError(#[from] acvm_backend_barretenberg::BackendError), + BackendCommunicationError(#[from] backend_interface::BackendError), } #[derive(Debug, thiserror::Error)] diff --git a/tooling/nargo_cli/src/main.rs b/tooling/nargo_cli/src/main.rs index f4d1e1862fc..92bd7b94988 100644 --- a/tooling/nargo_cli/src/main.rs +++ b/tooling/nargo_cli/src/main.rs @@ -11,15 +11,18 @@ mod backends; mod cli; mod errors; -use color_eyre::{config::HookBuilder, eyre}; +use color_eyre::config::HookBuilder; const PANIC_MESSAGE: &str = "This is a bug. We may have already fixed this in newer versions of Nargo so try searching for similar issues at https://github.com/noir-lang/noir/issues/.\nIf there isn't an open issue for this bug, consider opening one at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml"; -fn main() -> eyre::Result<()> { +fn main() { // Register a panic hook to display more readable panic messages to end-users let (panic_hook, _) = HookBuilder::default().display_env_section(false).panic_section(PANIC_MESSAGE).into_hooks(); panic_hook.install(); - cli::start_cli() + if let Err(report) = cli::start_cli() { + eprintln!("{report}"); + std::process::exit(1); + } } diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_declaration/src/main.nr b/tooling/nargo_cli/tests/compile_failure/dup_trait_declaration/src/main.nr index f4c246c786a..052d7762438 100644 --- a/tooling/nargo_cli/tests/compile_failure/dup_trait_declaration/src/main.nr +++ b/tooling/nargo_cli/tests/compile_failure/dup_trait_declaration/src/main.nr @@ -21,6 +21,4 @@ trait Default { } fn main(x: Field, y: Field) { - let first = Foo::default(x,y); - assert(first.bar == x); } diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation/src/main.nr b/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation/src/main.nr index cfc098a6ff7..fd4ebe95519 100644 --- a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation/src/main.nr +++ b/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation/src/main.nr @@ -1,7 +1,7 @@ use dep::std; trait Default { - fn default(x: Field, y: Field) -> Self; + fn default(x: Field, y: Field) -> Field; } struct Foo { @@ -9,20 +9,18 @@ struct Foo { array: [Field; 2], } -// Duplicate trait implementations should not compile impl Default for Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: x, array: [x,y] } + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + y + 2 * x } -} - -// Duplicate trait implementations should not compile -impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: y, array: [y,x] } + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + y + 2 * x } } + fn main(x: Field, y: Field) { } diff --git a/tooling/nargo_cli/tests/compile_failure/impl_struct_not_trait/src/main.nr b/tooling/nargo_cli/tests/compile_failure/impl_struct_not_trait/src/main.nr index e25465378b1..50c142e2f5e 100644 --- a/tooling/nargo_cli/tests/compile_failure/impl_struct_not_trait/src/main.nr +++ 
b/tooling/nargo_cli/tests/compile_failure/impl_struct_not_trait/src/main.nr @@ -18,6 +18,4 @@ impl Default for Foo { } fn main(x: Field, y: Field) { - let first = Foo::default(x,y); - assert(first.bar == x); } diff --git a/tooling/nargo_cli/tests/compile_failure/impl_trait_for_non_type/Nargo.toml b/tooling/nargo_cli/tests/compile_failure/impl_trait_for_non_type/Nargo.toml new file mode 100644 index 00000000000..35f174bf546 --- /dev/null +++ b/tooling/nargo_cli/tests/compile_failure/impl_trait_for_non_type/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "impl_trait_for_non_type" +type = "bin" +authors = [""] +compiler_version = "0.9.0" + +[dependencies] diff --git a/tooling/nargo_cli/tests/compile_failure/impl_trait_for_non_type/Prover.toml b/tooling/nargo_cli/tests/compile_failure/impl_trait_for_non_type/Prover.toml new file mode 100644 index 00000000000..2c1854573a4 --- /dev/null +++ b/tooling/nargo_cli/tests/compile_failure/impl_trait_for_non_type/Prover.toml @@ -0,0 +1,2 @@ +x = 1 +y = 2 diff --git a/tooling/nargo_cli/tests/compile_failure/impl_trait_for_non_type/src/main.nr b/tooling/nargo_cli/tests/compile_failure/impl_trait_for_non_type/src/main.nr new file mode 100644 index 00000000000..9dce82e94bf --- /dev/null +++ b/tooling/nargo_cli/tests/compile_failure/impl_trait_for_non_type/src/main.nr @@ -0,0 +1,17 @@ +use dep::std; + +trait Foo { + fn foo() -> Field; +} + + +// This should not compile, as only types should have impl blocks +// TODO(https://github.com/noir-lang/noir/issues/2568): Right now we only allow structs, but arbitrary types should be allowed. +impl Foo for main { + fn foo() -> Field { + x + y + } +} + +fn main(x: Field, y: Field) { +} diff --git a/tooling/nargo_cli/tests/compile_failure/trait_missing_implementation/src/main.nr b/tooling/nargo_cli/tests/compile_failure/trait_missing_implementation/src/main.nr index bc74b328592..1f69d09924b 100644 --- a/tooling/nargo_cli/tests/compile_failure/trait_missing_implementation/src/main.nr +++ b/tooling/nargo_cli/tests/compile_failure/trait_missing_implementation/src/main.nr @@ -19,6 +19,4 @@ impl Default for Foo { } fn main(x: Field) { - let first = Foo::method2(x); - assert(first == x); } diff --git a/tooling/nargo_cli/tests/compile_failure/trait_not_in_scope/src/main.nr b/tooling/nargo_cli/tests/compile_failure/trait_not_in_scope/src/main.nr index 9dc57ee395f..2f236e622f0 100644 --- a/tooling/nargo_cli/tests/compile_failure/trait_not_in_scope/src/main.nr +++ b/tooling/nargo_cli/tests/compile_failure/trait_not_in_scope/src/main.nr @@ -13,6 +13,4 @@ impl Default for Foo { } fn main(x: Field, y: Field) { - let first = Foo::default(x,y); - assert(first.bar == x); } diff --git a/tooling/nargo_cli/tests/compile_failure/trait_wrong_method_name/src/main.nr b/tooling/nargo_cli/tests/compile_failure/trait_wrong_method_name/src/main.nr index 0ba10815efa..470bed9b354 100644 --- a/tooling/nargo_cli/tests/compile_failure/trait_wrong_method_name/src/main.nr +++ b/tooling/nargo_cli/tests/compile_failure/trait_wrong_method_name/src/main.nr @@ -1,7 +1,6 @@ use dep::std; trait Default { - fn default(x: Field, y: Field) -> Self; } struct Foo { @@ -11,12 +10,10 @@ struct Foo { // wrong trait name method should not compile impl Default for Foo { - fn default_wrong_name(x: Field, y: Field) -> Self { + fn doesnt_exist(x: Field, y: Field) -> Self { Self { bar: x, array: [x,y] } } } fn main(x: Field, y: Field) { - let first = Foo::default_wrong_name(x,y); - assert(first.bar == x); } diff --git 
a/tooling/nargo_cli/tests/compile_failure/trait_wrong_method_return_type/src/main.nr b/tooling/nargo_cli/tests/compile_failure/trait_wrong_method_return_type/src/main.nr index acd930a6d49..23e46430dbc 100644 --- a/tooling/nargo_cli/tests/compile_failure/trait_wrong_method_return_type/src/main.nr +++ b/tooling/nargo_cli/tests/compile_failure/trait_wrong_method_return_type/src/main.nr @@ -1,21 +1,16 @@ -use dep::std; - trait Default { - fn default(x: Field, y: Field) -> Self; + fn default() -> Self; } struct Foo { - bar: Field, - array: [Field; 2], } +// This should fail to compile as `default()` should return `Foo` impl Default for Foo { - fn default(x: Field, y: Field) -> Field { + fn default() -> Field { x } } -fn main(x: Field, y: Field) { - let first = Foo::default(x,y); - assert(first.bar == x); +fn main() { } diff --git a/tooling/nargo_cli/tests/compile_failure/trait_wrong_parameter/src/main.nr b/tooling/nargo_cli/tests/compile_failure/trait_wrong_parameter/src/main.nr index 2975aa6b1dd..ae7888e010f 100644 --- a/tooling/nargo_cli/tests/compile_failure/trait_wrong_parameter/src/main.nr +++ b/tooling/nargo_cli/tests/compile_failure/trait_wrong_parameter/src/main.nr @@ -1,21 +1,15 @@ -use dep::std; - -trait Default { - fn default(x: Field, y: Field) -> Self; +trait FromField { + fn default(x: Field) -> Self; } struct Foo { bar: Field, - array: [Field; 2], } impl Default for Foo { - fn default(x: Field, y: Foo) -> Self { - Self { bar: x, array: [x, y.bar] } + fn default(x: u32) -> Self { } } -fn main(x: Field, y: Field) { - let first = Foo::default(x,y); - assert(first.bar == x); +fn main() { } diff --git a/tooling/nargo_cli/tests/compile_failure/trait_wrong_parameters_count/src/main.nr b/tooling/nargo_cli/tests/compile_failure/trait_wrong_parameters_count/src/main.nr index 92469ae8fdb..4d011ddf737 100644 --- a/tooling/nargo_cli/tests/compile_failure/trait_wrong_parameters_count/src/main.nr +++ b/tooling/nargo_cli/tests/compile_failure/trait_wrong_parameters_count/src/main.nr @@ -16,6 +16,4 @@ impl Default for Foo { } fn main(x: Field, y: Field) { - let first = Foo::default(x,y); - assert(first.bar == x); } diff --git a/tooling/nargo_cli/tests/execute.rs b/tooling/nargo_cli/tests/execute.rs index 9e02951573d..e2bef43b571 100644 --- a/tooling/nargo_cli/tests/execute.rs +++ b/tooling/nargo_cli/tests/execute.rs @@ -12,10 +12,7 @@ mod tests { use super::*; - test_binary::build_test_binary_once!( - mock_backend, - "../acvm_backend_barretenberg/test-binaries" - ); + test_binary::build_test_binary_once!(mock_backend, "../backend_interface/test-binaries"); // include tests generated by `build.rs` include!(concat!(env!("OUT_DIR"), "/execute.rs")); diff --git a/tooling/nargo_cli/tests/execution_success/8_integration/Prover.toml b/tooling/nargo_cli/tests/execution_success/8_integration/Prover.toml deleted file mode 100644 index e4b4fa41073..00000000000 --- a/tooling/nargo_cli/tests/execution_success/8_integration/Prover.toml +++ /dev/null @@ -1,5 +0,0 @@ -a=[867393132, 2339025230, 220695592, 603045882, 2511105607, 2829384008, 3709060370, 165831136, 1055242736, 1021386699, 504214651, 3662346416, 1830680088, 3882307476, 2426040416, 1802701977, 2663953820, 442532338, 1174156258, 2943965281, 2059435796, 2505576606, 666729718, 3602851575, 2784009587, 3495199106, 1721163808, 3787454896, 315490254, 2761503044, 1222736857, 3669200722, 1595984236, 1113969718, 486680564, 3162990949, 3264361924, 2006416798, 2386200406, 315797713, 2613961431, 2248446788, 1487182619, 1426297375, 1728644913, 1251844809, 
1725705662, 1593325285, 2204175104, 2086772782, 3535562424, 171941432, 1454717338, 346500936, 3226869878, 1868934392, 4256057877, 1568150812, 3256749490, 2594788417, 1807197388, 3087252400, 1649310565, 2748668146, 3716823811, 3800017989, 932498547, 2480193018, 333760602, 97095822, 4100736518, 2777593334, 2339587180, 3771453942, 3867894936, 3650805881, 1824779553, 1642205658, 4264337791, 4071013475, 1985859040, 4202403275, 2148375036, 2428793574, 314105769, 4225849095, 3500808841, 2684237013, 848348764, 723628347, 1455798875, 3707853370, 1746878741, 1139375098, 3478206320, 3069213335, 112605790, 2440244355, 1471127557, 4092108893] -b=[3828535814, 348916743, 1199414553, 737248839, 756047272, 1292160882, 4257951637, 291617875, 2966142224, 3814394488, 3878026466, 700807834, 2969962294, 1306796485, 3854250602, 898180304, 3427925197, 604266260, 1075521373, 3406840156, 3396422198, 890966269, 1079444598, 988299705, 3071209797, 3808577073, 2135889094, 1194271359, 4006125262, 566871018, 1292670770, 3445252242, 1897364157, 1587048323, 1240078226, 1678980405, 262815752, 304362997, 1104680912, 2632486420, 2463291218, 2187725560, 1870618568, 2652926282, 3004775258, 1952884887, 561428664, 2467226612, 2683547316, 3452779168, 976229927, 1449738410, 3252038428, 2805606398, 1462658417, 1592183545, 2019693157, 3278803512, 3026040550, 566335611, 703403330, 936890230, 2567824938, 890552997, 4217401169, 258050408, 29872215, 812502992, 3871770414, 4261908330, 3703871063, 2429703152, 1496772760, 3466865862, 2739387475, 547994854, 240736540, 3737530356, 545555875, 1243531855, 826369375, 392660683, 262937837, 3055809624, 1979941188, 3982865811, 2062520214, 1365494964, 3851477194, 4086198942, 4210993448, 3262645997, 766395054, 1585427862, 1824837360, 105660195, 3008983983, 845249279, 2566786179, 205438487] -c=[867393132, 2339025230, 220695592, 603045882, ] -d=[3828535814, 348916743, 1199414553, 737248839, ] -m=[77,75,108,209,54,16,50,202,155,210,174,185,217,0,170,77,69,217,234,216,10,201,66,51,116,196,81,167,37,77,7,102] diff --git a/tooling/nargo_cli/tests/execution_success/8_integration/src/main.nr b/tooling/nargo_cli/tests/execution_success/8_integration/src/main.nr deleted file mode 100644 index 52f53efd3aa..00000000000 --- a/tooling/nargo_cli/tests/execution_success/8_integration/src/main.nr +++ /dev/null @@ -1,283 +0,0 @@ -fn matrix_mul_2(a: [u32; 4], b: [u32; 4]) ->[u32; 4] { - let mut c = [0 as u32; 4]; - for i in 0..2 { - for j in 0..2 { - c[i+2*j] = 0; - for k in 0..2 { - c[i+2*j] += a[i+2*k] * b[k+2*j]; - } - } - } - c -} - -fn matrix_mul_10(a: [u32; 100], b: [u32; 100]) -> [u32; 100] { - let mut c = [0 as u32; 100]; - for i in 0..10 { - for j in 0..10 { - c[i+10*j] = 0 as u32; - - for k in 0..10 { - c[i+10*j] += a[i+10*k] * b[k+10*j]; - } - } - } - c -} - - -fn siggy(x: u32) -> u32 { - x * (10 as u32) -} - - -fn test4 (mut a: [u32; 4]) -> [u32; 4] { - for i in 3..4 { - a[i] = siggy(a[i-2]); - } - a -} - -fn iterate1(mut a0: u32) -> u32{ - let mut t1 = 0 as u32; - let mut t2 = 0 as u32; - let mut a = 1 as u32; - let mut f = 2 as u32; - let mut g = 3 as u32; - let mut h = 4 as u32; - - for _i in 0..2 { - t1 = h; - h = g; - g = f; - a = t1 + t2; - } - a0 += a; - a0 -} - -fn array_noteq(a: [u32; 4], b: [u32; 4]) { - assert(a != b); -} - -fn test3(mut b: [Field; 4]) -> [Field; 4] { - for i in 0..4 { - b[i] = i; - } - b -} - -fn iterate2(mut hash: [u32; 8]) -> [u32; 8] { - let mut t1 = 0 as u32; - - let mut a = hash[0]; - let mut e = hash[4]; - let mut f = hash[5]; - let mut g = hash[6]; - let mut h = hash[7]; 
- - for _i in 0..2 { - t1 = ch2(e, f); - h = g; - g = f; - a = t1; - } - - hash[0] = hash[0] + a; - hash -} - -fn iterate3( mut hash: [u32; 8]) -> [u32; 8] { - let mut t1 = 0 as u32; - let mut t2 = 0 as u32; - let mut a = hash[0]; - let mut b = hash[1]; - let mut c = hash[2]; - let mut d = hash[3]; - let mut e = hash[4]; - let mut f = hash[5]; - let mut g = hash[6]; - let mut h = hash[7]; - - for _i in 0..3 { - t1 = ep2(e)+ch2(e, f); - h = g; - g = f; - a = t1+t2; - } - assert(a == 2470696267); - hash[0] = hash[0] + a; - hash[1] = hash[1] + b; - hash[2] = hash[2] + c; - hash[3] = hash[3] + d; - hash[4] = hash[4] + e; - hash[5] = hash[5] + f; - hash[6] = hash[6] + g; - hash[7] = hash[7] + h; - hash -} - - -fn test5() { - let mut sha_hash = [ - 0 as u32, 1, 2, 3, - 4, 5, 6, 7 - ]; - - sha_hash = iterate2(sha_hash); - - assert(sha_hash[0] == 9); -} - - -fn ch2(x: u32, y: u32) -> u32 { - x + y -} - -fn ep2(x: u32) -> u32 { - (2 as u32) * too(x) -} - -fn too(x: u32) -> u32 { - (x + 17 as u32) * (x + 3 as u32) -} - -fn test6(x: [u8; 32]) -> [u32; 8] { - let mut sha_m = [0 as u32; 64]; - - let mut sha_hash = [ - 1 as u32, 2, 3, 4, 5, 6, 7, 8 - ]; - - let mut buffer = [0 as u8; 64]; - for i in 0..32 { - buffer[i] = x[i]; - } - - sha_m = iterate6_1(sha_m, buffer); - sha_hash = iterate6_2(sha_m, sha_hash); - sha_hash -} - -fn iterate6_1(mut sha_m: [u32; 64], next_chunk: [u8; 64]) -> [u32; 64] { - let mut j = 0; - for i in 0..16 { - j = (i ) * 4; - sha_m[i] = ((next_chunk[j] as u32) << 24 as u32) - | ((next_chunk[j + 1] as u32) << 16 as u32) - | ((next_chunk[j + 2] as u32) << 8 as u32) - | (next_chunk[j + 3] as u32); - } - for i in 16..64 { - sha_m[i] = sig1(sha_m[i - 2])+(sha_m[i - 7])+(sig0(sha_m[i - 15]))+(sha_m[i - 16]); - } - sha_m -} - -fn iterate6_2(sha_m: [u32; 64], mut hash: [u32; 8]) -> [u32; 8] { - let mut t1 = 0 as u32; - let mut t2 = 0 as u32; - let mut a = 1 as u32; - let mut b = 2 as u32; - let mut c = 3 as u32; - let mut d = 4 as u32; - let mut e = 5 as u32; - let mut f = 6 as u32; - let mut g = 7 as u32; - let mut h = 8 as u32; - - for i in 0..11 { - t1 = h + ep1(e) + ch(e, f, g) + sha_m[i]; - t2 = epo(a) + maj(a, b, c); - h = g; - g = f; - f = e; - e = d+t1; - d = c; - c = b; - b = a; - a = t1+t2; - } - - hash[0] = hash[0]+a; - hash[1] = hash[1]+b; - hash[2] = hash[2]+c; - hash[3] = hash[3]+d; - hash[4] = hash[4]+e; - hash[5] = hash[5]+f; - hash[6] = hash[6]+g; - hash[7] = hash[7]+h; - hash -} - -fn rot_right(a: u32, b: u32) -> u32 { - ((a >> b) | (a << (32 as u32 - b))) -} - - -fn ch(x: u32, y: u32, z: u32) -> u32 { - ((x & y) ^ (!x & z)) -} - - -fn maj(x: u32, y: u32, z: u32) -> u32 { - ((x & y) ^ (x & z) ^ (y & z)) -} - - -fn epo(x: u32) -> u32 { - (rot_right(x, 2) ^ rot_right(x, 13) ^ rot_right(x, 22)) -} - -fn ep1(x: u32) -> u32 { - (rot_right(x, 6) ^ rot_right(x, 11) ^ rot_right(x, 25)) -} - -fn sig0(x: u32) -> u32 { - (rot_right(x, 7) ^ rot_right(x, 18) ^ (x >> 3)) -} - -fn sig1(x: u32) -> u32 { - (rot_right(x, 17) ^ rot_right(x, 19) ^ (x >> 10)) -} - - -fn main(a: [u32; 100], b: [u32; 100], c: [u32; 4], mut d: [u32; 4], m: [u8; 32]) { - let e = matrix_mul_10(a,b); - assert(e[6] == 1866842232); - let f = matrix_mul_2(c,d); - assert(f[3] == 2082554100); - - let mut a = [1 as u32, 2, 3, 4]; - a = test4(a); - assert(a[3] == 20); - a = test4(c); - assert(a[3] == c[1] * 10); - - d[0] += c[0]; - d[0] += c[1]; - assert(d[0] == 2739986880); - - let h = iterate1(1); - assert(h == 4); - - let x = d; - array_noteq(x, [d[0], d[1], d[2], 0]); - - let mut h5 = [d[0] as Field, d[1] as 
Field, d[2] as Field, d[3] as Field]; - let t5 = test3(h5); - assert(t5[3] == 3); - h5 = test3(h5); - assert(h5[3] == 3); - - test5(); - - let mut sha_hash = [ - 0x6a09e667 as u32, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, - 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 - ]; - sha_hash = iterate3(sha_hash); - - let h6 = test6(m); - assert(h6[0] == 523008072); //31.. 3800709683 -} diff --git a/tooling/nargo_cli/tests/execution_success/8_integration/Nargo.toml b/tooling/nargo_cli/tests/execution_success/conditional_1/Nargo.toml similarity index 77% rename from tooling/nargo_cli/tests/execution_success/8_integration/Nargo.toml rename to tooling/nargo_cli/tests/execution_success/conditional_1/Nargo.toml index 56a156ee1d7..af1efa03e82 100644 --- a/tooling/nargo_cli/tests/execution_success/8_integration/Nargo.toml +++ b/tooling/nargo_cli/tests/execution_success/conditional_1/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "8_integration" +name = "conditional_1" type = "bin" authors = [""] compiler_version = "0.1" diff --git a/tooling/nargo_cli/tests/execution_success/9_conditional/Prover.toml b/tooling/nargo_cli/tests/execution_success/conditional_1/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/9_conditional/Prover.toml rename to tooling/nargo_cli/tests/execution_success/conditional_1/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_1/src/main.nr b/tooling/nargo_cli/tests/execution_success/conditional_1/src/main.nr new file mode 100644 index 00000000000..4d1eeae0fe7 --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/conditional_1/src/main.nr @@ -0,0 +1,100 @@ +use dep::std; + +fn sort(mut a: [u32; 4]) -> [u32; 4] { + for i in 1..4 { + for j in 0..i { + if a[i] < a[j] { + let c = a[j]; + a[j] = a[i]; + a[i] = c; + } + } + } + a +} + + +fn must_be_zero(x: u8) { + assert(x == 0); +} + +fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ + + //Test case for short-circuit + let mut data = [0 as u32; 32]; + let mut ba = a; + for i in 0..32 { + let i_u32 = i as u32; + if i_u32 == a { + for j in 0..4 { + data[i + j] = c[4 - 1 - j]; + for k in 0..4 { + ba = ba +data[k]; + } + if ba == 4864 { + c[3]=ba; + } + } + } + } + assert(data[31] == 0); + assert(ba != 13); + + + //Test case for conditional with arrays from function parameters + let b = sort([1,2,3,4]); + assert(b[0] == 1); + + if a == 0 { + must_be_zero(0); + c[0] = 3; + } else { + must_be_zero(1); + c[0] = 1; + c[1] = c[2] / a + 11 % a; + let f1 = a as Field; + assert(10/f1 != 0); + } + assert(c[0] == 3); + + let mut y = 0; + if a == 0 { + let digest = std::hash::sha256(x); + y = digest[0]; + } else { + y = 5; + } + assert(y == result[0]); + c = sort(c); + assert(c[0] == 0); + + //test 1 + let mut x: u32 = 0; + if a == 0 { + c[0] = 12; + if a != 0 { + x = 6; + } else { + x = 2; + assert(x == 2); + } + } else { + x = 5; + assert(x == 5); + } + if c[0] == 0 { + x = 3; + } + assert(x == 2); + + //test2: loops + let mut x: u32 = 0; + x = a - a; + for i in 0..4 { + if c[i] == 0 { + x = i as u32 +2; + } + } + assert(x == 0); + +} \ No newline at end of file diff --git a/tooling/nargo_cli/tests/execution_success/9_conditional/Nargo.toml b/tooling/nargo_cli/tests/execution_success/conditional_2/Nargo.toml similarity index 77% rename from tooling/nargo_cli/tests/execution_success/9_conditional/Nargo.toml rename to tooling/nargo_cli/tests/execution_success/conditional_2/Nargo.toml index 3aa36068cf2..1401a715eef 100644 --- 
a/tooling/nargo_cli/tests/execution_success/9_conditional/Nargo.toml +++ b/tooling/nargo_cli/tests/execution_success/conditional_2/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "9_conditional" +name = "conditional_2" type = "bin" authors = [""] compiler_version = "0.1" diff --git a/tooling/nargo_cli/tests/execution_success/conditional_2/Prover.toml b/tooling/nargo_cli/tests/execution_success/conditional_2/Prover.toml new file mode 100644 index 00000000000..73fa4a5e31a --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/conditional_2/Prover.toml @@ -0,0 +1,2 @@ +c=[2, 4, 3, 0, ] +a=0 diff --git a/tooling/nargo_cli/tests/execution_success/conditional_2/src/main.nr b/tooling/nargo_cli/tests/execution_success/conditional_2/src/main.nr new file mode 100644 index 00000000000..af5b46c003e --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/conditional_2/src/main.nr @@ -0,0 +1,54 @@ +use dep::std; + + +fn must_be_zero(x: u8) { + assert(x == 0); +} + +fn test3 (x: u8) { + if x == 0 { + must_be_zero(x); + } +} + +fn test4() -> [u32; 4] { + let b: [u32; 4] = [1,2,3,4]; + b +} + +fn main(a: u32, mut c: [u32; 4]) { + + test3(1); + + if a == 0 { + c = test4(); + } else { + assert(c[1] != 2); + } + if false { + c[1] = 5; + } + assert(c[1] == 2); + + test5(4); + + // Test case for function synchronisation + let mut c_sync = 0; + if a == 42 { + c_sync = foo2(); + } else { + c_sync = foo2() + foo2(); + } + assert(c_sync == 6); +} + +fn test5(a : u32) { + if a > 1 { + let q = a / 2; + assert(q == 2); + } +} + +fn foo2() -> Field { + 3 +} diff --git a/tooling/nargo_cli/tests/execution_success/conditional_3_regression/Nargo.toml b/tooling/nargo_cli/tests/execution_success/conditional_3_regression/Nargo.toml new file mode 100644 index 00000000000..868208a4fe2 --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/conditional_3_regression/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "conditional_3_regression" +type = "bin" +authors = [""] +compiler_version = "0.1" + +[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/conditional_3_regression/Prover.toml b/tooling/nargo_cli/tests/execution_success/conditional_3_regression/Prover.toml new file mode 100644 index 00000000000..baad8be126a --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/conditional_3_regression/Prover.toml @@ -0,0 +1,38 @@ +c=[2, 4, 3, 0, ] +a=0 +x = [104, 101, 108, 108, 111] + +result = [ + 0x2c, + 0xf2, + 0x4d, + 0xba, + 0x5f, + 0xb0, + 0xa3, + 0x0e, + 0x26, + 0xe8, + 0x3b, + 0x2a, + 0xc5, + 0xb9, + 0xe2, + 0x9e, + 0x1b, + 0x16, + 0x1e, + 0x5c, + 0x1f, + 0xa7, + 0x42, + 0x5e, + 0x73, + 0x04, + 0x33, + 0x62, + 0x93, + 0x8b, + 0x98, + 0x24, +] diff --git a/tooling/nargo_cli/tests/execution_success/9_conditional/src/main.nr b/tooling/nargo_cli/tests/execution_success/conditional_3_regression/src/main.nr similarity index 56% rename from tooling/nargo_cli/tests/execution_success/9_conditional/src/main.nr rename to tooling/nargo_cli/tests/execution_success/conditional_3_regression/src/main.nr index c1091304e03..cc3e2b2c5ba 100644 --- a/tooling/nargo_cli/tests/execution_success/9_conditional/src/main.nr +++ b/tooling/nargo_cli/tests/execution_success/conditional_3_regression/src/main.nr @@ -1,17 +1,6 @@ use dep::std; -fn sort(mut a: [u32; 4]) -> [u32; 4] { - for i in 1..4 { - for j in 0..i { - if a[i] < a[j] { - let c = a[j]; - a[j] = a[i]; - a[i] = c; - } - } - } - a -} + fn call_intrinsic(x: [u8; 5], result: [u8; 32]) { let mut digest = std::hash::sha256(x); @@ -20,16 +9,6 @@ fn 
call_intrinsic(x: [u8; 5], result: [u8; 32]) { assert(digest == result); } -fn must_be_zero(x: u8) { - assert(x == 0); -} - -fn test3 (x: u8) { - if x == 0 { - must_be_zero(x); - } -} - fn test4() -> [u32; 4] { let b: [u32; 4] = [1,2,3,4]; b @@ -87,25 +66,6 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ let result1_false = test(false); assert(result1_false.array_param[0] == 0); - //Test case for short-circuit - let mut data = [0 as u32; 32]; - let mut ba = a; - for i in 0..32 { - let i_u32 = i as u32; - if i_u32 == a { - for j in 0..4 { - data[i + j] = c[4 - 1 - j]; - for k in 0..4 { - ba = ba +data[k]; - } - if ba == 4864 { - c[3]=ba; - } - } - } - } - assert(data[31] == 0); - assert(ba != 13); //regression for short-circuit2 if 35 == a { assert(false); @@ -118,93 +78,15 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ assert(c[1] != 2); call_intrinsic(x, result); - //Test case for conditional with arrays from function parameters - let b = sort([1,2,3,4]); - assert(b[0] == 1); - - if a == 0 { - must_be_zero(0); - c[0] = 3; - } else { - must_be_zero(1); - c[0] = 1; - c[1] = c[2] / a + 11 % a; - let f1 = a as Field; - assert(10/f1 != 0); - } - assert(c[0] == 3); - - let mut y = 0; - if a == 0 { - let digest = std::hash::sha256(x); - y = digest[0]; - } else { - y = 5; - } - assert(y == result[0]); - c = sort(c); - assert(c[0] == 0); - - //test 1 - let mut x: u32 = 0; - if a == 0 { - c[0] = 12; - if a != 0 { - x = 6; - } else { - x = 2; - assert(x == 2); - } - } else { - x = 5; - assert(x == 5); - } - if c[0] == 0 { - x = 3; - } - assert(x == 2); - - //test2: loops! - x = 0; - x = a - a; - for i in 0..4 { - if c[i] == 0 { - x = i as u32 +2; - } - } - assert(x == 0); - - test3(1); - - if a == 0 { - c = test4(); - } else { - assert(c[1] != 2); - } - if false { - c[1] = 5; - } - assert(c[1] == 2); - - test5(4); // Regression for issue #661: let mut c_661 :[u32;1]=[0]; if a > 5 { c_661 = issue_661_foo(issue_661_bar(c), a); } else { - c_661 = issue_661_foo(issue_661_bar(c), x); + c_661 = issue_661_foo(issue_661_bar(c), a + 2); } assert(c_661[0] < 20000); - - // Test case for function synchronisation - let mut c_sync = 0; - if a == 42 { - c_sync = foo2(); - } else { - c_sync = foo2() + foo2(); - } - assert(c_sync == 6); // Regression for predicate simplification safe_inverse(0); @@ -262,10 +144,6 @@ fn issue_661_bar(a : [u32;4]) ->[u32;4] { b } -fn foo2() -> Field { - 3 -} - fn safe_inverse(n: Field) -> Field { if n == 0 { diff --git a/tooling/nargo_cli/tests/execution_success/regression/src/main.nr b/tooling/nargo_cli/tests/execution_success/regression/src/main.nr index 54769c39709..4ae4f6a91f1 100644 --- a/tooling/nargo_cli/tests/execution_success/regression/src/main.nr +++ b/tooling/nargo_cli/tests/execution_success/regression/src/main.nr @@ -71,6 +71,26 @@ fn enc(value: [u8; N], value_length: Field) -> ([u8; 32], Field) } } +fn bitshift_literal_0() -> u64 { + let mut bits: u64 = 0; + bits |= 1 << 0; + + bits +} +fn bitshift_literal_4() -> u64 { + let mut bits: u64 = 0; + bits |= 1 << 4; + + bits +} +fn bitshift_variable(idx: u64) -> u64 { + let mut bits: u64 = 0; + bits |= 1 << idx; + + bits +} + + fn main(x: [u8; 5], z: Field) { //Issue 1144 @@ -87,4 +107,14 @@ fn main(x: [u8; 5], z: Field) assert(enc_val1.0 == [0x94,0xb8,0x8f,0x61,0xe6,0xfb,0xda,0x83,0xfb,0xff,0xfa,0xbe,0x36,0x41,0x12,0x13,0x74,0x80,0x39,0x80,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]); assert(enc_val1.1 == 21); + // Issue 2399 + let result_0 = 
bitshift_literal_0(); + assert(result_0 == 1); + let result_4 = bitshift_literal_4(); + assert(result_4 == 16); + let result_0 = bitshift_variable(0); + assert(result_0 == 1); + let result_4 = bitshift_variable(4); + assert(result_4 == 16); + } diff --git a/tooling/nargo_cli/tests/execution_success/signed_arithmetic/Nargo.toml b/tooling/nargo_cli/tests/execution_success/signed_arithmetic/Nargo.toml new file mode 100644 index 00000000000..952e7d88a5a --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/signed_arithmetic/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "signed_arithmetic" +type = "bin" +authors = [""] +compiler_version = "0.1" + +[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/signed_arithmetic/Prover.toml b/tooling/nargo_cli/tests/execution_success/signed_arithmetic/Prover.toml new file mode 100644 index 00000000000..e0e584b7380 --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/signed_arithmetic/Prover.toml @@ -0,0 +1,3 @@ +x = "5" +y = "8" +z = "-15" diff --git a/tooling/nargo_cli/tests/execution_success/signed_arithmetic/src/main.nr b/tooling/nargo_cli/tests/execution_success/signed_arithmetic/src/main.nr new file mode 100644 index 00000000000..c2a1b580f40 --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/signed_arithmetic/src/main.nr @@ -0,0 +1,32 @@ +fn main(mut x: i32, mut y: i32, z: i32) { + let mut s1: i8 = 5; + let mut s2: i8 = 8; + assert(s1+s2 == 13); + assert(x + y == 13); + + s2= -8; + y = -y; + assert(s1+s2 == -3); + assert(x + y == -3); + + s1 = -15; + assert(s1-s2 == -7); + assert(z-y == -7); + + s1 = -5; + s2 = 8; + x = -x; + y = -y; + assert(s1-s2 == -13); + assert(x-y == -13); + + s2 = -8; + y = -y; + assert(s1*s2 == 40); + assert(x*y == 40); + + s1 = 1; + s2 = -8; + assert(s1*s2 == -8); + assert(x/x*y == -8); +} diff --git a/tooling/nargo_cli/tests/execution_success/trait_self/Nargo.toml b/tooling/nargo_cli/tests/execution_success/trait_self/Nargo.toml new file mode 100644 index 00000000000..0dfaea44862 --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/trait_self/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "trait_self" +type = "bin" +authors = [""] +compiler_version = "0.10.5" + +[dependencies] \ No newline at end of file diff --git a/tooling/nargo_cli/tests/execution_success/trait_self/src/main.nr b/tooling/nargo_cli/tests/execution_success/trait_self/src/main.nr new file mode 100644 index 00000000000..c116795a128 --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/trait_self/src/main.nr @@ -0,0 +1,27 @@ +trait ATrait { + fn asd() -> Self; +} + +struct Foo { + x: Field +} +impl ATrait for Foo { + fn asd() -> Self { + // This should pass as Self should be bound to Foo while typechecking this + Foo{x: 100} + } +} + +struct Bar { + x: Field +} +impl ATrait for Bar { + // The trait method is declared as returning `Self` + // but explicitly specifying the type in the impl should work + fn asd() -> Bar { + Bar{x: 100} + } +} + +fn main() { +} \ No newline at end of file diff --git a/tooling/noir_js/.eslintignore b/tooling/noir_js/.eslintignore new file mode 100644 index 00000000000..3c3629e647f --- /dev/null +++ b/tooling/noir_js/.eslintignore @@ -0,0 +1 @@ +node_modules diff --git a/tooling/noir_js/.eslintrc.js b/tooling/noir_js/.eslintrc.js new file mode 100644 index 00000000000..33335c2a877 --- /dev/null +++ b/tooling/noir_js/.eslintrc.js @@ -0,0 +1,3 @@ +module.exports = { + extends: ["../../.eslintrc.js"], +}; diff --git a/tooling/noir_js/package.json 
b/tooling/noir_js/package.json index 5fc7bbcb62f..11a7d9197b1 100644 --- a/tooling/noir_js/package.json +++ b/tooling/noir_js/package.json @@ -17,7 +17,8 @@ "main": "lib/index.js", "types": "lib/index.d.ts", "scripts": { - "build": "tsc" + "build": "tsc", + "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "devDependencies": { "typescript": "^5.2.2" diff --git a/tooling/noir_js/src/index.ts b/tooling/noir_js/src/index.ts index 2b3bf593d0c..3455663f046 100644 --- a/tooling/noir_js/src/index.ts +++ b/tooling/noir_js/src/index.ts @@ -1,3 +1,3 @@ import * as acvm from "@noir-lang/acvm_js"; import * as noirc from "@noir-lang/noirc_abi"; -export { acvm, noirc } \ No newline at end of file +export { acvm, noirc }; diff --git a/tooling/noirc_abi/src/input_parser/mod.rs b/tooling/noirc_abi/src/input_parser/mod.rs index 139f3276179..fc6cd4b3b30 100644 --- a/tooling/noirc_abi/src/input_parser/mod.rs +++ b/tooling/noirc_abi/src/input_parser/mod.rs @@ -1,5 +1,5 @@ -use num_bigint::BigUint; -use num_traits::Num; +use num_bigint::{BigInt, BigUint}; +use num_traits::{Num, Zero}; use std::collections::BTreeMap; use acvm::FieldElement; @@ -201,10 +201,48 @@ fn parse_str_to_field(value: &str) -> Result<FieldElement, InputParserError> { } } +fn parse_str_to_signed(value: &str, width: u32) -> Result<FieldElement, InputParserError> { + if value.starts_with("0x") { + FieldElement::from_hex(value).ok_or_else(|| InputParserError::ParseHexStr(value.to_owned())) + } else { + BigInt::from_str_radix(value, 10) + .map_err(|err_msg| InputParserError::ParseStr(err_msg.to_string())) + .and_then(|bigint| { + let modulus: BigInt = FieldElement::modulus().into(); + // A negative input is encoded as its two's complement at the given bit width. + let bigint = if bigint.sign() == num_bigint::Sign::Minus { + BigInt::from(2).pow(width) + bigint + } else { + bigint + }; + if bigint.is_zero() || (bigint.sign() == num_bigint::Sign::Plus && bigint < modulus) + { + Ok(field_from_big_int(bigint)) + } else { + Err(InputParserError::ParseStr(format!( + "Input exceeds field modulus. Values must fall within [0, {})", + FieldElement::modulus(), + ))) + } + }) + } +} + fn field_from_big_uint(bigint: BigUint) -> FieldElement { FieldElement::from_be_bytes_reduce(&bigint.to_bytes_be()) } +fn field_from_big_int(bigint: BigInt) -> FieldElement { + match bigint.sign() { + num_bigint::Sign::Minus => { + unreachable!( + "Unsupported negative value; it should only be called with a positive value" + ) + } + num_bigint::Sign::NoSign => FieldElement::zero(), + num_bigint::Sign::Plus => FieldElement::from_be_bytes_reduce(&bigint.to_bytes_be().1), + } +} + #[cfg(test)] mod test { use acvm::FieldElement; diff --git a/tooling/noirc_abi/src/input_parser/toml.rs b/tooling/noirc_abi/src/input_parser/toml.rs index 3f7ec30e355..dc47cbcda37 100644 --- a/tooling/noirc_abi/src/input_parser/toml.rs +++ b/tooling/noirc_abi/src/input_parser/toml.rs @@ -1,4 +1,4 @@ -use super::{parse_str_to_field, InputValue}; +use super::{parse_str_to_field, parse_str_to_signed, InputValue}; use crate::{errors::InputParserError, Abi, AbiType, MAIN_RETURN_NAME}; use acvm::FieldElement; use iter_extended::{try_btree_map, try_vecmap}; @@ -118,9 +118,13 @@ impl InputValue { (TomlTypes::String(string), AbiType::String { .. }) => InputValue::String(string), ( TomlTypes::String(string), - AbiType::Field | AbiType::Integer { .. } | AbiType::Boolean, + AbiType::Field + | AbiType::Integer { sign: crate::Sign::Unsigned, ..
} + | AbiType::Boolean, ) => InputValue::Field(parse_str_to_field(&string)?), - + (TomlTypes::String(string), AbiType::Integer { sign: crate::Sign::Signed, width }) => { + InputValue::Field(parse_str_to_signed(&string, *width)?) + } ( TomlTypes::Integer(integer), AbiType::Field | AbiType::Integer { .. } | AbiType::Boolean, diff --git a/tooling/noirc_abi_wasm/.eslintrc.js b/tooling/noirc_abi_wasm/.eslintrc.js index b1346a8792f..33335c2a877 100644 --- a/tooling/noirc_abi_wasm/.eslintrc.js +++ b/tooling/noirc_abi_wasm/.eslintrc.js @@ -1,19 +1,3 @@ module.exports = { - root: true, - parser: "@typescript-eslint/parser", - plugins: ["@typescript-eslint", "prettier"], - extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], - rules: { - "comma-spacing": ["error", { before: false, after: true }], - "no-unused-vars": "off", - "@typescript-eslint/no-unused-vars": [ - "warn", // or "error" - { - argsIgnorePattern: "^_", - varsIgnorePattern: "^_", - caughtErrorsIgnorePattern: "^_", - }, - ], - "prettier/prettier": "error", - }, + extends: ["../../.eslintrc.js"], }; diff --git a/tooling/noirc_abi_wasm/installPhase.sh b/tooling/noirc_abi_wasm/installPhase.sh index a71b3f8cd0e..d9b94f2d171 100755 --- a/tooling/noirc_abi_wasm/installPhase.sh +++ b/tooling/noirc_abi_wasm/installPhase.sh @@ -1,8 +1,10 @@ #!/usr/bin/env bash export self_path=$(dirname "$(readlink -f "$0")") -mkdir -p $out -cp $self_path/README.md $out/ -cp $self_path/package.json $out/ -cp -r $self_path/nodejs $out/ -cp -r $self_path/web $out/ +export out_path=$out/noirc_abi_wasm + +mkdir -p $out_path +cp $self_path/README.md $out_path/ +cp $self_path/package.json $out_path/ +cp -r $self_path/nodejs $out_path/ +cp -r $self_path/web $out_path/ diff --git a/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts b/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts index 1328967676d..8a3b6fe9ffc 100644 --- a/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts +++ b/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts @@ -16,4 +16,4 @@ it("recovers original inputs when abi encoding and decoding", async () => { BigInt(inputs.bar[1]) ); expect(decoded_inputs.return_value).to.be.null; -}); \ No newline at end of file +}); diff --git a/tooling/noirc_abi_wasm/test/shared/abi_encode.ts b/tooling/noirc_abi_wasm/test/shared/abi_encode.ts index 7c3721b9efe..9664f4e5a45 100644 --- a/tooling/noirc_abi_wasm/test/shared/abi_encode.ts +++ b/tooling/noirc_abi_wasm/test/shared/abi_encode.ts @@ -17,4 +17,4 @@ export const abi = { export const inputs = { foo: "1", bar: ["1", "2"], -}; \ No newline at end of file +}; diff --git a/wasm-bindgen-cli.nix b/wasm-bindgen-cli.nix index 38b7b0a79c1..7c3910f032e 100644 --- a/wasm-bindgen-cli.nix +++ b/wasm-bindgen-cli.nix @@ -7,6 +7,7 @@ , stdenv , curl , darwin +, libiconv , runCommand }: @@ -23,7 +24,12 @@ rustPlatform.buildRustPackage rec { nativeBuildInputs = [ pkg-config ]; - buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ curl darwin.apple_sdk.frameworks.Security ]; + buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ + curl + # Need libiconv and apple Security on Darwin. 
See https://github.com/ipetkov/crane/issues/156 + libiconv + darwin.apple_sdk.frameworks.Security + ]; doCheck = false; @@ -34,4 +40,4 @@ rustPlatform.buildRustPackage rec { maintainers = with maintainers; [ nitsky rizary ]; mainProgram = "wasm-bindgen"; }; -} \ No newline at end of file +} diff --git a/yarn.lock b/yarn.lock index 6d8c11a8eb7..afba4a47a91 100644 --- a/yarn.lock +++ b/yarn.lock @@ -22,9 +22,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:^0.5.1": - version: 0.5.1 - resolution: "@aztec/bb.js@npm:0.5.1" +"@aztec/bb.js@npm:^0.6.7": + version: 0.6.7 + resolution: "@aztec/bb.js@npm:0.6.7" dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -32,7 +32,7 @@ __metadata: tslib: ^2.4.0 bin: bb.js: dest/node/main.js - checksum: 0a191106e6c6eb842181bf856779b2b461136e07a60068b9ae15e9f2e42f993fe4122c343f5946bc16654ef560b227850e8cba3a10c7c3a0a3940d606647f694 + checksum: 9067e9c4c5e51de173261bb5feebe6c4f6fc2be0381e2b30301fd5ed6794c5a20f5242427b7701384bc6285cd65e04e2fa914010923d1671cf59c8674b6545eb languageName: node linkType: hard @@ -392,14 +392,7 @@ __metadata: languageName: node linkType: hard -"@noir-lang/acvm_js@npm:^0.26.0": - version: 0.26.0 - resolution: "@noir-lang/acvm_js@npm:0.26.0" - checksum: 3325b611fec4531363cb67682d50ea40cbeaf8da7672c8373cb11bc7786308a14c61804950b4805e071edc9298005039847621a8d79fae185eda2cf981c64b39 - languageName: node - linkType: hard - -"@noir-lang/noir_js@workspace:tooling/noir_js": +"@noir-lang/noir_js@workspace:*, @noir-lang/noir_js@workspace:tooling/noir_js": version: 0.0.0-use.local resolution: "@noir-lang/noir_js@workspace:tooling/noir_js" dependencies: @@ -4288,10 +4281,9 @@ __metadata: version: 0.0.0-use.local resolution: "integration-tests@workspace:compiler/integration-tests" dependencies: - "@aztec/bb.js": ^0.5.1 - "@noir-lang/acvm_js": ^0.26.0 + "@aztec/bb.js": ^0.6.7 + "@noir-lang/noir_js": "workspace:*" "@noir-lang/noir_wasm": "workspace:*" - "@noir-lang/noirc_abi": "workspace:*" "@noir-lang/source-resolver": "workspace:*" "@web/dev-server-esbuild": ^0.3.6 "@web/test-runner": ^0.15.3