From f681ccff72f82f0fa2835285e0aedbddaef71d3a Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 26 Sep 2023 15:46:45 +0100 Subject: [PATCH 1/2] chore(ci): run `noir_js` tests in CI (#2843) --- .github/workflows/acvm-test-acvm-js.yml | 14 ++++- .github/workflows/test-noir-js.yml | 59 +++++++++++++++++++ acvm-repo/acvm_js/.gitignore | 14 ++--- acvm-repo/acvm_js/buildPhaseCargoCommand.sh | 11 ++-- acvm-repo/acvm_js/installPhase.sh | 16 ++--- .../acvm_js/nix/wasm-bindgen-cli/default.nix | 37 ------------ .../test/browser/execute_circuit.test.ts | 2 +- .../test/browser/witness_conversion.test.ts | 5 +- .../acvm_js/test/node/build_info.test.ts | 2 +- .../acvm_js/test/node/execute_circuit.test.ts | 2 +- .../test/node/witness_conversion.test.ts | 2 +- acvm-repo/acvm_js/test/shared/addition.ts | 2 +- .../test/shared/complex_foreign_call.ts | 2 +- acvm-repo/acvm_js/test/shared/foreign_call.ts | 2 +- tooling/noir_js/package.json | 4 +- tooling/noir_js/src/witness_generation.ts | 4 +- 16 files changed, 104 insertions(+), 74 deletions(-) create mode 100644 .github/workflows/test-noir-js.yml delete mode 100644 acvm-repo/acvm_js/nix/wasm-bindgen-cli/default.nix diff --git a/.github/workflows/acvm-test-acvm-js.yml b/.github/workflows/acvm-test-acvm-js.yml index d5a9dc07239..8a469f6ff1e 100644 --- a/.github/workflows/acvm-test-acvm-js.yml +++ b/.github/workflows/acvm-test-acvm-js.yml @@ -51,7 +51,12 @@ jobs: uses: actions/download-artifact@v3 with: name: acvm-js - path: ./acvm-repo/result + path: ./result + + - name: Move build artifacts + run: | + mv ./result/acvm_js/nodejs ./acvm-repo/acvm_js/nodejs + mv ./result/acvm_js/web ./acvm-repo/acvm_js/web - name: Set up test environment uses: ./.github/actions/setup @@ -72,7 +77,12 @@ jobs: uses: actions/download-artifact@v3 with: name: acvm-js - path: ./acvm-repo/result + path: ./result + + - name: Move build artifacts + run: | + mv ./result/acvm_js/nodejs ./acvm-repo/acvm_js/nodejs + mv ./result/acvm_js/web ./acvm-repo/acvm_js/web - name: Set up test environment uses: ./.github/actions/setup diff --git a/.github/workflows/test-noir-js.yml b/.github/workflows/test-noir-js.yml new file mode 100644 index 00000000000..3a4868f4b1c --- /dev/null +++ b/.github/workflows/test-noir-js.yml @@ -0,0 +1,59 @@ +name: Noir JS + +on: + pull_request: + merge_group: + push: + branches: + - master + +jobs: + test: + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.66.0 + with: + targets: wasm32-unknown-unknown + + - uses: Swatinem/rust-cache@v2 + with: + key: wasm32-unknown-unknown-noir-js + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Install jq + run: sudo apt-get install jq + + - name: Install wasm-bindgen-cli + uses: taiki-e/install-action@v2 + with: + tool: wasm-bindgen-cli@0.2.86 + + - name: Install toml2json + uses: taiki-e/install-action@v2 + with: + tool: toml2json@1.3.1 + + - name: Install wasm-opt + run: | + npm i wasm-opt -g + + - name: Build acvm_js + run: yarn workspace @noir-lang/acvm_js build + + - name: Build noirc_abi + run: yarn workspace @noir-lang/noirc_abi build + + - name: Run noir_js tests + run: | + yarn workspace @noir-lang/noir_js build + yarn workspace @noir-lang/noir_js test diff --git a/acvm-repo/acvm_js/.gitignore b/acvm-repo/acvm_js/.gitignore index d7d3d1dbe09..95efa89513e 
100644 --- a/acvm-repo/acvm_js/.gitignore +++ b/acvm-repo/acvm_js/.gitignore @@ -1,14 +1,10 @@ /target -.DS_Store -examples/**/target/ -examples/9 -.vscode node_modules -pkg/ -lib/ + +# Build outputs result -.direnv -**/outputs +nodejs +web # Yarn .pnp.* @@ -17,4 +13,4 @@ result !.yarn/plugins !.yarn/releases !.yarn/sdks -!.yarn/versions \ No newline at end of file +!.yarn/versions diff --git a/acvm-repo/acvm_js/buildPhaseCargoCommand.sh b/acvm-repo/acvm_js/buildPhaseCargoCommand.sh index e61cca8bf1e..6c710bc938f 100755 --- a/acvm-repo/acvm_js/buildPhaseCargoCommand.sh +++ b/acvm-repo/acvm_js/buildPhaseCargoCommand.sh @@ -18,16 +18,19 @@ function run_if_available { export self_path=$(dirname "$(readlink -f "$0")") + +NODE_DIR=$self_path/nodejs/ +BROWSER_DIR=$self_path/web/ + # Clear out the existing build artifacts as these aren't automatically removed by wasm-pack. if [ -d ./pkg/ ]; then - rm -rf $self_path/pkg/ + rm -r $NODE_DIR + rm -r $BROWSER_DIR fi TARGET=wasm32-unknown-unknown WASM_BINARY=$CARGO_TARGET_DIR/$TARGET/release/${pname}.wasm -NODE_DIR=$self_path/pkg/nodejs/ -BROWSER_DIR=$self_path/pkg/web/ NODE_WASM=${NODE_DIR}/${pname}_bg.wasm BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm @@ -36,4 +39,4 @@ run_or_fail cargo build --lib --release --target $TARGET --package ${pname} run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O -run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O \ No newline at end of file +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/acvm-repo/acvm_js/installPhase.sh b/acvm-repo/acvm_js/installPhase.sh index 847ba2ed90a..34ddb8155e1 100755 --- a/acvm-repo/acvm_js/installPhase.sh +++ b/acvm-repo/acvm_js/installPhase.sh @@ -1,14 +1,10 @@ #!/usr/bin/env bash export self_path=$(dirname "$(readlink -f "$0")") -mkdir -p $out -cp $self_path/README.md $out/ -cp -r $self_path/pkg/* $out/ +export out_path=$out/acvm_js -# The main package.json contains several keys which are incorrect/unwanted when distributing. 
-cat $self_path/package.json \ -| jq 'del(.private, .devDependencies, .scripts, .packageManager)' \ -> $out/package.json - -# Cleanup temporary pkg directory -rm -r $self_path/pkg \ No newline at end of file +mkdir -p $out_path +cp $self_path/README.md $out_path/ +cp $self_path/package.json $out_path/ +cp -r $self_path/nodejs $out_path/ +cp -r $self_path/web $out_path/ diff --git a/acvm-repo/acvm_js/nix/wasm-bindgen-cli/default.nix b/acvm-repo/acvm_js/nix/wasm-bindgen-cli/default.nix deleted file mode 100644 index 67225f05666..00000000000 --- a/acvm-repo/acvm_js/nix/wasm-bindgen-cli/default.nix +++ /dev/null @@ -1,37 +0,0 @@ -{ lib -, rustPlatform -, fetchCrate -, nodejs -, pkg-config -, openssl -, stdenv -, curl -, darwin -, runCommand -}: - -rustPlatform.buildRustPackage rec { - pname = "wasm-bindgen-cli"; - version = "0.2.86"; - - src = fetchCrate { - inherit pname version; - sha256 = "sha256-0u9bl+FkXEK2b54n7/l9JOCtKo+pb42GF9E1EnAUQa0="; - }; - - cargoSha256 = "sha256-AsZBtE2qHJqQtuCt/wCAgOoxYMfvDh8IzBPAOkYSYko="; - - nativeBuildInputs = [ pkg-config ]; - - buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ curl darwin.apple_sdk.frameworks.Security ]; - - doCheck = false; - - meta = with lib; { - homepage = "https://rustwasm.github.io/docs/wasm-bindgen/"; - license = with licenses; [ asl20 /* or */ mit ]; - description = "Facilitating high-level interactions between wasm modules and JavaScript"; - maintainers = with maintainers; [ nitsky rizary ]; - mainProgram = "wasm-bindgen"; - }; -} \ No newline at end of file diff --git a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts index 280b08abed3..601deffc79e 100644 --- a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts @@ -7,7 +7,7 @@ import initACVM, { WitnessMap, initLogLevel, ForeignCallHandler, -} from "../../../result/"; +} from "@noir-lang/acvm_js"; beforeEach(async () => { await initACVM(); diff --git a/acvm-repo/acvm_js/test/browser/witness_conversion.test.ts b/acvm-repo/acvm_js/test/browser/witness_conversion.test.ts index 9fb04e86a2d..67c6c998923 100644 --- a/acvm-repo/acvm_js/test/browser/witness_conversion.test.ts +++ b/acvm-repo/acvm_js/test/browser/witness_conversion.test.ts @@ -1,5 +1,8 @@ import { expect } from "@esm-bundle/chai"; -import initACVM, { compressWitness, decompressWitness } from "../../../result/"; +import initACVM, { + compressWitness, + decompressWitness, +} from "@noir-lang/acvm_js"; import { expectedCompressedWitnessMap, expectedWitnessMap, diff --git a/acvm-repo/acvm_js/test/node/build_info.test.ts b/acvm-repo/acvm_js/test/node/build_info.test.ts index e2e3788887f..fcbdd9e45b7 100644 --- a/acvm-repo/acvm_js/test/node/build_info.test.ts +++ b/acvm-repo/acvm_js/test/node/build_info.test.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import { BuildInfo, buildInfo } from "../../../result/"; +import { BuildInfo, buildInfo } from "@noir-lang/acvm_js"; import child_process from "child_process"; import pkg from "../../package.json"; diff --git a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts index 0523d01b9ca..a9807b48aa7 100644 --- a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts @@ -6,7 +6,7 @@ import { WasmBlackBoxFunctionSolver, WitnessMap, ForeignCallHandler, -} from "../../../result/"; +} from "@noir-lang/acvm_js"; it("successfully executes 
circuit and extracts return value", async () => { const { bytecode, initialWitnessMap, resultWitness, expectedResult } = diff --git a/acvm-repo/acvm_js/test/node/witness_conversion.test.ts b/acvm-repo/acvm_js/test/node/witness_conversion.test.ts index 577fccfb1c0..86a716c6d70 100644 --- a/acvm-repo/acvm_js/test/node/witness_conversion.test.ts +++ b/acvm-repo/acvm_js/test/node/witness_conversion.test.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import { compressWitness, decompressWitness } from "../../../result/"; +import { compressWitness, decompressWitness } from "@noir-lang/acvm_js"; import { expectedCompressedWitnessMap, expectedWitnessMap, diff --git a/acvm-repo/acvm_js/test/shared/addition.ts b/acvm-repo/acvm_js/test/shared/addition.ts index 790da506f6c..02b7d8457a8 100644 --- a/acvm-repo/acvm_js/test/shared/addition.ts +++ b/acvm-repo/acvm_js/test/shared/addition.ts @@ -1,4 +1,4 @@ -import { WitnessMap } from "../../../result/"; +import { WitnessMap } from "@noir-lang/acvm_js"; // See `addition_circuit` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ diff --git a/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts b/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts index 4d5a7f96e01..5fb6b2559e1 100644 --- a/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts +++ b/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts @@ -1,4 +1,4 @@ -import { WitnessMap } from "../../../result/"; +import { WitnessMap } from "@noir-lang/acvm_js"; // See `complex_brillig_foreign_call` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ diff --git a/acvm-repo/acvm_js/test/shared/foreign_call.ts b/acvm-repo/acvm_js/test/shared/foreign_call.ts index 038b64a5095..615f705f064 100644 --- a/acvm-repo/acvm_js/test/shared/foreign_call.ts +++ b/acvm-repo/acvm_js/test/shared/foreign_call.ts @@ -1,4 +1,4 @@ -import { WitnessMap } from "../../../result/"; +import { WitnessMap } from "@noir-lang/acvm_js"; // See `simple_brillig_foreign_call` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ diff --git a/tooling/noir_js/package.json b/tooling/noir_js/package.json index dbb6dccaf75..a1f3b52bd86 100644 --- a/tooling/noir_js/package.json +++ b/tooling/noir_js/package.json @@ -29,8 +29,8 @@ "dev": "tsc-multi --watch", "build": "tsc-multi", "test": "yarn test:node:esm && yarn test:node:cjs", - "test:node:esm": "mocha --timeout 25000 --config ./.mocharc.json", - "test:node:cjs": "mocha --timeout 25000 --config ./.mocharc.cjs.json", + "test:node:esm": "mocha --timeout 25000 --exit --config ./.mocharc.json", + "test:node:cjs": "mocha --timeout 25000 --exit --config ./.mocharc.cjs.json", "prettier": "prettier 'src/**/*.ts'", "prettier:fix": "prettier --write 'src/**/*.ts' 'test/**/*.ts'", "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" diff --git a/tooling/noir_js/src/witness_generation.ts b/tooling/noir_js/src/witness_generation.ts index bce95ba0c5c..c117adcf087 100644 --- a/tooling/noir_js/src/witness_generation.ts +++ b/tooling/noir_js/src/witness_generation.ts @@ -1,11 +1,11 @@ import { abiEncode } from '@noir-lang/noirc_abi'; import { validateInputs } from './input_validation.js'; import { base64Decode } from './base64_decode.js'; -import { WitnessMap, executeCircuit } from '@noir-lang/acvm_js'; +import { executeCircuit } from '@noir-lang/acvm_js'; import { witnessMapToUint8Array } from './serialize.js'; // Generates the witnesses needed to feed into the chosen proving system -export async function generateWitness(compiledProgram, inputs): Promise { +export async function generateWitness(compiledProgram, inputs): Promise { // Validate inputs const { isValid, error } = validateInputs(inputs, compiledProgram.abi); if (!isValid) { From 340386ad379b21ec1fb3185a0088961040d4bd4a Mon Sep 17 00:00:00 2001 From: Yordan Madzhunkov <52652109+yordanmadzhunkov@users.noreply.github.com> Date: Tue, 26 Sep 2023 17:54:05 +0300 Subject: [PATCH 2/2] chore(errors): Refactor errors representation in compiler (#2760) Co-authored-by: Yordan Madzhunkov --- compiler/noirc_driver/src/lib.rs | 6 +- compiler/noirc_errors/src/reporter.rs | 2 +- .../src/hir/def_collector/dc_crate.rs | 328 +++++++++++------- .../src/hir/def_collector/dc_mod.rs | 296 ++++++++-------- .../src/hir/def_collector/errors.rs | 24 +- .../src/hir/def_collector/mod.rs | 2 +- .../src/hir/def_map/aztec_library.rs | 36 +- .../noirc_frontend/src/hir/def_map/mod.rs | 39 ++- .../src/hir/resolution/resolver.rs | 9 +- compiler/noirc_frontend/src/parser/parser.rs | 12 +- 10 files changed, 440 insertions(+), 314 deletions(-) diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 14be5a9f4aa..60073f9cc84 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -112,7 +112,11 @@ pub fn check_crate( deny_warnings: bool, ) -> CompilationResult<()> { let mut errors = vec![]; - CrateDefMap::collect_defs(crate_id, context, &mut errors); + let diagnostics = CrateDefMap::collect_defs(crate_id, context); + errors.extend(diagnostics.into_iter().map(|(error, file_id)| { + let diagnostic: CustomDiagnostic = error.into(); + diagnostic.in_file(file_id) + })); if has_errors(&errors, deny_warnings) { Err(errors) diff --git a/compiler/noirc_errors/src/reporter.rs b/compiler/noirc_errors/src/reporter.rs index d695b2007bc..ba5ac450f56 100644 --- a/compiler/noirc_errors/src/reporter.rs +++ b/compiler/noirc_errors/src/reporter.rs @@ -99,7 +99,7 @@ impl std::fmt::Display for CustomDiagnostic { #[derive(Debug, Clone, PartialEq, Eq)] pub struct CustomLabel { - message: String, + pub message: String, pub span: Span, } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 0e4c1710ce8..0a4b3bc2a70 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -15,6 +15,9 @@ use crate::hir_def::traits::{TraitConstant, TraitFunction, TraitImpl, TraitType} use crate::node_interner::{ FuncId, NodeInterner, StmtId, StructId, TraitId, TraitImplKey, TypeAliasId, }; + +use crate::parser::ParserError; + use crate::{ ExpressionKind, Generics, Ident, LetStatement, Literal, NoirFunction, NoirStruct, NoirTrait, NoirTypeAlias, ParsedModule, Shared, 
StructType, TraitItem, Type, TypeBinding, @@ -22,8 +25,8 @@ use crate::{ }; use fm::FileId; use iter_extended::vecmap; +use noirc_errors::CustomDiagnostic; use noirc_errors::Span; -use noirc_errors::{CustomDiagnostic, FileDiagnostic}; use std::collections::{BTreeMap, HashMap}; use std::rc::Rc; use std::vec; @@ -90,6 +93,47 @@ pub struct DefCollector { pub(crate) collected_traits_impls: TraitImplMap, } +pub enum CompilationError { + ParseError(ParserError), + DefinitionError(DefCollectorErrorKind), + ResolveError(ResolverError), + TypeError(TypeCheckError), +} + +impl From for CustomDiagnostic { + fn from(value: CompilationError) -> Self { + match value { + CompilationError::ParseError(error) => error.into(), + CompilationError::DefinitionError(error) => error.into(), + CompilationError::ResolveError(error) => error.into(), + CompilationError::TypeError(error) => error.into(), + } + } +} + +impl From for CompilationError { + fn from(value: ParserError) -> Self { + CompilationError::ParseError(value) + } +} + +impl From for CompilationError { + fn from(value: DefCollectorErrorKind) -> Self { + CompilationError::DefinitionError(value) + } +} + +impl From for CompilationError { + fn from(value: ResolverError) -> Self { + CompilationError::ResolveError(value) + } +} +impl From for CompilationError { + fn from(value: TypeCheckError) -> Self { + CompilationError::TypeError(value) + } +} + /// Maps the type and the module id in which the impl is defined to the functions contained in that /// impl along with the generics declared on the impl itself. This also contains the Span /// of the object_type of the impl, used to issue an error if the object type fails to resolve. @@ -125,8 +169,8 @@ impl DefCollector { context: &mut Context, ast: ParsedModule, root_file_id: FileId, - errors: &mut Vec, - ) { + ) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; let crate_id = def_map.krate; // Recursively resolve the dependencies @@ -137,7 +181,7 @@ impl DefCollector { let crate_graph = &context.crate_graph[crate_id]; for dep in crate_graph.dependencies.clone() { - CrateDefMap::collect_defs(dep.crate_id, context, errors); + errors.extend(CrateDefMap::collect_defs(dep.crate_id, context)); let dep_def_root = context.def_map(&dep.crate_id).expect("ice: def map was just created").root; @@ -156,7 +200,14 @@ impl DefCollector { // and lowering the functions // i.e. 
Use a mod collector to collect the nodes at the root module // and process them - collect_defs(&mut def_collector, ast, root_file_id, crate_root, crate_id, context, errors); + errors.extend(collect_defs( + &mut def_collector, + ast, + root_file_id, + crate_root, + crate_id, + context, + )); // Add the current crate to the collection of DefMaps context.def_maps.insert(crate_id, def_collector.def_map); @@ -165,13 +216,14 @@ impl DefCollector { let (resolved, unresolved_imports) = resolve_imports(crate_id, def_collector.collected_imports, &context.def_maps); - let current_def_map = context.def_maps.get(&crate_id).unwrap(); - - errors.extend(vecmap(unresolved_imports, |(error, module_id)| { - let file_id = current_def_map.file_id(module_id); - let error = DefCollectorErrorKind::PathResolutionError(error); - error.into_file_diagnostic(file_id) - })); + { + let current_def_map = context.def_maps.get(&crate_id).unwrap(); + errors.extend(vecmap(unresolved_imports, |(error, module_id)| { + let file_id = current_def_map.file_id(module_id); + let error = DefCollectorErrorKind::PathResolutionError(error); + (error.into(), file_id) + })); + }; // Populate module namespaces according to the imports used let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); @@ -187,7 +239,7 @@ impl DefCollector { first_def, second_def, }; - errors.push(err.into_file_diagnostic(root_file_id)); + errors.push((err.into(), root_file_id)); } } } @@ -200,27 +252,33 @@ impl DefCollector { let (literal_globals, other_globals) = filter_literal_globals(def_collector.collected_globals); - let mut file_global_ids = resolve_globals(context, literal_globals, crate_id, errors); + let mut resolved_globals = resolve_globals(context, literal_globals, crate_id); - resolve_type_aliases(context, def_collector.collected_type_aliases, crate_id, errors); + errors.extend(resolve_type_aliases( + context, + def_collector.collected_type_aliases, + crate_id, + )); - resolve_traits(context, def_collector.collected_traits, crate_id, errors); + errors.extend(resolve_traits(context, def_collector.collected_traits, crate_id)); // Must resolve structs before we resolve globals. - resolve_structs(context, def_collector.collected_types, crate_id, errors); + errors.extend(resolve_structs(context, def_collector.collected_types, crate_id)); // We must wait to resolve non-integer globals until after we resolve structs since structs // globals will need to reference the struct type they're initialized to to ensure they are valid. - let mut more_global_ids = resolve_globals(context, other_globals, crate_id, errors); - - file_global_ids.append(&mut more_global_ids); + resolved_globals.extend(resolve_globals(context, other_globals, crate_id)); // Before we resolve any function symbols we must go through our impls and // re-collect the methods within into their proper module. This cannot be // done before resolution since we need to be able to resolve the type of the // impl since that determines the module we should collect into. - collect_impls(context, crate_id, &def_collector.collected_impls, errors); + errors.extend(collect_impls(context, crate_id, &def_collector.collected_impls)); - collect_trait_impls(context, crate_id, &def_collector.collected_traits_impls, errors); + errors.extend(collect_trait_impls( + context, + crate_id, + &def_collector.collected_traits_impls, + )); // Lower each function in the crate. 
This is now possible since imports have been resolved let file_func_ids = resolve_free_functions( @@ -229,7 +287,7 @@ impl DefCollector { &context.def_maps, def_collector.collected_functions, None, - errors, + &mut errors, ); let file_method_ids = resolve_impls( @@ -237,18 +295,24 @@ impl DefCollector { crate_id, &context.def_maps, def_collector.collected_impls, - errors, + &mut errors, + ); + // resolve_trait_impls can fill different type of errors, therefore we pass errors by mut ref + let file_trait_impls_ids = resolve_trait_impls( + context, + def_collector.collected_traits_impls, + crate_id, + &mut errors, ); - let file_trait_impls_ids = - resolve_trait_impls(context, def_collector.collected_traits_impls, crate_id, errors); - - type_check_globals(&mut context.def_interner, file_global_ids, errors); + errors.extend(resolved_globals.errors); + errors.extend(type_check_globals(&mut context.def_interner, resolved_globals.globals)); // Type check all of the functions in the crate - type_check_functions(&mut context.def_interner, file_func_ids, errors); - type_check_functions(&mut context.def_interner, file_method_ids, errors); - type_check_functions(&mut context.def_interner, file_trait_impls_ids, errors); + errors.extend(type_check_functions(&mut context.def_interner, file_func_ids)); + errors.extend(type_check_functions(&mut context.def_interner, file_method_ids)); + errors.extend(type_check_functions(&mut context.def_interner, file_trait_impls_ids)); + errors } } @@ -258,10 +322,10 @@ fn collect_impls( context: &mut Context, crate_id: CrateId, collected_impls: &ImplMap, - errors: &mut Vec, -) { +) -> Vec<(CompilationError, FileId)> { let interner = &mut context.def_interner; let def_maps = &mut context.def_maps; + let mut errors: Vec<(CompilationError, FileId)> = vec![]; for ((unresolved_type, module_id), methods) in collected_impls { let path_resolver = @@ -274,7 +338,9 @@ fn collect_impls( resolver.add_generics(generics); let typ = resolver.resolve_type(unresolved_type.clone()); - extend_errors(errors, unresolved.file_id, resolver.take_errors()); + errors.extend( + resolver.take_errors().iter().cloned().map(|e| (e.into(), unresolved.file_id)), + ); if let Some(struct_type) = get_struct_type(&typ) { let struct_type = struct_type.borrow(); @@ -285,7 +351,7 @@ fn collect_impls( let span = *span; let type_name = struct_type.name.to_string(); let error = DefCollectorErrorKind::ForeignImpl { span, type_name }; - errors.push(error.into_file_diagnostic(unresolved.file_id)); + errors.push((error.into(), unresolved.file_id)); continue; } @@ -298,32 +364,33 @@ fn collect_impls( let result = module.declare_function(method.name_ident().clone(), *method_id); if let Err((first_def, second_def)) = result { - let err = DefCollectorErrorKind::Duplicate { + let error = DefCollectorErrorKind::Duplicate { typ: DuplicateType::Function, first_def, second_def, }; - errors.push(err.into_file_diagnostic(unresolved.file_id)); + errors.push((error.into(), unresolved.file_id)); } } // Prohibit defining impls for primitive types if we're not in the stdlib } else if typ != Type::Error && !crate_id.is_stdlib() { let span = *span; let error = DefCollectorErrorKind::NonStructTypeInImpl { span }; - errors.push(error.into_file_diagnostic(unresolved.file_id)); + errors.push((error.into(), unresolved.file_id)); } } } + errors } fn collect_trait_impls( context: &mut Context, crate_id: CrateId, collected_impls: &TraitImplMap, - errors: &mut Vec, -) { +) -> Vec<(CompilationError, FileId)> { let interner = &mut 
context.def_interner; let def_maps = &mut context.def_maps; + let mut errors: Vec<(CompilationError, FileId)> = vec![]; // TODO: To follow the semantics of Rust, we must allow the impl if either // 1. The type is a struct and it's defined in the current crate @@ -341,7 +408,9 @@ fn collect_trait_impls( // Add the method to the struct's namespace if let Some(struct_type) = get_struct_type(&typ) { - extend_errors(errors, trait_impl.file_id, resolver.take_errors()); + errors.extend( + resolver.take_errors().iter().cloned().map(|e| (e.into(), trait_impl.file_id)), + ); let struct_type = struct_type.borrow(); let type_module = struct_type.id.local_module_id(); @@ -356,16 +425,17 @@ fn collect_trait_impls( first_def, second_def, }; - errors.push(err.into_file_diagnostic(trait_impl.file_id)); + errors.push((err.into(), trait_impl.file_id)); } } else { let span = trait_impl.trait_impl_ident.span(); let trait_ident = trait_impl.the_trait.trait_def.name.clone(); let error = DefCollectorErrorKind::NonStructTraitImpl { trait_ident, span }; - errors.push(error.into_file_diagnostic(trait_impl.file_id)); + errors.push((error.into(), trait_impl.file_id)); } } } + errors } fn get_struct_type(typ: &Type) -> Option<&Shared> { @@ -375,14 +445,6 @@ fn get_struct_type(typ: &Type) -> Option<&Shared> { } } -fn extend_errors(errors: &mut Vec, file: fm::FileId, new_errors: Errs) -where - Errs: IntoIterator, - Err: Into, -{ - errors.extend(new_errors.into_iter().map(|err| err.into().in_file(file))); -} - /// Separate the globals Vec into two. The first element in the tuple will be the /// literal globals, except for arrays, and the second will be all other globals. /// We exclude array literals as they can contain complex types @@ -395,13 +457,25 @@ fn filter_literal_globals( }) } +pub struct ResolvedGlobals { + pub globals: Vec<(FileId, StmtId)>, + pub errors: Vec<(CompilationError, FileId)>, +} + +impl ResolvedGlobals { + pub fn extend(&mut self, oth: Self) { + self.globals.extend(oth.globals); + self.errors.extend(oth.errors); + } +} + fn resolve_globals( context: &mut Context, globals: Vec, crate_id: CrateId, - errors: &mut Vec, -) -> Vec<(FileId, StmtId)> { - vecmap(globals, |global| { +) -> ResolvedGlobals { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + let globals = vecmap(globals, |global| { let module_id = ModuleId { local_id: global.module_id, krate: crate_id }; let path_resolver = StandardPathResolver::new(module_id); let storage_slot = context.next_storage_slot(module_id); @@ -416,25 +490,47 @@ fn resolve_globals( let name = global.stmt_def.pattern.name_ident().clone(); let hir_stmt = resolver.resolve_global_let(global.stmt_def); - extend_errors(errors, global.file_id, resolver.take_errors()); + errors.extend(resolver.take_errors().iter().cloned().map(|e| (e.into(), global.file_id))); context.def_interner.update_global(global.stmt_id, hir_stmt); context.def_interner.push_global(global.stmt_id, name, global.module_id, storage_slot); (global.file_id, global.stmt_id) - }) + }); + ResolvedGlobals { globals, errors } } fn type_check_globals( interner: &mut NodeInterner, global_ids: Vec<(FileId, StmtId)>, - all_errors: &mut Vec, -) { - for (file_id, stmt_id) in global_ids { - let errors = TypeChecker::check_global(&stmt_id, interner); - extend_errors(all_errors, file_id, errors); - } +) -> Vec<(CompilationError, fm::FileId)> { + global_ids + .iter() + .flat_map(|(file_id, stmt_id)| { + TypeChecker::check_global(stmt_id, interner) + .iter() + .cloned() + .map(|e| (e.into(), *file_id)) + 
.collect::>() + }) + .collect() +} + +fn type_check_functions( + interner: &mut NodeInterner, + file_func_ids: Vec<(FileId, FuncId)>, +) -> Vec<(CompilationError, fm::FileId)> { + file_func_ids + .iter() + .flat_map(|(file, func)| { + type_check_func(interner, *func) + .iter() + .cloned() + .map(|e| (e.into(), *file)) + .collect::>() + }) + .collect() } /// Create the mappings from TypeId -> StructType @@ -443,36 +539,37 @@ fn resolve_structs( context: &mut Context, structs: BTreeMap, crate_id: CrateId, - errors: &mut Vec, -) { +) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; // Resolve each field in each struct. // Each struct should already be present in the NodeInterner after def collection. for (type_id, typ) in structs { - let (generics, fields) = resolve_struct_fields(context, crate_id, typ, errors); + let file_id = typ.file_id; + let (generics, fields, resolver_errors) = resolve_struct_fields(context, crate_id, typ); + errors.extend(vecmap(resolver_errors, |err| (err.into(), file_id))); context.def_interner.update_struct(type_id, |struct_def| { struct_def.set_fields(fields); struct_def.generics = generics; }); } + errors } fn resolve_trait_types( _context: &mut Context, _crate_id: CrateId, _unresolved_trait: &UnresolvedTrait, - _errors: &mut [FileDiagnostic], -) -> Vec { +) -> (Vec, Vec<(CompilationError, FileId)>) { // TODO - vec![] + (vec![], vec![]) } fn resolve_trait_constants( _context: &mut Context, _crate_id: CrateId, _unresolved_trait: &UnresolvedTrait, - _errors: &mut [FileDiagnostic], -) -> Vec { +) -> (Vec, Vec<(CompilationError, FileId)>) { // TODO - vec![] + (vec![], vec![]) } fn resolve_trait_methods( @@ -480,8 +577,7 @@ fn resolve_trait_methods( trait_id: TraitId, crate_id: CrateId, unresolved_trait: &UnresolvedTrait, - errors: &mut Vec, -) -> Vec { +) -> (Vec, Vec<(CompilationError, FileId)>) { let interner = &mut context.def_interner; let def_maps = &mut context.def_maps; @@ -492,7 +588,7 @@ fn resolve_trait_methods( let file = def_maps[&crate_id].file_id(unresolved_trait.module_id); let mut res = vec![]; - + let mut resolver_errors = vec![]; for item in &unresolved_trait.trait_def.items { if let TraitItem::Function { name, @@ -527,14 +623,16 @@ fn resolve_trait_methods( span, }; res.push(f); - let new_errors = take_errors_filter_self_not_resolved(resolver); - extend_errors(errors, file, new_errors); + resolver_errors.extend(take_errors_filter_self_not_resolved(file, resolver)); } } - res + (res, resolver_errors) } -fn take_errors_filter_self_not_resolved(resolver: Resolver<'_>) -> Vec { +fn take_errors_filter_self_not_resolved( + file_id: FileId, + resolver: Resolver<'_>, +) -> Vec<(CompilationError, FileId)> { resolver .take_errors() .iter() @@ -545,6 +643,7 @@ fn take_errors_filter_self_not_resolved(resolver: Resolver<'_>) -> Vec true, }) .cloned() + .map(|resolution_error| (resolution_error.into(), file_id)) .collect() } @@ -554,64 +653,61 @@ fn resolve_traits( context: &mut Context, traits: BTreeMap, crate_id: CrateId, - errors: &mut Vec, -) { +) -> Vec<(CompilationError, FileId)> { for (trait_id, unresolved_trait) in &traits { context.def_interner.push_empty_trait(*trait_id, unresolved_trait); } + let mut res: Vec<(CompilationError, FileId)> = vec![]; for (trait_id, unresolved_trait) in traits { // Resolve order // 1. 
Trait Types ( Trait constants can have a trait type, therefore types before constants) - let _ = resolve_trait_types(context, crate_id, &unresolved_trait, errors); + let _ = resolve_trait_types(context, crate_id, &unresolved_trait); // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) - let _ = resolve_trait_constants(context, crate_id, &unresolved_trait, errors); + let _ = resolve_trait_constants(context, crate_id, &unresolved_trait); // 3. Trait Methods - let methods = resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait, errors); - + let (methods, errors) = + resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait); + res.extend(errors); context.def_interner.update_trait(trait_id, |trait_def| { trait_def.set_methods(methods); }); } + res } fn resolve_struct_fields( context: &mut Context, krate: CrateId, unresolved: UnresolvedStruct, - all_errors: &mut Vec, -) -> (Generics, Vec<(Ident, Type)>) { +) -> (Generics, Vec<(Ident, Type)>, Vec) { let path_resolver = StandardPathResolver::new(ModuleId { local_id: unresolved.module_id, krate }); - - let file = unresolved.file_id; - + let file_id = unresolved.file_id; let (generics, fields, errors) = - Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file) + Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file_id) .resolve_struct_fields(unresolved.struct_def); - - extend_errors(all_errors, unresolved.file_id, errors); - (generics, fields) + (generics, fields, errors) } fn resolve_type_aliases( context: &mut Context, type_aliases: BTreeMap, crate_id: CrateId, - all_errors: &mut Vec, -) { +) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; for (type_id, unresolved_typ) in type_aliases { let path_resolver = StandardPathResolver::new(ModuleId { local_id: unresolved_typ.module_id, krate: crate_id, }); let file = unresolved_typ.file_id; - let (typ, generics, errors) = + let (typ, generics, resolver_errors) = Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file) .resolve_type_aliases(unresolved_typ.type_alias_def); - extend_errors(all_errors, file, errors); - + errors.extend(resolver_errors.iter().cloned().map(|e| (e.into(), file))); context.def_interner.set_type_alias(type_id, typ, generics); } + errors } fn resolve_impls( @@ -619,7 +715,7 @@ fn resolve_impls( crate_id: CrateId, def_maps: &BTreeMap, collected_impls: ImplMap, - errors: &mut Vec, + errors: &mut Vec<(CompilationError, FileId)>, ) -> Vec<(FileId, FuncId)> { let mut file_method_ids = Vec::new(); @@ -656,8 +752,7 @@ fn resolve_impls( first_span: interner.function_ident(&first_fn).span(), second_span: interner.function_ident(method_id).span(), }; - - errors.push(error.into_file_diagnostic(*file_id)); + errors.push((error.into(), *file_id)); } } } @@ -672,7 +767,7 @@ fn resolve_trait_impls( context: &mut Context, traits: TraitImplMap, crate_id: CrateId, - errors: &mut Vec, + errors: &mut Vec<(CompilationError, FileId)>, ) -> Vec<(FileId, FuncId)> { let interner = &mut context.def_interner; let mut methods = Vec::<(FileId, FuncId)>::new(); @@ -720,7 +815,7 @@ fn resolve_trait_impls( first_def: prev_trait_impl_ident.borrow().ident.clone(), second_def: trait_definition_ident.clone(), }; - errors.push(err.into_file_diagnostic(trait_impl.methods.file_id)); + errors.push((err.into(), trait_impl.methods.file_id)); } else { interner.add_trait_implementation(&key, resolved_trait_impl); } @@ 
-736,7 +831,7 @@ fn check_methods_signatures( resolver: &mut Resolver, impl_methods: &Vec<(FileId, FuncId)>, trait_id: TraitId, - errors: &mut Vec, + errors: &mut Vec<(CompilationError, FileId)>, ) { let the_trait_shared = resolver.interner.get_trait(trait_id); let the_trait = the_trait_shared.borrow(); @@ -777,7 +872,7 @@ fn check_methods_signatures( }); } } else { - errors.push( + errors.push(( DefCollectorErrorKind::MismatchTraitImplementationNumParameters { actual_num_parameters: meta.parameters.0.len(), expected_num_parameters: method.arguments.len(), @@ -785,8 +880,9 @@ fn check_methods_signatures( method_name: func_name.to_string(), span: meta.location.span, } - .into_file_diagnostic(*file_id), - ); + .into(), + *file_id, + )); } } @@ -805,7 +901,7 @@ fn check_methods_signatures( } }); - extend_errors(errors, *file_id, typecheck_errors); + errors.extend(typecheck_errors.iter().cloned().map(|e| (e.into(), *file_id))); } } @@ -818,7 +914,7 @@ fn resolve_free_functions( def_maps: &BTreeMap, collected_functions: Vec, self_type: Option, - errors: &mut Vec, + errors: &mut Vec<(CompilationError, FileId)>, ) -> Vec<(FileId, FuncId)> { // Lower each function in the crate. This is now possible since imports have been resolved collected_functions @@ -844,7 +940,7 @@ fn resolve_function_set( unresolved_functions: UnresolvedFunctions, self_type: Option, impl_generics: Vec<(Rc, Shared, Span)>, - errors: &mut Vec, + errors: &mut Vec<(CompilationError, FileId)>, ) -> Vec<(FileId, FuncId)> { let file_id = unresolved_functions.file_id; @@ -862,17 +958,7 @@ fn resolve_function_set( let (hir_func, func_meta, errs) = resolver.resolve_function(func, func_id); interner.push_fn_meta(func_meta, func_id); interner.update_fn(func_id, hir_func); - extend_errors(errors, file_id, errs); + errors.extend(errs.iter().cloned().map(|e| (e.into(), file_id))); (file_id, func_id) }) } - -fn type_check_functions( - interner: &mut NodeInterner, - file_func_ids: Vec<(FileId, FuncId)>, - errors: &mut Vec, -) { - for (file, func) in file_func_ids { - extend_errors(errors, file, type_check_func(interner, func)); - } -} diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 53c8947d046..99b7ed9a7f6 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -1,7 +1,7 @@ -use std::collections::HashSet; +use std::{collections::HashSet, vec}; use fm::FileId; -use noirc_errors::{CustomDiagnostic, FileDiagnostic, Location}; +use noirc_errors::Location; use crate::{ graph::CrateId, @@ -18,7 +18,7 @@ use crate::{ use super::{ dc_crate::{ - DefCollector, UnresolvedFunctions, UnresolvedGlobal, UnresolvedTraitImpl, + CompilationError, DefCollector, UnresolvedFunctions, UnresolvedGlobal, UnresolvedTraitImpl, UnresolvedTypeAlias, }, errors::{DefCollectorErrorKind, DuplicateType}, @@ -44,16 +44,15 @@ pub fn collect_defs( module_id: LocalModuleId, crate_id: CrateId, context: &mut Context, - errors: &mut Vec, -) { +) -> Vec<(CompilationError, FileId)> { let mut collector = ModCollector { def_collector, file_id, module_id }; - + let mut errors: Vec<(CompilationError, FileId)> = vec![]; // First resolve the module declarations for decl in ast.module_decls { - collector.parse_module_declaration(context, &decl, crate_id, errors); + errors.extend(collector.parse_module_declaration(context, &decl, crate_id)); } - collector.collect_submodules(context, crate_id, ast.submodules, file_id, errors); + 
errors.extend(collector.collect_submodules(context, crate_id, ast.submodules, file_id)); // Then add the imports to defCollector to resolve once all modules in the hierarchy have been resolved for import in ast.imports { @@ -64,19 +63,21 @@ pub fn collect_defs( }); } - collector.collect_globals(context, ast.globals, errors); + errors.extend(collector.collect_globals(context, ast.globals)); - collector.collect_traits(ast.traits, crate_id, errors); + errors.extend(collector.collect_traits(ast.traits, crate_id)); - collector.collect_structs(context, ast.types, crate_id, errors); + errors.extend(collector.collect_structs(context, ast.types, crate_id)); - collector.collect_type_aliases(context, ast.type_aliases, errors); + errors.extend(collector.collect_type_aliases(context, ast.type_aliases)); - collector.collect_functions(context, ast.functions, crate_id, errors); + errors.extend(collector.collect_functions(context, ast.functions, crate_id)); - collector.collect_trait_impls(context, ast.trait_impls, crate_id, errors); + errors.extend(collector.collect_trait_impls(context, ast.trait_impls, crate_id)); collector.collect_impls(context, ast.impls, crate_id); + + errors } impl<'a> ModCollector<'a> { @@ -84,8 +85,8 @@ impl<'a> ModCollector<'a> { &mut self, context: &mut Context, globals: Vec, - errors: &mut Vec, - ) { + ) -> Vec<(CompilationError, fm::FileId)> { + let mut errors = vec![]; for global in globals { let name = global.pattern.name_ident().clone(); @@ -103,7 +104,7 @@ impl<'a> ModCollector<'a> { first_def, second_def, }; - errors.push(err.into_file_diagnostic(self.file_id)); + errors.push((err.into(), self.file_id)); } self.def_collector.collected_globals.push(UnresolvedGlobal { @@ -113,6 +114,7 @@ impl<'a> ModCollector<'a> { stmt_def: global, }); } + errors } fn collect_impls(&mut self, context: &mut Context, impls: Vec, krate: CrateId) { @@ -139,65 +141,64 @@ impl<'a> ModCollector<'a> { context: &mut Context, impls: Vec, krate: CrateId, - errors: &mut Vec, - ) { + ) -> Vec<(CompilationError, fm::FileId)> { let module_id = ModuleId { krate, local_id: self.module_id }; + let mut diagnostics: Vec<(CompilationError, fm::FileId)> = vec![]; for trait_impl in impls { let trait_name = &trait_impl.trait_name; let module = &self.def_collector.def_map.modules[self.module_id.0]; - if let Some(trait_id) = self.find_trait_or_emit_error(module, trait_name, errors) { - let collected_trait = - self.def_collector.collected_traits.get(&trait_id).cloned().unwrap(); - - let unresolved_functions = self.collect_trait_implementations( - context, - &trait_impl, - &collected_trait.trait_def, - krate, - errors, - ); - - for (_, func_id, noir_function) in &unresolved_functions.functions { - let function = &noir_function.def; - context.def_interner.push_function(*func_id, function, module_id); - } + match self.find_trait_or_emit_error(module, trait_name) { + Ok(trait_id) => { + let collected_trait = + self.def_collector.collected_traits.get(&trait_id).cloned().unwrap(); + let unresolved_functions = self.collect_trait_implementations( + context, + &trait_impl, + &collected_trait.trait_def, + krate, + &mut diagnostics, + ); + + for (_, func_id, noir_function) in &unresolved_functions.functions { + let function = &noir_function.def; + context.def_interner.push_function(*func_id, function, module_id); + } - let unresolved_trait_impl = UnresolvedTraitImpl { - file_id: self.file_id, - module_id: self.module_id, - the_trait: collected_trait, - methods: unresolved_functions, - trait_impl_ident: 
trait_impl.trait_name.clone(), - }; + let unresolved_trait_impl = UnresolvedTraitImpl { + file_id: self.file_id, + module_id: self.module_id, + the_trait: collected_trait, + methods: unresolved_functions, + trait_impl_ident: trait_impl.trait_name.clone(), + }; - let key = (trait_impl.object_type, self.module_id, trait_id); - self.def_collector.collected_traits_impls.insert(key, unresolved_trait_impl); + let key = (trait_impl.object_type, self.module_id, trait_id); + self.def_collector.collected_traits_impls.insert(key, unresolved_trait_impl); + } + Err((err, file_id)) => diagnostics.push((err.into(), file_id)), } } + + diagnostics } fn find_trait_or_emit_error( &self, module: &ModuleData, trait_name: &Ident, - errors: &mut Vec, - ) -> Option { + ) -> Result { match module.find_trait_with_name(trait_name) { - Ok(trait_id) => Some(trait_id), - Err(ScopeResolveError::WrongKind) => { - let error = - DefCollectorErrorKind::NotATrait { not_a_trait_name: trait_name.clone() }; - errors.push(error.into_file_diagnostic(self.file_id)); - None - } - Err(ScopeResolveError::NotFound) => { - let error = - DefCollectorErrorKind::TraitNotFound { trait_ident: trait_name.clone() }; - errors.push(error.into_file_diagnostic(self.file_id)); - None - } + Ok(trait_id) => Ok(trait_id), + Err(ScopeResolveError::WrongKind) => Err(( + DefCollectorErrorKind::NotATrait { not_a_trait_name: trait_name.clone() }, + self.file_id, + )), + Err(ScopeResolveError::NotFound) => Err(( + DefCollectorErrorKind::TraitNotFound { trait_ident: trait_name.clone() }, + self.file_id, + )), } } @@ -207,7 +208,7 @@ impl<'a> ModCollector<'a> { trait_impl: &NoirTraitImpl, trait_def: &NoirTrait, krate: CrateId, - errors: &mut Vec, + diagnostics: &mut Vec<(CompilationError, FileId)>, ) -> UnresolvedFunctions { let mut unresolved_functions = UnresolvedFunctions { file_id: self.file_id, functions: Vec::new() }; @@ -284,7 +285,7 @@ impl<'a> ModCollector<'a> { method_name: name.clone(), trait_impl_span: trait_impl.object_type_span, }; - errors.push(error.into_file_diagnostic(self.file_id)); + diagnostics.push((error.into(), self.file_id)); } } } else { @@ -303,7 +304,7 @@ impl<'a> ModCollector<'a> { trait_name: trait_def.name.clone(), impl_method: func.name_ident().clone(), }; - errors.push(error.into_file_diagnostic(self.file_id)); + diagnostics.push((error.into(), self.file_id)); } } @@ -315,10 +316,10 @@ impl<'a> ModCollector<'a> { context: &mut Context, functions: Vec, krate: CrateId, - errors: &mut Vec, - ) { + ) -> Vec<(CompilationError, FileId)> { let mut unresolved_functions = UnresolvedFunctions { file_id: self.file_id, functions: Vec::new() }; + let mut errors = vec![]; let module = ModuleId { krate, local_id: self.module_id }; @@ -333,13 +334,11 @@ impl<'a> ModCollector<'a> { // Then go over the where clause and assign trait_ids to the constraints for constraint in &mut function.def.where_clause { let module = &self.def_collector.def_map.modules[self.module_id.0]; - - if let Some(trait_id) = self.find_trait_or_emit_error( - module, - &constraint.trait_bound.trait_name, - errors, - ) { - constraint.trait_bound.trait_id = Some(trait_id); + match self.find_trait_or_emit_error(module, &constraint.trait_bound.trait_name) { + Ok(trait_id) => { + constraint.trait_bound.trait_id = Some(trait_id); + } + Err((err, file_id)) => errors.push((err.into(), file_id)), } } @@ -361,11 +360,12 @@ impl<'a> ModCollector<'a> { first_def, second_def, }; - errors.push(error.into_file_diagnostic(self.file_id)); + errors.push((error.into(), self.file_id)); } } 
self.def_collector.collected_functions.push(unresolved_functions); + errors } /// Collect any struct definitions declared within the ast. @@ -375,8 +375,8 @@ impl<'a> ModCollector<'a> { context: &mut Context, types: Vec, krate: CrateId, - errors: &mut Vec, - ) { + ) -> Vec<(CompilationError, FileId)> { + let mut definiton_errors = vec![]; for struct_definition in types { let name = struct_definition.name.clone(); @@ -387,9 +387,12 @@ impl<'a> ModCollector<'a> { }; // Create the corresponding module for the struct namespace - let id = match self.push_child_module(&name, self.file_id, false, false, errors) { - Some(local_id) => context.def_interner.new_struct(&unresolved, krate, local_id), - None => continue, + let id = match self.push_child_module(&name, self.file_id, false, false) { + Ok(local_id) => context.def_interner.new_struct(&unresolved, krate, local_id), + Err(error) => { + definiton_errors.push((error.into(), self.file_id)); + continue; + } }; // Add the struct to scope so its path can be looked up later @@ -397,17 +400,18 @@ impl<'a> ModCollector<'a> { self.def_collector.def_map.modules[self.module_id.0].declare_struct(name, id); if let Err((first_def, second_def)) = result { - let err = DefCollectorErrorKind::Duplicate { + let error = DefCollectorErrorKind::Duplicate { typ: DuplicateType::TypeDefinition, first_def, second_def, }; - errors.push(err.into_file_diagnostic(self.file_id)); + definiton_errors.push((error.into(), self.file_id)); } // And store the TypeId -> StructType mapping somewhere it is reachable self.def_collector.collected_types.insert(id, unresolved); } + definiton_errors } /// Collect any type aliases definitions declared within the ast. @@ -416,8 +420,8 @@ impl<'a> ModCollector<'a> { &mut self, context: &mut Context, type_aliases: Vec, - errors: &mut Vec, - ) { + ) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; for type_alias in type_aliases { let name = type_alias.name.clone(); @@ -440,11 +444,12 @@ impl<'a> ModCollector<'a> { first_def, second_def, }; - errors.push(err.into_file_diagnostic(self.file_id)); + errors.push((err.into(), self.file_id)); } self.def_collector.collected_type_aliases.insert(type_alias_id, unresolved); } + errors } /// Collect any traits definitions declared within the ast. 
@@ -453,15 +458,18 @@ impl<'a> ModCollector<'a> { &mut self, traits: Vec, krate: CrateId, - errors: &mut Vec, - ) { + ) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; for trait_definition in traits { let name = trait_definition.name.clone(); // Create the corresponding module for the trait namespace - let id = match self.push_child_module(&name, self.file_id, false, false, errors) { - Some(local_id) => TraitId(ModuleId { krate, local_id }), - None => continue, + let id = match self.push_child_module(&name, self.file_id, false, false) { + Ok(local_id) => TraitId(ModuleId { krate, local_id }), + Err(error) => { + errors.push((error.into(), self.file_id)); + continue; + } }; // Add the trait to scope so its path can be looked up later @@ -469,12 +477,12 @@ impl<'a> ModCollector<'a> { self.def_collector.def_map.modules[self.module_id.0].declare_trait(name, id); if let Err((first_def, second_def)) = result { - let err = DefCollectorErrorKind::Duplicate { + let error = DefCollectorErrorKind::Duplicate { typ: DuplicateType::Trait, first_def, second_def, }; - errors.push(err.into_file_diagnostic(self.file_id)); + errors.push((error.into(), self.file_id)); } // And store the TraitId -> TraitType mapping somewhere it is reachable @@ -485,6 +493,7 @@ impl<'a> ModCollector<'a> { }; self.def_collector.collected_traits.insert(id, unresolved); } + errors } fn collect_submodules( @@ -493,27 +502,26 @@ impl<'a> ModCollector<'a> { crate_id: CrateId, submodules: Vec, file_id: FileId, - errors: &mut Vec, - ) { + ) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; for submodule in submodules { - if let Some(child) = self.push_child_module( - &submodule.name, - file_id, - true, - submodule.is_contract, - errors, - ) { - collect_defs( - self.def_collector, - submodule.contents, - file_id, - child, - crate_id, - context, - errors, - ); - } + match self.push_child_module(&submodule.name, file_id, true, submodule.is_contract) { + Ok(child) => { + errors.extend(collect_defs( + self.def_collector, + submodule.contents, + file_id, + child, + crate_id, + context, + )); + } + Err(error) => { + errors.push((error.into(), file_id)); + } + }; } + errors } /// Search for a module named `mod_name` @@ -524,8 +532,8 @@ impl<'a> ModCollector<'a> { context: &mut Context, mod_name: &Ident, crate_id: CrateId, - errors: &mut Vec, - ) { + ) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; let child_file_id = match context.file_manager.find_module(self.file_id, &mod_name.0.contents) { Ok(child_file_id) => child_file_id, @@ -533,45 +541,55 @@ impl<'a> ModCollector<'a> { let mod_name = mod_name.clone(); let err = DefCollectorErrorKind::UnresolvedModuleDecl { mod_name, expected_path }; - errors.push(err.into_file_diagnostic(self.file_id)); - return; + errors.push((err.into(), self.file_id)); + return errors; } }; let location = Location { file: self.file_id, span: mod_name.span() }; if let Some(old_location) = context.visited_files.get(&child_file_id) { - let message = format!("Module '{mod_name}' is already part of the crate"); - let secondary = String::new(); - let error = CustomDiagnostic::simple_error(message, secondary, location.span); - errors.push(error.in_file(location.file)); - - let message = format!("Note: {mod_name} was originally declared here"); - let secondary = String::new(); - let error = CustomDiagnostic::simple_error(message, secondary, old_location.span); - 
errors.push(error.in_file(old_location.file)); - return; + let error = DefCollectorErrorKind::ModuleAlreadyPartOfCrate { + mod_name: mod_name.clone(), + span: location.span, + }; + errors.push((error.into(), location.file)); + + let error2 = DefCollectorErrorKind::ModuleOrignallyDefined { + mod_name: mod_name.clone(), + span: old_location.span, + }; + errors.push((error2.into(), old_location.file)); + return errors; } context.visited_files.insert(child_file_id, location); // Parse the AST for the module we just found and then recursively look for it's defs - let ast = parse_file(&context.file_manager, child_file_id, errors); + //let ast = parse_file(&context.file_manager, child_file_id, errors); + let (ast, parsing_errors) = parse_file(&context.file_manager, child_file_id); + + errors.extend( + parsing_errors.iter().map(|e| (e.clone().into(), child_file_id)).collect::>(), + ); // Add module into def collector and get a ModuleId - if let Some(child_mod_id) = - self.push_child_module(mod_name, child_file_id, true, false, errors) - { - collect_defs( - self.def_collector, - ast, - child_file_id, - child_mod_id, - crate_id, - context, - errors, - ); + match self.push_child_module(mod_name, child_file_id, true, false) { + Ok(child_mod_id) => { + errors.extend(collect_defs( + self.def_collector, + ast, + child_file_id, + child_mod_id, + crate_id, + context, + )); + } + Err(error) => { + errors.push((error.into(), child_file_id)); + } } + errors } /// Add a child module to the current def_map. @@ -582,8 +600,7 @@ impl<'a> ModCollector<'a> { file_id: FileId, add_to_parent_scope: bool, is_contract: bool, - errors: &mut Vec, - ) -> Option { + ) -> Result { let parent = Some(self.module_id); let location = Location::new(mod_name.span(), file_id); let new_module = ModuleData::new(parent, location, is_contract); @@ -615,11 +632,10 @@ impl<'a> ModCollector<'a> { first_def, second_def, }; - errors.push(err.into_file_diagnostic(self.file_id)); - return None; + return Err(err); } } - Some(LocalModuleId(module_id)) + Ok(LocalModuleId(module_id)) } } diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs index 1e3b36844f9..3c611baa379 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -1,6 +1,5 @@ use crate::hir::resolution::import::PathResolutionError; use crate::Ident; - use noirc_errors::CustomDiagnostic as Diagnostic; use noirc_errors::FileDiagnostic; use noirc_errors::Span; @@ -8,7 +7,7 @@ use thiserror::Error; use std::fmt; -#[derive(Debug)] +#[derive(Debug, Eq, PartialEq)] pub enum DuplicateType { Function, Module, @@ -49,6 +48,13 @@ pub enum DefCollectorErrorKind { TraitNotFound { trait_ident: Ident }, #[error("Missing Trait method implementation")] TraitMissingMethod { trait_name: Ident, method_name: Ident, trait_impl_span: Span }, + #[error("Module is already part of the crate")] + ModuleAlreadyPartOfCrate { mod_name: Ident, span: Span }, + #[error("Module was originally declared here")] + ModuleOrignallyDefined { mod_name: Ident, span: Span }, + #[cfg(feature = "aztec")] + #[error("Aztec dependency not found. 
Please add aztec as a dependency in your Cargo.toml")]
+    AztecNotFound {},
 }
 
 impl DefCollectorErrorKind {
@@ -167,6 +173,20 @@ impl From<DefCollectorErrorKind> for Diagnostic {
                     span,
                 )
             }
+            DefCollectorErrorKind::ModuleAlreadyPartOfCrate { mod_name, span } => {
+                let message = format!("Module '{mod_name}' is already part of the crate");
+                let secondary = String::new();
+                Diagnostic::simple_error(message, secondary, span)
+            }
+            DefCollectorErrorKind::ModuleOrignallyDefined { mod_name, span } => {
+                let message = format!("Note: {mod_name} was originally declared here");
+                let secondary = String::new();
+                Diagnostic::simple_error(message, secondary, span)
+            }
+            #[cfg(feature = "aztec")]
+            DefCollectorErrorKind::AztecNotFound {} => Diagnostic::from_message(
+                "Aztec dependency not found. Please add aztec as a dependency in your Cargo.toml",
+            ),
         }
     }
 }
diff --git a/compiler/noirc_frontend/src/hir/def_collector/mod.rs b/compiler/noirc_frontend/src/hir/def_collector/mod.rs
index 26f360642ef..f7d11c343fd 100644
--- a/compiler/noirc_frontend/src/hir/def_collector/mod.rs
+++ b/compiler/noirc_frontend/src/hir/def_collector/mod.rs
@@ -19,4 +19,4 @@
 //! These passes are performed sequentially (along with type checking afterward) in dc_crate.
 pub mod dc_crate;
 pub mod dc_mod;
-mod errors;
+pub mod errors;
diff --git a/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs b/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs
index e3a9735f936..69adb864317 100644
--- a/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs
+++ b/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs
@@ -1,7 +1,8 @@
 use acvm::FieldElement;
-use noirc_errors::{CustomDiagnostic, Span};
+use noirc_errors::Span;
 
 use crate::graph::CrateId;
+use crate::hir::def_collector::errors::DefCollectorErrorKind;
 use crate::token::SecondaryAttribute;
 use crate::{
     hir::Context, BlockExpression, CallExpression, CastExpression, Distinctness, Expression,
@@ -11,7 +12,7 @@ use crate::{
     Visibility,
 };
 use crate::{PrefixExpression, UnaryOp};
-use noirc_errors::FileDiagnostic;
+use fm::FileId;
 
 //
 //                    Helper macros for creating noir ast nodes
@@ -155,8 +156,7 @@ pub(crate) fn transform(
     mut ast: ParsedModule,
     crate_id: &CrateId,
     context: &Context,
-    errors: &mut Vec<FileDiagnostic>,
-) -> ParsedModule {
+) -> Result<ParsedModule, (DefCollectorErrorKind, FileId)> {
     // Usage -> mut ast -> aztec_library::transform(&mut ast)
 
     // Covers all functions in the ast
@@ -164,11 +164,15 @@
         let storage_defined = check_for_storage_definition(&submodule.contents);
 
         if transform_module(&mut submodule.contents.functions, storage_defined) {
-            check_for_aztec_dependency(crate_id, context, errors);
-            include_relevant_imports(&mut submodule.contents);
+            match check_for_aztec_dependency(crate_id, context) {
+                Ok(()) => include_relevant_imports(&mut submodule.contents),
+                Err(file_id) => {
+                    return Err((DefCollectorErrorKind::AztecNotFound {}, file_id));
+                }
+            }
         }
     }
-    ast
+    Ok(ast)
 }
 
 /// Includes an import to the aztec library if it has not been included yet
@@ -187,21 +191,13 @@ fn include_relevant_imports(ast: &mut ParsedModule) {
 }
 
 /// Creates an error alerting the user that they have not downloaded the Aztec-noir library
-fn check_for_aztec_dependency(
-    crate_id: &CrateId,
-    context: &Context,
-    errors: &mut Vec<FileDiagnostic>,
-) {
+fn check_for_aztec_dependency(crate_id: &CrateId, context: &Context) -> Result<(), FileId> {
     let crate_graph = &context.crate_graph[crate_id];
     let has_aztec_dependency = crate_graph.dependencies.iter().any(|dep| dep.as_name() == "aztec");
-
-    if !has_aztec_dependency {
-        errors.push(FileDiagnostic::new(
-            crate_graph.root_file_id,
-            CustomDiagnostic::from_message(
-                "Aztec dependency not found. Please add aztec as a dependency in your Cargo.toml",
-            ),
-        ));
+    if has_aztec_dependency {
+        Ok(())
+    } else {
+        Err(crate_graph.root_file_id)
     }
 }
 
diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs
index 017027a1da2..4226454f903 100644
--- a/compiler/noirc_frontend/src/hir/def_map/mod.rs
+++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs
@@ -1,14 +1,13 @@
 use crate::graph::CrateId;
-use crate::hir::def_collector::dc_crate::DefCollector;
+use crate::hir::def_collector::dc_crate::{CompilationError, DefCollector};
 use crate::hir::Context;
 use crate::node_interner::{FuncId, NodeInterner};
-use crate::parser::{parse_program, ParsedModule};
+use crate::parser::{parse_program, ParsedModule, ParserError};
 use crate::token::{FunctionAttribute, TestScope};
 use arena::{Arena, Index};
 use fm::{FileId, FileManager};
-use noirc_errors::{FileDiagnostic, Location};
+use noirc_errors::Location;
 use std::collections::BTreeMap;
-
 mod module_def;
 pub use module_def::*;
 mod item_scope;
@@ -73,23 +72,29 @@ impl CrateDefMap {
     pub fn collect_defs(
         crate_id: CrateId,
         context: &mut Context,
-        errors: &mut Vec<FileDiagnostic>,
-    ) {
+    ) -> Vec<(CompilationError, FileId)> {
         // Check if this Crate has already been compiled
         // XXX: There is probably a better alternative for this.
         // Without this check, the compiler will panic as it does not
         // expect the same crate to be processed twice. It would not
        // make the implementation wrong, if the same crate was processed twice, it just makes it slow.
+        let mut errors: Vec<(CompilationError, FileId)> = vec![];
         if context.def_map(&crate_id).is_some() {
-            return;
+            return errors;
         }
 
         // First parse the root file.
         let root_file_id = context.crate_graph[crate_id].root_file_id;
-        let ast = parse_file(&context.file_manager, root_file_id, errors);
+        let (ast, parsing_errors) = parse_file(&context.file_manager, root_file_id);
 
         #[cfg(feature = "aztec")]
-        let ast = aztec_library::transform(ast, &crate_id, context, errors);
+        let ast = match aztec_library::transform(ast, &crate_id, context) {
+            Ok(ast) => ast,
+            Err((error, file_id)) => {
+                errors.push((error.into(), file_id));
+                return errors;
+            }
+        };
 
         // Allocate a default Module for the root, giving it a ModuleId
         let mut modules: Arena<ModuleData> = Arena::default();
@@ -104,7 +109,11 @@ impl CrateDefMap {
         };
 
         // Now we want to populate the CrateDefMap using the DefCollector
-        DefCollector::collect(def_map, context, ast, root_file_id, errors);
+        errors.extend(DefCollector::collect(def_map, context, ast, root_file_id));
+        errors.extend(
+            parsing_errors.iter().map(|e| (e.clone().into(), root_file_id)).collect::<Vec<_>>(),
+        );
+        errors
     }
 
     pub fn root(&self) -> LocalModuleId {
@@ -237,15 +246,11 @@ pub struct Contract {
 }
 
 /// Given a FileId, fetch the File, from the FileManager and parse it's content
-pub fn parse_file(
-    fm: &FileManager,
-    file_id: FileId,
-    all_errors: &mut Vec<FileDiagnostic>,
-) -> ParsedModule {
+pub fn parse_file(fm: &FileManager, file_id: FileId) -> (ParsedModule, Vec<ParserError>) {
     let file = fm.fetch_file(file_id);
     let (program, errors) = parse_program(file.source());
-    all_errors.extend(errors.into_iter().map(|error| error.in_file(file_id)));
-    program
+
+    (program, errors)
 }
 
 impl std::ops::Index<LocalModuleId> for CrateDefMap {
diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs
index 36f50525d00..f2f3ae0c5a5 100644
--- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs
+++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs
@@ -1570,6 +1570,7 @@ mod test {
     use crate::hir::resolution::import::PathResolutionError;
     use crate::hir::resolution::resolver::StmtId;
 
+    use super::{PathResolver, Resolver};
     use crate::graph::CrateId;
     use crate::hir_def::expr::HirExpression;
     use crate::hir_def::stmt::HirStatement;
@@ -1579,8 +1580,7 @@ mod test {
         hir::def_map::{CrateDefMap, LocalModuleId, ModuleDefId},
         parse_program, Path,
     };
-
-    use super::{PathResolver, Resolver};
+    use noirc_errors::CustomDiagnostic;
 
     // func_namespace is used to emulate the fact that functions can be imported
     // and functions can be forward declared
@@ -1589,7 +1589,10 @@
     ) -> (ParsedModule, NodeInterner, BTreeMap<CrateId, CrateDefMap>, FileId, TestPathResolver) {
         let (program, errors) = parse_program(src);
 
-        if errors.iter().any(|e| e.is_error()) {
+        if errors.iter().any(|e| {
+            let diagnostic: CustomDiagnostic = e.clone().into();
+            diagnostic.is_error()
+        }) {
             panic!("Unexpected parse errors in test code: {:?}", errors);
         }
 
diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs
index 3da674f6552..e88900f1f88 100644
--- a/compiler/noirc_frontend/src/parser/parser.rs
+++ b/compiler/noirc_frontend/src/parser/parser.rs
@@ -46,7 +46,7 @@ use crate::{
 
 use chumsky::prelude::*;
 use iter_extended::vecmap;
-use noirc_errors::{CustomDiagnostic, Span, Spanned};
+use noirc_errors::{Span, Spanned};
 
 /// Entry function for the parser - also handles lexing internally.
 ///
@@ -54,14 +54,10 @@ use noirc_errors::{CustomDiagnostic, Span, Spanned};
 /// of the program along with any parsing errors encountered. If the parsing errors
 /// Vec is non-empty, there may be Error nodes in the Ast to fill in the gaps that
 /// failed to parse. Otherwise the Ast is guaranteed to have 0 Error nodes.
-pub fn parse_program(source_program: &str) -> (ParsedModule, Vec<CustomDiagnostic>) {
-    let (tokens, lexing_errors) = Lexer::lex(source_program);
-    let mut errors = vecmap(lexing_errors, Into::into);
-
+pub fn parse_program(source_program: &str) -> (ParsedModule, Vec<ParserError>) {
+    let (tokens, _lexing_errors) = Lexer::lex(source_program);
     let (module, parsing_errors) = program().parse_recovery_verbose(tokens);
-    errors.extend(parsing_errors.into_iter().map(Into::into));
-
-    (module.unwrap(), errors)
+    (module.unwrap(), parsing_errors)
 }
 
 /// program: module EOF