diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml deleted file mode 100644 index 71207793e53..00000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ /dev/null @@ -1,120 +0,0 @@ -name: Bug Report -description: Report an unexpected behavior. -labels: ["bug"] -body: - - type: markdown - attributes: - value: | - # Description - Thanks for taking the time to create the Issue and welcome to the Noir community! - - type: textarea - id: aim - attributes: - label: Aim - description: Describe what you tried to achieve. - validations: - required: true - - type: textarea - id: expected - attributes: - label: Expected Behavior - description: Describe what you expected to happen. - validations: - required: true - - type: textarea - id: bug - attributes: - label: Bug - description: Describe the bug. Supply error codes / terminal logs if applicable. - validations: - required: true - - type: textarea - id: reproduction - attributes: - label: To Reproduce - description: Describe the steps to reproduce the behavior. - value: | - 1. - 2. - 3. - 4. - - type: dropdown - id: impact - attributes: - label: Project Impact - description: How does this affect a project you or others are working on? - options: - - "Nice-to-have" - - "Blocker" - - type: textarea - id: impact_context - attributes: - label: Impact Context - description: If a nice-to-have / blocker, supplement how does this Issue affect the project. - - type: dropdown - id: workaround - attributes: - label: Workaround - description: Is there a workaround for this Issue? - options: - - "Yes" - - type: textarea - id: workaround_description - attributes: - label: Workaround Description - description: If yes, supplement how could the Issue be worked around. - - type: textarea - id: additional - attributes: - label: Additional Context - description: Supplement further information if applicable. - - type: markdown - attributes: - value: | - # Environment - Specify your version of Noir tooling used. - - type: markdown - attributes: - value: | - ## Nargo (CLI) - - type: dropdown - id: nargo-install - attributes: - label: Installation Method - description: How did you install Nargo? - options: - - Binary (`noirup` default) - - Compiled from source - - type: input - id: nargo-version - attributes: - label: Nargo Version - description: Output of running `nargo --version` - placeholder: "nargo version = 0.23.0 noirc version = 0.23.0+5be9f9d7e2f39ca228df10e5a530474af0331704 (git version hash: 5be9f9d7e2f39ca228df10e5a530474af0331704, is dirty: false)" - - type: markdown - attributes: - value: | - ## NoirJS (JavaScript) - - type: input - id: noirjs-version - attributes: - label: NoirJS Version - description: Version number of `noir_js` in `package.json` - placeholder: "0.23.0" - - type: markdown - attributes: - value: | - # Pull Request - - type: dropdown - id: pr_preference - attributes: - label: Would you like to submit a PR for this Issue? - description: Fellow contributors are happy to provide support where applicable. - options: - - "Maybe" - - "Yes" - - type: textarea - id: pr_support - attributes: - label: Support Needs - description: Support from other contributors you are looking for to create a PR for this Issue. 
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml deleted file mode 100644 index abbfe392454..00000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ /dev/null @@ -1,71 +0,0 @@ -name: Feature Request -description: Suggest an idea for this project. -labels: ["enhancement"] -body: - - type: markdown - attributes: - value: | - ## Description - Thanks for taking the time to create the Issue and welcome to the Noir community! - - type: textarea - id: problem - attributes: - label: Problem - description: Describe what you feel lacking. Supply code / step-by-step examples if applicable. - validations: - required: true - - type: textarea - id: solution - attributes: - label: Happy Case - description: Describe how you think it should work. Supply pseudocode / step-by-step examples if applicable. - validations: - required: true - - type: dropdown - id: impact - attributes: - label: Project Impact - description: How does this affect a project you or others are working on? - options: - - "Nice-to-have" - - "Blocker" - - type: textarea - id: impact_context - attributes: - label: Impact Context - description: If a nice-to-have / blocker, supplement how does this Issue affect the project. - - type: dropdown - id: workaround - attributes: - label: Workaround - description: Is there a workaround for this Issue? - options: - - "Yes" - - type: textarea - id: workaround_description - attributes: - label: Workaround Description - description: If yes, supplement how could the Issue be worked around. - - type: textarea - id: additional - attributes: - label: Additional Context - description: Supplement further information if applicable. - - type: markdown - attributes: - value: | - ## Pull Request - - type: dropdown - id: pr-preference - attributes: - label: Would you like to submit a PR for this Issue? - description: Fellow contributors are happy to provide support where applicable. - multiple: false - options: - - "Maybe" - - "Yes" - - type: textarea - id: pr-support - attributes: - label: Support Needs - description: Support from other contributors you are looking for to create a PR for this Issue. 
diff --git a/Cargo.lock b/Cargo.lock index f6011b705e5..bd70c8fef2c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -48,6 +48,7 @@ dependencies = [ "brillig_vm", "indexmap 1.9.3", "num-bigint", + "proptest", "serde", "thiserror", "tracing", diff --git a/acvm-repo/acir/src/circuit/mod.rs b/acvm-repo/acir/src/circuit/mod.rs index 5d749e709b3..00d0933a3aa 100644 --- a/acvm-repo/acir/src/circuit/mod.rs +++ b/acvm-repo/acir/src/circuit/mod.rs @@ -377,11 +377,13 @@ mod tests { output: Witness(3), }) } + fn range_opcode() -> Opcode { Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { input: FunctionInput::witness(Witness(1), 8), }) } + fn keccakf1600_opcode() -> Opcode { let inputs: Box<[FunctionInput; 25]> = Box::new(std::array::from_fn(|i| FunctionInput::witness(Witness(i as u32 + 1), 8))); @@ -389,6 +391,7 @@ mod tests { Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Keccakf1600 { inputs, outputs }) } + fn schnorr_verify_opcode() -> Opcode { let public_key_x = FunctionInput::witness(Witness(1), FieldElement::max_num_bits()); let public_key_y = FunctionInput::witness(Witness(2), FieldElement::max_num_bits()); diff --git a/acvm-repo/acvm/Cargo.toml b/acvm-repo/acvm/Cargo.toml index 5b6397a1011..4cda53de241 100644 --- a/acvm-repo/acvm/Cargo.toml +++ b/acvm-repo/acvm/Cargo.toml @@ -38,3 +38,4 @@ bls12_381 = [ [dev-dependencies] ark-bls12-381 = { version = "^0.4.0", default-features = false, features = ["curve"] } +proptest.workspace = true diff --git a/acvm-repo/acvm/tests/solver.proptest-regressions b/acvm-repo/acvm/tests/solver.proptest-regressions new file mode 100644 index 00000000000..35627c1fbae --- /dev/null +++ b/acvm-repo/acvm/tests/solver.proptest-regressions @@ -0,0 +1,13 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. 
+cc e4dd0e141df173f5dfdfb186bba4154247ec284b71d8f294fa3282da953a0e92 # shrinks to x = 0, y = 1 +cc 419ed6fdf1bf1f2513889c42ec86c665c9d0500ceb075cbbd07f72444dbd78c6 # shrinks to x = 266672725 +cc 0810fc9e126b56cf0a0ddb25e0dc498fa3b2f1980951550403479fc01c209833 # shrinks to modulus = [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48], zero_or_ones_constant = false, use_constant = false +cc 735ee9beb1a1dbb82ded6f30e544d7dfde149957e5d45a8c96fc65a690b6b71c # shrinks to (xs, modulus) = ([(0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (49, false)], [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48]) +cc ca81bc11114a2a2b34021f44ecc1e10cb018e35021ef4d728e07a6791dad38d6 # shrinks to (xs, modulus) = ([(0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (49, false)], [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48]) +cc 6c1d571a0111e6b4c244dc16da122ebab361e77b71db7770d638076ab21a717b # shrinks to (xs, modulus) = ([(0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (49, false)], [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48]) +cc ccb7061ab6b85e2554d00bf03d74204977ed7a4109d7e2d5c6b5aaa2179cfaf9 # shrinks to (xs, modulus) = ([(0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (49, false)], [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48]) diff --git a/acvm-repo/acvm/tests/solver.rs b/acvm-repo/acvm/tests/solver.rs index e55dbb73ae1..279b0444609 100644 --- a/acvm-repo/acvm/tests/solver.rs +++ b/acvm-repo/acvm/tests/solver.rs @@ -5,7 +5,7 @@ use acir::{ brillig::{BinaryFieldOp, HeapArray, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray}, circuit::{ brillig::{BrilligBytecode, BrilligInputs, BrilligOutputs}, - opcodes::{BlockId, BlockType, MemOp}, + opcodes::{BlackBoxFuncCall, BlockId, BlockType, FunctionInput, MemOp}, Opcode, OpcodeLocation, }, 
native_types::{Expression, Witness, WitnessMap}, @@ -16,6 +16,10 @@ use acvm::pwg::{ACVMStatus, ErrorLocation, ForeignCallWaitInfo, OpcodeResolution use acvm_blackbox_solver::StubbedBlackBoxSolver; use brillig_vm::brillig::HeapValueType; +use proptest::arbitrary::any; +use proptest::prelude::*; +use proptest::result::maybe_ok; + // Reenable these test cases once we move the brillig implementation of inversion down into the acvm stdlib. #[test] @@ -722,3 +726,187 @@ fn memory_operations() { assert_eq!(witness_map[&Witness(8)], FieldElement::from(6u128)); } + +// Solve the given BlackBoxFuncCall with witnesses: 1, 2 as x, y, resp. +#[cfg(test)] +fn solve_blackbox_func_call( + blackbox_func_call: impl Fn( + Option<FieldElement>, + Option<FieldElement>, + ) -> BlackBoxFuncCall<FieldElement>, + x: (FieldElement, bool), // if false, use a Witness + y: (FieldElement, bool), // if false, use a Witness +) -> FieldElement { + let (x, x_constant) = x; + let (y, y_constant) = y; + + let initial_witness = WitnessMap::from(BTreeMap::from_iter([(Witness(1), x), (Witness(2), y)])); + + let mut lhs = None; + if x_constant { + lhs = Some(x); + } + + let mut rhs = None; + if y_constant { + rhs = Some(y); + } + + let op = Opcode::BlackBoxFuncCall(blackbox_func_call(lhs, rhs)); + let opcodes = vec![op]; + let unconstrained_functions = vec![]; + let mut acvm = + ACVM::new(&StubbedBlackBoxSolver, &opcodes, initial_witness, &unconstrained_functions, &[]); + let solver_status = acvm.solve(); + assert_eq!(solver_status, ACVMStatus::Solved); + let witness_map = acvm.finalize(); + + witness_map[&Witness(3)] +} + +fn function_input_from_option( + witness: Witness, + opt_constant: Option<FieldElement>, +) -> FunctionInput<FieldElement> { + opt_constant + .map(|constant| FunctionInput::constant(constant, FieldElement::max_num_bits())) + .unwrap_or(FunctionInput::witness(witness, FieldElement::max_num_bits())) +} + +fn and_op(x: Option<FieldElement>, y: Option<FieldElement>) -> BlackBoxFuncCall<FieldElement> { + let lhs = function_input_from_option(Witness(1), x); + let rhs = function_input_from_option(Witness(2), y); + BlackBoxFuncCall::AND { lhs, rhs, output: Witness(3) } +} + +fn xor_op(x: Option<FieldElement>, y: Option<FieldElement>) -> BlackBoxFuncCall<FieldElement> { + let lhs = function_input_from_option(Witness(1), x); + let rhs = function_input_from_option(Witness(2), y); + BlackBoxFuncCall::XOR { lhs, rhs, output: Witness(3) } +} + +fn prop_assert_commutative( + op: impl Fn(Option<FieldElement>, Option<FieldElement>) -> BlackBoxFuncCall<FieldElement>, + x: (FieldElement, bool), + y: (FieldElement, bool), +) -> (FieldElement, FieldElement) { + (solve_blackbox_func_call(&op, x, y), solve_blackbox_func_call(&op, y, x)) +} + +fn prop_assert_associative( + op: impl Fn(Option<FieldElement>, Option<FieldElement>) -> BlackBoxFuncCall<FieldElement>, + x: (FieldElement, bool), + y: (FieldElement, bool), + z: (FieldElement, bool), + use_constant_xy: bool, + use_constant_yz: bool, +) -> (FieldElement, FieldElement) { + let f_xy = (solve_blackbox_func_call(&op, x, y), use_constant_xy); + let f_f_xy_z = solve_blackbox_func_call(&op, f_xy, z); + + let f_yz = (solve_blackbox_func_call(&op, y, z), use_constant_yz); + let f_x_f_yz = solve_blackbox_func_call(&op, x, f_yz); + + (f_f_xy_z, f_x_f_yz) +} + +fn prop_assert_identity_l( + op: impl Fn(Option<FieldElement>, Option<FieldElement>) -> BlackBoxFuncCall<FieldElement>, + op_identity: (FieldElement, bool), + x: (FieldElement, bool), +) -> (FieldElement, FieldElement) { + (solve_blackbox_func_call(op, op_identity, x), x.0) +} + +fn prop_assert_zero_l( + op: impl Fn(Option<FieldElement>, Option<FieldElement>) -> BlackBoxFuncCall<FieldElement>, + op_zero: (FieldElement, bool), + x: (FieldElement, bool), +) -> (FieldElement, FieldElement) { + (solve_blackbox_func_call(op, op_zero, x),
FieldElement::zero()) +} + +prop_compose! { + // Use both `u128` and hex proptest strategies + fn field_element() + (u128_or_hex in maybe_ok(any::(), "[0-9a-f]{64}"), + constant_input: bool) + -> (FieldElement, bool) + { + match u128_or_hex { + Ok(number) => (FieldElement::from(number), constant_input), + Err(hex) => (FieldElement::from_hex(&hex).expect("should accept any 32 byte hex string"), constant_input), + } + } +} + +fn field_element_ones() -> FieldElement { + let exponent: FieldElement = (253_u128).into(); + FieldElement::from(2u128).pow(&exponent) - FieldElement::one() +} + +proptest! { + + #[test] + fn and_commutative(x in field_element(), y in field_element()) { + let (lhs, rhs) = prop_assert_commutative(and_op, x, y); + prop_assert_eq!(lhs, rhs); + } + + #[test] + fn xor_commutative(x in field_element(), y in field_element()) { + let (lhs, rhs) = prop_assert_commutative(xor_op, x, y); + prop_assert_eq!(lhs, rhs); + } + + #[test] + fn and_associative(x in field_element(), y in field_element(), z in field_element(), use_constant_xy: bool, use_constant_yz: bool) { + let (lhs, rhs) = prop_assert_associative(and_op, x, y, z, use_constant_xy, use_constant_yz); + prop_assert_eq!(lhs, rhs); + } + + #[test] + // TODO(https://github.com/noir-lang/noir/issues/5638) + #[should_panic(expected = "assertion failed: `(left == right)`")] + fn xor_associative(x in field_element(), y in field_element(), z in field_element(), use_constant_xy: bool, use_constant_yz: bool) { + let (lhs, rhs) = prop_assert_associative(xor_op, x, y, z, use_constant_xy, use_constant_yz); + prop_assert_eq!(lhs, rhs); + } + + // test that AND(x, x) == x + #[test] + fn and_self_identity(x in field_element()) { + prop_assert_eq!(solve_blackbox_func_call(and_op, x, x), x.0); + } + + // test that XOR(x, x) == 0 + #[test] + fn xor_self_zero(x in field_element()) { + prop_assert_eq!(solve_blackbox_func_call(xor_op, x, x), FieldElement::zero()); + } + + #[test] + fn and_identity_l(x in field_element(), ones_constant: bool) { + let ones = (field_element_ones(), ones_constant); + let (lhs, rhs) = prop_assert_identity_l(and_op, ones, x); + if x <= ones { + prop_assert_eq!(lhs, rhs); + } else { + prop_assert!(lhs != rhs); + } + } + + #[test] + fn xor_identity_l(x in field_element(), zero_constant: bool) { + let zero = (FieldElement::zero(), zero_constant); + let (lhs, rhs) = prop_assert_identity_l(xor_op, zero, x); + prop_assert_eq!(lhs, rhs); + } + + #[test] + fn and_zero_l(x in field_element(), ones_constant: bool) { + let zero = (FieldElement::zero(), ones_constant); + let (lhs, rhs) = prop_assert_zero_l(and_op, zero, x); + prop_assert_eq!(lhs, rhs); + } +} diff --git a/aztec_macros/src/transforms/note_interface.rs b/aztec_macros/src/transforms/note_interface.rs index b2ec2a8b7c3..1bf84779a45 100644 --- a/aztec_macros/src/transforms/note_interface.rs +++ b/aztec_macros/src/transforms/note_interface.rs @@ -74,7 +74,6 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt generics: vec![], methods: vec![], where_clause: vec![], - is_comptime: false, }; module.impls.push(default_impl.clone()); module.impls.last_mut().unwrap() diff --git a/aztec_macros/src/transforms/storage.rs b/aztec_macros/src/transforms/storage.rs index 073152a297b..6999234919f 100644 --- a/aztec_macros/src/transforms/storage.rs +++ b/aztec_macros/src/transforms/storage.rs @@ -246,7 +246,6 @@ pub fn generate_storage_implementation( methods: vec![(init, Span::default())], where_clause: vec![], - is_comptime: false, }; 
module.impls.push(storage_impl); diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index ac4da2892fb..8ce2e1a41c0 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -461,6 +461,18 @@ pub struct PathSegment { pub span: Span, } +impl PathSegment { + /// Returns the span where turbofish happen. For example: + /// + /// foo:: + /// ~^^^^ + /// + /// Returns an empty span at the end of `foo` if there's no turbofish. + pub fn turbofish_span(&self) -> Span { + Span::from(self.ident.span().end()..self.span.end()) + } +} + impl From for PathSegment { fn from(ident: Ident) -> PathSegment { let span = ident.span(); diff --git a/compiler/noirc_frontend/src/ast/structure.rs b/compiler/noirc_frontend/src/ast/structure.rs index 112747e09fb..732cbee9232 100644 --- a/compiler/noirc_frontend/src/ast/structure.rs +++ b/compiler/noirc_frontend/src/ast/structure.rs @@ -14,7 +14,6 @@ pub struct NoirStruct { pub generics: UnresolvedGenerics, pub fields: Vec<(Ident, UnresolvedType)>, pub span: Span, - pub is_comptime: bool, } impl Display for NoirStruct { diff --git a/compiler/noirc_frontend/src/ast/traits.rs b/compiler/noirc_frontend/src/ast/traits.rs index b23fbaede61..f8f8ef667b4 100644 --- a/compiler/noirc_frontend/src/ast/traits.rs +++ b/compiler/noirc_frontend/src/ast/traits.rs @@ -53,7 +53,6 @@ pub struct TypeImpl { pub generics: UnresolvedGenerics, pub where_clause: Vec, pub methods: Vec<(NoirFunction, Span)>, - pub is_comptime: bool, } /// Ast node for an implementation of a trait for a particular type @@ -70,8 +69,6 @@ pub struct NoirTraitImpl { pub where_clause: Vec, pub items: Vec, - - pub is_comptime: bool, } /// Represents a simple trait constraint such as `where Foo: TraitY` diff --git a/compiler/noirc_frontend/src/elaborator/expressions.rs b/compiler/noirc_frontend/src/elaborator/expressions.rs index 1683409547e..295297cc738 100644 --- a/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -39,7 +39,7 @@ impl<'context> Elaborator<'context> { let (hir_expr, typ) = match expr.kind { ExpressionKind::Literal(literal) => self.elaborate_literal(literal, expr.span), ExpressionKind::Block(block) => self.elaborate_block(block), - ExpressionKind::Prefix(prefix) => return self.elaborate_prefix(*prefix), + ExpressionKind::Prefix(prefix) => return self.elaborate_prefix(*prefix, expr.span), ExpressionKind::Index(index) => self.elaborate_index(*index), ExpressionKind::Call(call) => self.elaborate_call(*call, expr.span), ExpressionKind::MethodCall(call) => self.elaborate_method_call(*call, expr.span), @@ -225,8 +225,7 @@ impl<'context> Elaborator<'context> { (HirExpression::Literal(HirLiteral::FmtStr(str, fmt_str_idents)), typ) } - fn elaborate_prefix(&mut self, prefix: PrefixExpression) -> (ExprId, Type) { - let span = prefix.rhs.span; + fn elaborate_prefix(&mut self, prefix: PrefixExpression, span: Span) -> (ExprId, Type) { let (rhs, rhs_type) = self.elaborate_expression(prefix.rhs); let trait_id = self.interner.get_prefix_operator_trait_method(&prefix.operator); @@ -430,23 +429,14 @@ impl<'context> Elaborator<'context> { } }; - let struct_generics = if let Some(turbofish_generics) = &last_segment.generics { - if turbofish_generics.len() == struct_generics.len() { - let struct_type = r#type.borrow(); - self.resolve_turbofish_generics(&struct_type.generics, turbofish_generics.clone()) - } else { - 
self.push_err(TypeCheckError::GenericCountMismatch { - item: format!("struct {}", last_segment.ident), - expected: struct_generics.len(), - found: turbofish_generics.len(), - span: Span::from(last_segment.ident.span().end()..last_segment.span.end()), - }); + let turbofish_span = last_segment.turbofish_span(); - struct_generics - } - } else { - struct_generics - }; + let struct_generics = self.resolve_struct_turbofish_generics( + &r#type.borrow(), + struct_generics, + last_segment.generics, + turbofish_span, + ); let struct_type = r#type.clone(); let generics = struct_generics.clone(); diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs index f103e3a7954..d224e38372f 100644 --- a/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/compiler/noirc_frontend/src/elaborator/mod.rs @@ -11,7 +11,7 @@ use crate::{ def_collector::{ dc_crate::{ filter_literal_globals, CompilationError, ImplMap, UnresolvedGlobal, - UnresolvedStruct, UnresolvedTypeAlias, + UnresolvedStruct, UnresolvedTrait, UnresolvedTypeAlias, }, dc_mod, }, @@ -251,15 +251,7 @@ impl<'context> Elaborator<'context> { debug_comptime_in_file: Option, ) -> Self { let mut this = Self::from_context(context, crate_id, debug_comptime_in_file); - - // Filter out comptime items to execute their functions first if needed. - // This step is why comptime items can only refer to other comptime items - // in the same crate, but can refer to any item in dependencies. Trying to - // run these at the same time as other items would lead to them seeing empty - // function bodies from functions that have yet to be elaborated. - let (comptime_items, runtime_items) = Self::filter_comptime_items(items); - this.elaborate_items(comptime_items); - this.elaborate_items(runtime_items); + this.elaborate_items(items); this.check_and_pop_function_context(); this } @@ -284,11 +276,11 @@ impl<'context> Elaborator<'context> { } // Must resolve structs before we resolve globals. - let mut generated_items = self.collect_struct_definitions(items.types); + self.collect_struct_definitions(&items.types); self.define_function_metas(&mut items.functions, &mut items.impls, &mut items.trait_impls); - self.collect_traits(items.traits, &mut generated_items); + self.collect_traits(&items.traits); // Before we resolve any function symbols we must go through our impls and // re-collect the methods within into their proper module. This cannot be @@ -312,7 +304,7 @@ impl<'context> Elaborator<'context> { // We have to run any comptime attributes on functions before the function is elaborated // since the generated items are checked beforehand as well. - self.run_attributes_on_functions(&items.functions, &mut generated_items); + let generated_items = self.run_attributes(&items.traits, &items.types, &items.functions); // After everything is collected, we can elaborate our generated items. 
// It may be better to inline these within `items` entirely since elaborating them @@ -626,7 +618,11 @@ impl<'context> Elaborator<'context> { self.resolve_trait_bound(&constraint.trait_bound, typ) } - fn resolve_trait_bound(&mut self, bound: &TraitBound, typ: Type) -> Option { + pub fn resolve_trait_bound( + &mut self, + bound: &TraitBound, + typ: Type, + ) -> Option { let the_trait = self.lookup_trait_or_error(bound.trait_path.clone())?; let resolved_generics = &the_trait.generics.clone(); @@ -1119,30 +1115,20 @@ impl<'context> Elaborator<'context> { self.generics.clear(); } - fn collect_struct_definitions( - &mut self, - structs: BTreeMap, - ) -> CollectedItems { + fn collect_struct_definitions(&mut self, structs: &BTreeMap) { // This is necessary to avoid cloning the entire struct map // when adding checks after each struct field is resolved. let struct_ids = structs.keys().copied().collect::>(); - // This will contain any additional top-level items that are generated at compile-time - // via macros. This often includes derived trait impls. - let mut generated_items = CollectedItems::default(); - // Resolve each field in each struct. // Each struct should already be present in the NodeInterner after def collection. - for (type_id, mut typ) in structs { + for (type_id, typ) in structs { self.file = typ.file_id; self.local_module = typ.module_id; - let attributes = std::mem::take(&mut typ.struct_def.attributes); - let span = typ.struct_def.span; - - let fields = self.resolve_struct_fields(typ.struct_def, type_id); + let fields = self.resolve_struct_fields(&typ.struct_def, *type_id); let fields_len = fields.len(); - self.interner.update_struct(type_id, |struct_def| { + self.interner.update_struct(*type_id, |struct_def| { struct_def.set_fields(fields); // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this with implicit numeric generics @@ -1169,12 +1155,11 @@ impl<'context> Elaborator<'context> { }); for field_index in 0..fields_len { - self.interner - .add_definition_location(ReferenceId::StructMember(type_id, field_index), None); + self.interner.add_definition_location( + ReferenceId::StructMember(*type_id, field_index), + None, + ); } - - let item = Value::StructDefinition(type_id); - self.run_comptime_attributes_on_item(&attributes, item, span, &mut generated_items); } // Check whether the struct fields have nested slices @@ -1196,8 +1181,6 @@ impl<'context> Elaborator<'context> { } } } - - generated_items } fn run_comptime_attributes_on_item( @@ -1314,7 +1297,7 @@ impl<'context> Elaborator<'context> { pub fn resolve_struct_fields( &mut self, - unresolved: NoirStruct, + unresolved: &NoirStruct, struct_id: StructId, ) -> Vec<(Ident, Type)> { self.recover_generics(|this| { @@ -1325,7 +1308,9 @@ impl<'context> Elaborator<'context> { let struct_def = this.interner.get_struct(struct_id); this.add_existing_generics(&unresolved.generics, &struct_def.borrow().generics); - let fields = vecmap(unresolved.fields, |(ident, typ)| (ident, this.resolve_type(typ))); + let fields = vecmap(&unresolved.fields, |(ident, typ)| { + (ident.clone(), this.resolve_type(typ.clone())) + }); this.resolving_ids.remove(&struct_id); @@ -1504,66 +1489,6 @@ impl<'context> Elaborator<'context> { }) } - /// Filters out comptime items from non-comptime items. 
- /// Returns a pair of (comptime items, non-comptime items) - fn filter_comptime_items(mut items: CollectedItems) -> (CollectedItems, CollectedItems) { - let mut function_sets = Vec::with_capacity(items.functions.len()); - let mut comptime_function_sets = Vec::new(); - - for function_set in items.functions { - let mut functions = Vec::with_capacity(function_set.functions.len()); - let mut comptime_functions = Vec::new(); - - for function in function_set.functions { - if function.2.def.is_comptime { - comptime_functions.push(function); - } else { - functions.push(function); - } - } - - let file_id = function_set.file_id; - let self_type = function_set.self_type; - let trait_id = function_set.trait_id; - - if !comptime_functions.is_empty() { - comptime_function_sets.push(UnresolvedFunctions { - functions: comptime_functions, - file_id, - trait_id, - self_type: self_type.clone(), - }); - } - - function_sets.push(UnresolvedFunctions { functions, file_id, trait_id, self_type }); - } - - let (comptime_trait_impls, trait_impls) = - items.trait_impls.into_iter().partition(|trait_impl| trait_impl.is_comptime); - - let (comptime_structs, structs) = - items.types.into_iter().partition(|typ| typ.1.struct_def.is_comptime); - - let (comptime_globals, globals) = - items.globals.into_iter().partition(|global| global.stmt_def.comptime); - - let comptime = CollectedItems { - functions: comptime_function_sets, - types: comptime_structs, - type_aliases: BTreeMap::new(), - traits: BTreeMap::new(), - trait_impls: comptime_trait_impls, - globals: comptime_globals, - impls: rustc_hash::FxHashMap::default(), - }; - - items.functions = function_sets; - items.trait_impls = trait_impls; - items.types = structs; - items.globals = globals; - (comptime, items) - } - fn add_items( &mut self, items: Vec, @@ -1612,7 +1537,6 @@ impl<'context> Elaborator<'context> { methods, generics: trait_impl.impl_generics, where_clause: trait_impl.where_clause, - is_comptime: trait_impl.is_comptime, // These last fields are filled in later trait_id: None, @@ -1676,6 +1600,36 @@ impl<'context> Elaborator<'context> { } } + /// Run all the attributes on each item. The ordering is unspecified to users but currently + /// we run trait attributes first to (e.g.) register derive handlers before derive is + /// called on structs. + /// Returns any new items generated by attributes. 
+ fn run_attributes( + &mut self, + traits: &BTreeMap, + types: &BTreeMap, + functions: &[UnresolvedFunctions], + ) -> CollectedItems { + let mut generated_items = CollectedItems::default(); + + for (trait_id, trait_) in traits { + let attributes = &trait_.trait_def.attributes; + let item = Value::TraitDefinition(*trait_id); + let span = trait_.trait_def.span; + self.run_comptime_attributes_on_item(attributes, item, span, &mut generated_items); + } + + for (struct_id, struct_def) in types { + let attributes = &struct_def.struct_def.attributes; + let item = Value::StructDefinition(*struct_id); + let span = struct_def.struct_def.span; + self.run_comptime_attributes_on_item(attributes, item, span, &mut generated_items); + } + + self.run_attributes_on_functions(functions, &mut generated_items); + generated_items + } + fn run_attributes_on_functions( &mut self, function_sets: &[UnresolvedFunctions], diff --git a/compiler/noirc_frontend/src/elaborator/patterns.rs b/compiler/noirc_frontend/src/elaborator/patterns.rs index 7aab8d1a24c..ade5420bce4 100644 --- a/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -157,8 +157,12 @@ impl<'context> Elaborator<'context> { mutable: Option, new_definitions: &mut Vec, ) -> HirPattern { - let name_span = name.last_ident().span(); - let is_self_type = name.last_ident().is_self_type_name(); + let exclude_last_segment = true; + self.check_unsupported_turbofish_usage(&name, exclude_last_segment); + + let last_segment = name.last_segment(); + let name_span = last_segment.ident.span(); + let is_self_type = last_segment.ident.is_self_type_name(); let error_identifier = |this: &mut Self| { // Must create a name here to return a HirPattern::Identifier. Allowing @@ -178,6 +182,15 @@ impl<'context> Elaborator<'context> { } }; + let turbofish_span = last_segment.turbofish_span(); + + let generics = self.resolve_struct_turbofish_generics( + &struct_type.borrow(), + generics, + last_segment.generics, + turbofish_span, + ); + let actual_type = Type::Struct(struct_type.clone(), generics); let location = Location::new(span, self.file); @@ -426,6 +439,30 @@ impl<'context> Elaborator<'context> { }) } + pub(super) fn resolve_struct_turbofish_generics( + &mut self, + struct_type: &StructType, + generics: Vec, + unresolved_turbofish: Option>, + span: Span, + ) -> Vec { + let Some(turbofish_generics) = unresolved_turbofish else { + return generics; + }; + + if turbofish_generics.len() != generics.len() { + self.push_err(TypeCheckError::GenericCountMismatch { + item: format!("struct {}", struct_type.name), + expected: generics.len(), + found: turbofish_generics.len(), + span, + }); + return generics; + } + + self.resolve_turbofish_generics(&struct_type.generics, turbofish_generics) + } + pub(super) fn resolve_turbofish_generics( &mut self, generics: &[ResolvedGeneric], diff --git a/compiler/noirc_frontend/src/elaborator/traits.rs b/compiler/noirc_frontend/src/elaborator/traits.rs index a00e770218e..1e48fdd07e7 100644 --- a/compiler/noirc_frontend/src/elaborator/traits.rs +++ b/compiler/noirc_frontend/src/elaborator/traits.rs @@ -8,9 +8,7 @@ use crate::{ FunctionKind, TraitItem, UnresolvedGeneric, UnresolvedGenerics, UnresolvedTraitConstraint, }, hir::{ - def_collector::dc_crate::{ - CollectedItems, CompilationError, UnresolvedTrait, UnresolvedTraitImpl, - }, + def_collector::dc_crate::{CompilationError, UnresolvedTrait, UnresolvedTraitImpl}, type_check::TypeCheckError, }, hir_def::{ @@ -29,14 +27,10 @@ use crate::{ use 
super::Elaborator; impl<'context> Elaborator<'context> { - pub fn collect_traits( - &mut self, - traits: BTreeMap, - generated_items: &mut CollectedItems, - ) { + pub fn collect_traits(&mut self, traits: &BTreeMap) { for (trait_id, unresolved_trait) in traits { self.recover_generics(|this| { - let resolved_generics = this.interner.get_trait(trait_id).generics.clone(); + let resolved_generics = this.interner.get_trait(*trait_id).generics.clone(); this.add_existing_generics( &unresolved_trait.trait_def.generics, &resolved_generics, @@ -44,28 +38,23 @@ impl<'context> Elaborator<'context> { // Resolve order // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants) - let _ = this.resolve_trait_types(&unresolved_trait); + let _ = this.resolve_trait_types(unresolved_trait); // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) - let _ = this.resolve_trait_constants(&unresolved_trait); + let _ = this.resolve_trait_constants(unresolved_trait); // 3. Trait Methods - let methods = this.resolve_trait_methods(trait_id, &unresolved_trait); + let methods = this.resolve_trait_methods(*trait_id, unresolved_trait); - this.interner.update_trait(trait_id, |trait_def| { + this.interner.update_trait(*trait_id, |trait_def| { trait_def.set_methods(methods); }); - - let attributes = &unresolved_trait.trait_def.attributes; - let item = crate::hir::comptime::Value::TraitDefinition(trait_id); - let span = unresolved_trait.trait_def.span; - this.run_comptime_attributes_on_item(attributes, item, span, generated_items); }); // This check needs to be after the trait's methods are set since // the interner may set `interner.ordering_type` based on the result type // of the Cmp trait, if this is it. 
if self.crate_id.is_stdlib() { - self.interner.try_add_infix_operator_trait(trait_id); - self.interner.try_add_prefix_operator_trait(trait_id); + self.interner.try_add_infix_operator_trait(*trait_id); + self.interner.try_add_prefix_operator_trait(*trait_id); } } } diff --git a/compiler/noirc_frontend/src/elaborator/types.rs b/compiler/noirc_frontend/src/elaborator/types.rs index 430967d8a51..c134820811e 100644 --- a/compiler/noirc_frontend/src/elaborator/types.rs +++ b/compiler/noirc_frontend/src/elaborator/types.rs @@ -239,6 +239,7 @@ impl<'context> Elaborator<'context> { if let Some(type_alias) = self.lookup_type_alias(path.clone()) { let type_alias = type_alias.borrow(); + let actual_generic_count = args.len(); let expected_generic_count = type_alias.generics.len(); let type_alias_string = type_alias.to_string(); let id = type_alias.id; @@ -247,9 +248,13 @@ impl<'context> Elaborator<'context> { self.resolve_type_inner(arg, &generic.kind) }); - self.verify_generics_count(expected_generic_count, &mut args, span, || { - type_alias_string - }); + self.verify_generics_count( + expected_generic_count, + actual_generic_count, + &mut args, + span, + || type_alias_string, + ); if let Some(item) = self.current_item { self.interner.add_type_alias_dependency(item, id); @@ -279,6 +284,8 @@ impl<'context> Elaborator<'context> { } let expected_generic_count = struct_type.borrow().generics.len(); + let actual_generic_count = args.len(); + if !self.in_contract() && self .interner @@ -296,9 +303,13 @@ impl<'context> Elaborator<'context> { self.resolve_type_inner(arg, &generic.kind) }); - self.verify_generics_count(expected_generic_count, &mut args, span, || { - struct_type.borrow().to_string() - }); + self.verify_generics_count( + expected_generic_count, + actual_generic_count, + &mut args, + span, + || struct_type.borrow().to_string(), + ); if let Some(current_item) = self.current_item { let dependency_id = struct_type.borrow().id; @@ -333,15 +344,16 @@ impl<'context> Elaborator<'context> { fn verify_generics_count( &mut self, expected_count: usize, + actual_count: usize, args: &mut Vec, span: Span, type_name: impl FnOnce() -> String, ) { - if args.len() != expected_count { + if actual_count != expected_count { self.push_err(ResolverError::IncorrectGenericCount { span, item_name: type_name(), - actual: args.len(), + actual: actual_count, expected: expected_count, }); @@ -1623,8 +1635,7 @@ impl<'context> Elaborator<'context> { } if segment.generics.is_some() { - // From "foo::", create a span for just "::" - let span = Span::from(segment.ident.span().end()..segment.span.end()); + let span = segment.turbofish_span(); self.push_err(TypeCheckError::UnsupportedTurbofishUsage { span }); } } diff --git a/compiler/noirc_frontend/src/hir/comptime/errors.rs b/compiler/noirc_frontend/src/hir/comptime/errors.rs index 010c41d2385..137433b48ef 100644 --- a/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -2,6 +2,7 @@ use std::fmt::Display; use std::rc::Rc; use crate::{ + ast::TraitBound, hir::{def_collector::dc_crate::CompilationError, type_check::NoMatchingImplFoundError}, parser::ParserError, token::Tokens, @@ -49,13 +50,14 @@ pub enum InterpreterError { DebugEvaluateComptime { diagnostic: CustomDiagnostic, location: Location }, FailedToParseMacro { error: ParserError, tokens: Rc, rule: &'static str, file: FileId }, UnsupportedTopLevelItemUnquote { item: String, location: Location }, - NonComptimeFnCallInSameCrate { function: String, location: Location 
}, + ComptimeDependencyCycle { function: String, location: Location }, NoImpl { location: Location }, NoMatchingImplFound { error: NoMatchingImplFoundError, file: FileId }, ImplMethodTypeMismatch { expected: Type, actual: Type, location: Location }, BreakNotInLoop { location: Location }, ContinueNotInLoop { location: Location }, BlackBoxError(BlackBoxResolutionError, Location), + FailedToResolveTraitBound { trait_bound: TraitBound, location: Location }, Unimplemented { item: String, location: Location }, @@ -112,14 +114,15 @@ impl InterpreterError { | InterpreterError::CannotInlineMacro { location, .. } | InterpreterError::UnquoteFoundDuringEvaluation { location, .. } | InterpreterError::UnsupportedTopLevelItemUnquote { location, .. } - | InterpreterError::NonComptimeFnCallInSameCrate { location, .. } + | InterpreterError::ComptimeDependencyCycle { location, .. } | InterpreterError::Unimplemented { location, .. } | InterpreterError::NoImpl { location, .. } | InterpreterError::ImplMethodTypeMismatch { location, .. } | InterpreterError::DebugEvaluateComptime { location, .. } | InterpreterError::BlackBoxError(_, location) | InterpreterError::BreakNotInLoop { location, .. } - | InterpreterError::ContinueNotInLoop { location, .. } => *location, + | InterpreterError::ContinueNotInLoop { location, .. } + | InterpreterError::FailedToResolveTraitBound { location, .. } => *location, InterpreterError::FailedToParseMacro { error, file, .. } => { Location::new(error.span(), *file) @@ -342,10 +345,10 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { error.add_note(format!("Unquoted item was:\n{item}")); error } - InterpreterError::NonComptimeFnCallInSameCrate { function, location } => { - let msg = format!("`{function}` cannot be called in a `comptime` context here"); + InterpreterError::ComptimeDependencyCycle { function, location } => { + let msg = format!("Comptime dependency cycle while resolving `{function}`"); let secondary = - "This function must be `comptime` or in a separate crate to be called".into(); + "This function uses comptime code internally which calls into itself".into(); CustomDiagnostic::simple_error(msg, secondary, location.span) } InterpreterError::Unimplemented { item, location } => { @@ -373,6 +376,10 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { InterpreterError::BlackBoxError(error, location) => { CustomDiagnostic::simple_error(error.to_string(), String::new(), location.span) } + InterpreterError::FailedToResolveTraitBound { trait_bound, location } => { + let msg = format!("Failed to resolve trait bound `{trait_bound}`"); + CustomDiagnostic::simple_error(msg, String::new(), location.span) + } InterpreterError::NoMatchingImplFound { error, .. } => error.into(), InterpreterError::Break => unreachable!("Uncaught InterpreterError::Break"), InterpreterError::Continue => unreachable!("Uncaught InterpreterError::Continue"), diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index c1622d1fed0..a3d37fd76fc 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -18,6 +18,7 @@ use crate::monomorphization::{ undo_instantiation_bindings, }; use crate::token::Tokens; +use crate::TypeVariable; use crate::{ hir_def::{ expr::{ @@ -53,6 +54,12 @@ pub struct Interpreter<'local, 'interner> { in_loop: bool, current_function: Option, + + /// Maps each bound generic to each binding it has in the current callstack. 
+ /// Since the interpreter monomorphizes as it interprets, we can bind over the same generic + /// multiple times. Without this map, when one of these inner functions exits we would + /// unbind the generic completely instead of resetting it to its previous binding. + bound_generics: Vec>, } #[allow(unused)] @@ -62,26 +69,41 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { crate_id: CrateId, current_function: Option, ) -> Self { - Self { elaborator, crate_id, current_function, in_loop: false } + let bound_generics = Vec::new(); + Self { elaborator, crate_id, current_function, bound_generics, in_loop: false } } pub(crate) fn call_function( &mut self, function: FuncId, arguments: Vec<(Value, Location)>, - instantiation_bindings: TypeBindings, + mut instantiation_bindings: TypeBindings, location: Location, ) -> IResult { let trait_method = self.elaborator.interner.get_trait_method_id(function); + // To match the monomorphizer, we need to call follow_bindings on each of + // the instantiation bindings before we unbind the generics from the previous function. + // This is because the instantiation bindings refer to variables from the call site. + for (_, binding) in instantiation_bindings.values_mut() { + *binding = binding.follow_bindings(); + } + + self.unbind_generics_from_previous_function(); perform_instantiation_bindings(&instantiation_bindings); - let impl_bindings = + let mut impl_bindings = perform_impl_bindings(self.elaborator.interner, trait_method, function, location)?; + for (_, binding) in impl_bindings.values_mut() { + *binding = binding.follow_bindings(); + } + + self.remember_bindings(&instantiation_bindings, &impl_bindings); let result = self.call_function_inner(function, arguments, location); undo_instantiation_bindings(impl_bindings); undo_instantiation_bindings(instantiation_bindings); + self.rebind_generics_from_previous_function(); result } @@ -100,14 +122,6 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { }); } - let is_comptime = self.elaborator.interner.function_modifiers(&function).is_comptime; - if !is_comptime && meta.source_crate == self.crate_id { - // Calling non-comptime functions from within the current crate is restricted - // as non-comptime items will have not been elaborated yet. 
- let function = self.elaborator.interner.function_name(&function).to_owned(); - return Err(InterpreterError::NonComptimeFnCallInSameCrate { function, location }); - } - if meta.kind != FunctionKind::Normal { let return_type = meta.return_type().follow_bindings(); return self.call_special(function, arguments, return_type, location); @@ -159,7 +173,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { self.get_function_body(function, location) } else { let function = self.elaborator.interner.function_name(&function).to_owned(); - Err(InterpreterError::NonComptimeFnCallInSameCrate { function, location }) + Err(InterpreterError::ComptimeDependencyCycle { function, location }) } } } @@ -185,6 +199,9 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { } else if let Some(oracle) = func_attrs.oracle() { if oracle == "print" { self.print_oracle(arguments) + // Ignore debugger functions + } else if oracle.starts_with("__debug") { + Ok(Value::Unit) } else { let item = format!("Comptime evaluation for oracle functions like {oracle}"); Err(InterpreterError::Unimplemented { item, location }) @@ -258,6 +275,42 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { self.elaborator.comptime_scopes.last_mut().unwrap() } + fn unbind_generics_from_previous_function(&mut self) { + if let Some(bindings) = self.bound_generics.last() { + for var in bindings.keys() { + var.unbind(var.id()); + } + } + // Push a new bindings list for the current function + self.bound_generics.push(HashMap::default()); + } + + fn rebind_generics_from_previous_function(&mut self) { + // Remove the currently bound generics first. + self.bound_generics.pop(); + + if let Some(bindings) = self.bound_generics.last() { + for (var, binding) in bindings { + var.force_bind(binding.clone()); + } + } + } + + fn remember_bindings(&mut self, main_bindings: &TypeBindings, impl_bindings: &TypeBindings) { + let bound_generics = self + .bound_generics + .last_mut() + .expect("remember_bindings called with no bound_generics on the stack"); + + for (var, binding) in main_bindings.values() { + bound_generics.insert(var.clone(), binding.follow_bindings()); + } + + for (var, binding) in impl_bindings.values() { + bound_generics.insert(var.clone(), binding.follow_bindings()); + } + } + pub(super) fn define_pattern( &mut self, pattern: &HirPattern, diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 92890cb66b8..b35790fd3d4 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -6,13 +6,13 @@ use std::{ use acvm::{AcirField, FieldElement}; use chumsky::Parser; use iter_extended::{try_vecmap, vecmap}; -use noirc_errors::{Location, Span}; +use noirc_errors::Location; use rustc_hash::FxHashMap as HashMap; use crate::{ - ast::{IntegerBitSize, TraitBound}, + ast::IntegerBitSize, hir::comptime::{errors::IResult, InterpreterError, Value}, - macros_api::{NodeInterner, Path, Signedness, UnresolvedTypeData}, + macros_api::{NodeInterner, Signedness}, node_interner::TraitId, parser, token::{SpannedToken, Token, Tokens}, @@ -53,9 +53,7 @@ impl<'local, 'context> Interpreter<'local, 'context> { "trait_def_as_trait_constraint" => { trait_def_as_trait_constraint(interner, arguments, location) } - "quoted_as_trait_constraint" => { - quoted_as_trait_constraint(interner, arguments, location) - } + "quoted_as_trait_constraint" => quoted_as_trait_constraint(self, 
arguments, location), "quoted_as_type" => quoted_as_type(self, arguments, location), "zeroed" => zeroed(return_type), _ => { @@ -133,9 +131,9 @@ pub(super) fn get_u32(value: Value, location: Location) -> IResult { } } -fn get_trait_constraint(value: Value, location: Location) -> IResult { +fn get_trait_constraint(value: Value, location: Location) -> IResult<(TraitId, Vec)> { match value { - Value::TraitConstraint(bound) => Ok(bound), + Value::TraitConstraint(trait_id, generics) => Ok((trait_id, generics)), value => { let expected = Type::Quoted(QuotedType::TraitConstraint); Err(InterpreterError::TypeMismatch { expected, value, location }) @@ -387,7 +385,7 @@ fn slice_insert( // fn as_trait_constraint(quoted: Quoted) -> TraitConstraint fn quoted_as_trait_constraint( - _interner: &mut NodeInterner, + interpreter: &mut Interpreter, mut arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { @@ -402,7 +400,14 @@ fn quoted_as_trait_constraint( InterpreterError::FailedToParseMacro { error, tokens, rule, file: location.file } })?; - Ok(Value::TraitConstraint(trait_bound)) + let bound = interpreter + .elaborator + .elaborate_item_from_comptime(interpreter.current_function, |elaborator| { + elaborator.resolve_trait_bound(&trait_bound, Type::Unit) + }) + .ok_or(InterpreterError::FailedToResolveTraitBound { trait_bound, location })?; + + Ok(Value::TraitConstraint(bound.trait_id, bound.trait_generics)) } // fn as_type(quoted: Quoted) -> Type @@ -529,11 +534,6 @@ fn zeroed(return_type: Type) -> IResult { let element = zeroed(*element)?; Ok(Value::Pointer(Shared::new(element), false)) } - Type::Quoted(QuotedType::TraitConstraint) => Ok(Value::TraitConstraint(TraitBound { - trait_path: Path::from_single(String::new(), Span::default()), - trait_id: None, - trait_generics: Vec::new(), - })), // Optimistically assume we can resolve this type later or that the value is unused Type::TypeVariable(_, _) | Type::Forall(_, _) @@ -618,14 +618,9 @@ fn trait_def_as_trait_constraint( let trait_id = get_trait_def(arguments.pop().unwrap().0, location)?; let the_trait = interner.get_trait(trait_id); - - let trait_path = Path::from_ident(the_trait.name.clone()); - let trait_generics = vecmap(&the_trait.generics, |generic| { - let name = Path::from_single(generic.name.as_ref().clone(), generic.span); - UnresolvedTypeData::Named(name, Vec::new(), false).with_span(generic.span) + Type::NamedGeneric(generic.type_var.clone(), generic.name.clone(), generic.kind.clone()) }); - let trait_id = Some(trait_id); - Ok(Value::TraitConstraint(TraitBound { trait_path, trait_id, trait_generics })) + Ok(Value::TraitConstraint(trait_id, trait_generics)) } diff --git a/compiler/noirc_frontend/src/hir/comptime/value.rs b/compiler/noirc_frontend/src/hir/comptime/value.rs index d20372e6812..0959e4c17ac 100644 --- a/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -7,7 +7,7 @@ use iter_extended::{try_vecmap, vecmap}; use noirc_errors::Location; use crate::{ - ast::{ArrayLiteral, ConstructorExpression, Ident, IntegerBitSize, Signedness, TraitBound}, + ast::{ArrayLiteral, ConstructorExpression, Ident, IntegerBitSize, Signedness}, hir::def_map::ModuleId, hir_def::expr::{HirArrayLiteral, HirConstructorExpression, HirIdent, HirLambda, ImplKind}, macros_api::{ @@ -48,7 +48,7 @@ pub enum Value { Slice(Vector, Type), Code(Rc), StructDefinition(StructId), - TraitConstraint(TraitBound), + TraitConstraint(TraitId, /* trait generics */ Vec), TraitDefinition(TraitId), 
FunctionDefinition(FuncId), ModuleDefinition(ModuleId), @@ -222,7 +222,7 @@ impl Value { } Value::Pointer(..) | Value::StructDefinition(_) - | Value::TraitConstraint(_) + | Value::TraitConstraint(..) | Value::TraitDefinition(_) | Value::FunctionDefinition(_) | Value::Zeroed(_) @@ -342,7 +342,7 @@ impl Value { Value::Code(block) => HirExpression::Unquote(unwrap_rc(block)), Value::Pointer(..) | Value::StructDefinition(_) - | Value::TraitConstraint(_) + | Value::TraitConstraint(..) | Value::TraitDefinition(_) | Value::FunctionDefinition(_) | Value::Zeroed(_) diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 4aa61b50e55..60489660762 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -80,7 +80,6 @@ pub struct UnresolvedTraitImpl { pub methods: UnresolvedFunctions, pub generics: UnresolvedGenerics, pub where_clause: Vec, - pub is_comptime: bool, // Every field after this line is filled in later in the elaborator pub trait_id: Option, diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 1dca6ded786..9eef1d7b77e 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -188,7 +188,6 @@ impl<'a> ModCollector<'a> { generics: trait_impl.impl_generics, where_clause: trait_impl.where_clause, trait_generics: trait_impl.trait_generics, - is_comptime: trait_impl.is_comptime, // These last fields are filled later on trait_id: None, diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index b58f447d406..a7c62048283 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -218,9 +218,8 @@ fn top_level_statement<'a>( /// /// implementation: 'impl' generics type '{' function_definition ... '}' fn implementation() -> impl NoirParser { - maybe_comp_time() - .then_ignore(keyword(Keyword::Impl)) - .then(function::generics()) + keyword(Keyword::Impl) + .ignore_then(function::generics()) .then(parse_type().map_with_span(|typ, span| (typ, span))) .then(where_clause()) .then_ignore(just(Token::LeftBrace)) @@ -228,14 +227,13 @@ fn implementation() -> impl NoirParser { .then_ignore(just(Token::RightBrace)) .map(|args| { let ((other_args, where_clause), methods) = args; - let ((is_comptime, generics), (object_type, type_span)) = other_args; + let (generics, (object_type, type_span)) = other_args; TopLevelStatement::Impl(TypeImpl { generics, object_type, type_span, where_clause, methods, - is_comptime, }) }) } @@ -559,7 +557,7 @@ fn pattern() -> impl NoirParser { .separated_by(just(Token::Comma)) .delimited_by(just(Token::LeftBrace), just(Token::RightBrace)); - let struct_pattern = path() + let struct_pattern = path(super::parse_type()) .then(struct_pattern_fields) .map_with_span(|(typename, fields), span| Pattern::Struct(typename, fields, span)); @@ -1130,7 +1128,7 @@ fn constructor(expr_parser: impl ExprParser) -> impl NoirParser .allow_trailing() .delimited_by(just(Token::LeftBrace), just(Token::RightBrace)); - path().then(args).map(ExpressionKind::constructor) + path(super::parse_type()).then(args).map(ExpressionKind::constructor) } fn constructor_field
<P>
(expr_parser: P) -> impl NoirParser<(Ident, Expression)> diff --git a/compiler/noirc_frontend/src/parser/parser/path.rs b/compiler/noirc_frontend/src/parser/parser/path.rs index 5565c392d59..140650af1a2 100644 --- a/compiler/noirc_frontend/src/parser/parser/path.rs +++ b/compiler/noirc_frontend/src/parser/parser/path.rs @@ -1,4 +1,4 @@ -use crate::ast::{Path, PathKind, PathSegment}; +use crate::ast::{Path, PathKind, PathSegment, UnresolvedType}; use crate::parser::NoirParser; use crate::token::{Keyword, Token}; @@ -8,8 +8,10 @@ use chumsky::prelude::*; use super::keyword; use super::primitives::{path_segment, path_segment_no_turbofish}; -pub(super) fn path() -> impl NoirParser { - path_inner(path_segment()) +pub(super) fn path<'a>( + type_parser: impl NoirParser + 'a, +) -> impl NoirParser + 'a { + path_inner(path_segment(type_parser)) } pub(super) fn path_no_turbofish() -> impl NoirParser { @@ -40,13 +42,16 @@ fn empty_path() -> impl NoirParser { } pub(super) fn maybe_empty_path() -> impl NoirParser { - path().or(empty_path()) + path_no_turbofish().or(empty_path()) } #[cfg(test)] mod test { use super::*; - use crate::parser::parser::test_helpers::{parse_all_failing, parse_with}; + use crate::parser::{ + parse_type, + parser::test_helpers::{parse_all_failing, parse_with}, + }; #[test] fn parse_path() { @@ -59,13 +64,13 @@ mod test { ]; for (src, expected_segments) in cases { - let path: Path = parse_with(path(), src).unwrap(); + let path: Path = parse_with(path(parse_type()), src).unwrap(); for (segment, expected) in path.segments.into_iter().zip(expected_segments) { assert_eq!(segment.ident.0.contents, expected); } } - parse_all_failing(path(), vec!["std::", "::std", "std::hash::", "foo::1"]); + parse_all_failing(path(parse_type()), vec!["std::", "::std", "std::hash::", "foo::1"]); } #[test] @@ -78,12 +83,12 @@ mod test { ]; for (src, expected_path_kind) in cases { - let path = parse_with(path(), src).unwrap(); + let path = parse_with(path(parse_type()), src).unwrap(); assert_eq!(path.kind, expected_path_kind); } parse_all_failing( - path(), + path(parse_type()), vec!["crate", "crate::std::crate", "foo::bar::crate", "foo::dep"], ); } diff --git a/compiler/noirc_frontend/src/parser/parser/primitives.rs b/compiler/noirc_frontend/src/parser/parser/primitives.rs index eb8d67b751a..25f693bf504 100644 --- a/compiler/noirc_frontend/src/parser/parser/primitives.rs +++ b/compiler/noirc_frontend/src/parser/parser/primitives.rs @@ -33,10 +33,14 @@ pub(super) fn token_kind(token_kind: TokenKind) -> impl NoirParser { }) } -pub(super) fn path_segment() -> impl NoirParser { - ident() - .then(turbofish(super::parse_type())) - .map_with_span(|(ident, generics), span| PathSegment { ident, generics, span }) +pub(super) fn path_segment<'a>( + type_parser: impl NoirParser + 'a, +) -> impl NoirParser + 'a { + ident().then(turbofish(type_parser)).map_with_span(|(ident, generics), span| PathSegment { + ident, + generics, + span, + }) } pub(super) fn path_segment_no_turbofish() -> impl NoirParser { @@ -96,7 +100,7 @@ pub(super) fn turbofish<'a>( } pub(super) fn variable() -> impl NoirParser { - path().map(ExpressionKind::Variable) + path(super::parse_type()).map(ExpressionKind::Variable) } pub(super) fn variable_no_turbofish() -> impl NoirParser { diff --git a/compiler/noirc_frontend/src/parser/parser/structs.rs b/compiler/noirc_frontend/src/parser/parser/structs.rs index 9a3adf74d7f..58bf1693eee 100644 --- a/compiler/noirc_frontend/src/parser/parser/structs.rs +++ 
b/compiler/noirc_frontend/src/parser/parser/structs.rs @@ -1,7 +1,6 @@ use chumsky::prelude::*; use crate::ast::{Ident, NoirStruct, UnresolvedType}; -use crate::parser::parser::types::maybe_comp_time; use crate::{ parser::{ parser::{ @@ -29,21 +28,13 @@ pub(super) fn struct_definition() -> impl NoirParser { .or(just(Semicolon).to(Vec::new())); attributes() - .then(maybe_comp_time()) .then_ignore(keyword(Struct)) .then(ident()) .then(function::generics()) .then(fields) - .validate(|((((attributes, is_comptime), name), generics), fields), span, emit| { + .validate(|(((attributes, name), generics), fields), span, emit| { let attributes = validate_secondary_attributes(attributes, span, emit); - TopLevelStatement::Struct(NoirStruct { - name, - attributes, - generics, - fields, - span, - is_comptime, - }) + TopLevelStatement::Struct(NoirStruct { name, attributes, generics, fields, span }) }) } diff --git a/compiler/noirc_frontend/src/parser/parser/traits.rs b/compiler/noirc_frontend/src/parser/parser/traits.rs index 8115255368f..ffcf7e07629 100644 --- a/compiler/noirc_frontend/src/parser/parser/traits.rs +++ b/compiler/noirc_frontend/src/parser/parser/traits.rs @@ -3,7 +3,6 @@ use chumsky::prelude::*; use super::attributes::{attributes, validate_secondary_attributes}; use super::function::function_return_type; use super::path::path_no_turbofish; -use super::types::maybe_comp_time; use super::{block, expression, fresh_statement, function, function_declaration_parameters}; use crate::ast::{ @@ -105,9 +104,8 @@ fn trait_type_declaration() -> impl NoirParser { /// /// trait_implementation: 'impl' generics ident generic_args for type '{' trait_implementation_body '}' pub(super) fn trait_implementation() -> impl NoirParser { - maybe_comp_time() - .then_ignore(keyword(Keyword::Impl)) - .then(function::generics()) + keyword(Keyword::Impl) + .ignore_then(function::generics()) .then(path_no_turbofish()) .then(generic_type_args(parse_type())) .then_ignore(keyword(Keyword::For)) @@ -118,7 +116,7 @@ pub(super) fn trait_implementation() -> impl NoirParser { .then_ignore(just(Token::RightBrace)) .map(|args| { let (((other_args, object_type), where_clause), items) = args; - let (((is_comptime, impl_generics), trait_name), trait_generics) = other_args; + let ((impl_generics, trait_name), trait_generics) = other_args; TopLevelStatement::TraitImpl(NoirTraitImpl { impl_generics, @@ -127,7 +125,6 @@ pub(super) fn trait_implementation() -> impl NoirParser { object_type, items, where_clause, - is_comptime, }) }) } diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index f2b83a48022..a21259a4f0d 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -2610,3 +2610,122 @@ fn turbofish_in_middle_of_variable_unsupported_yet() { CompilationError::TypeError(TypeCheckError::UnsupportedTurbofishUsage { .. }), )); } + +#[test] +fn turbofish_in_struct_pattern() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let value: Field = 0; + let Foo:: { x } = Foo { x: value }; + let _ = x; + } + "#; + assert_no_errors(src); +} + +#[test] +fn turbofish_in_struct_pattern_errors_if_type_mismatch() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let value: Field = 0; + let Foo:: { x } = Foo { x: value }; + let _ = x; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::TypeMismatchWithSource { .. 
}) = &errors[0].0 + else { + panic!("Expected a type mismatch error, got {:?}", errors[0].0); + }; +} + +#[test] +fn turbofish_in_struct_pattern_generic_count_mismatch() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let value = 0; + let Foo:: { x } = Foo { x: value }; + let _ = x; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::GenericCountMismatch { + item, + expected, + found, + .. + }) = &errors[0].0 + else { + panic!("Expected a generic count mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(item, "struct Foo"); + assert_eq!(*expected, 1); + assert_eq!(*found, 2); +} + +#[test] +fn incorrect_generic_count_on_struct_impl() { + let src = r#" + struct Foo {} + impl Foo {} + fn main() {} + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::IncorrectGenericCount { + actual, + expected, + .. + }) = errors[0].0 + else { + panic!("Expected an incorrect generic count mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(actual, 1); + assert_eq!(expected, 0); +} + +#[test] +fn incorrect_generic_count_on_type_alias() { + let src = r#" + struct Foo {} + type Bar = Foo; + fn main() {} + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::IncorrectGenericCount { + actual, + expected, + .. + }) = errors[0].0 + else { + panic!("Expected an incorrect generic count mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(actual, 1); + assert_eq!(expected, 0); +} diff --git a/cspell.json b/cspell.json index 689b72435ef..b9199bea4bd 100644 --- a/cspell.json +++ b/cspell.json @@ -126,6 +126,7 @@ "memset", "merkle", "metas", + "microcontroller", "minreq", "monomorphization", "monomorphize", @@ -135,6 +136,7 @@ "monomorphizing", "montcurve", "MSRV", + "multicall", "nand", "nargo", "neovim", diff --git a/docs/docs/explainers/cspell.json b/docs/docs/explainers/cspell.json new file mode 100644 index 00000000000..c60b0a597b1 --- /dev/null +++ b/docs/docs/explainers/cspell.json @@ -0,0 +1,5 @@ +{ + "words": [ + "Cryptdoku" + ] +} diff --git a/docs/docs/explainers/explainer-writing-noir.md b/docs/docs/explainers/explainer-writing-noir.md new file mode 100644 index 00000000000..c8a42c379e6 --- /dev/null +++ b/docs/docs/explainers/explainer-writing-noir.md @@ -0,0 +1,173 @@ +--- +title: Writing Performant Noir +description: Understand new considerations when writing Noir +keywords: [Noir, programming, rust] +tags: [Optimization] +sidebar_position: 0 +--- + + +This article intends to set you up with key concepts essential for writing more viable applications that use zero knowledge proofs, namely around efficient circuits. + +## Context - 'Efficient' is subjective + +When writing a web application for a performant computer with a high-speed internet connection, writing efficient code is sometimes an afterthought, considered only when needed. Large multiplications running in the innermost of nested loops may not even be on a dev's radar. +When writing firmware for a battery-powered microcontroller, you think of CPU cycles as rations to keep within a product's power budget. + +> Code is written to create applications that perform specific tasks within specific constraints + +And these constraints differ depending on where the compiled code is executed.
+ +### The Ethereum Virtual Machine (EVM) + +In scenarios where extremely low gas costs are required for an Ethereum application to be viable/competitive, Ethereum smart contract developers get into what is colloquially known as "*gas golfing*": finding the lowest execution cost of their compiled code (EVM bytecode) that achieves a specific task. + +The equivalent optimization task when writing zk circuits is affectionately referred to as "*gate golfing*", finding the lowest gate representation of the compiled Noir code. + +### Coding for circuits - a paradigm shift + +In zero knowledge cryptography, code is compiled to "circuits" consisting of arithmetic gates, and gate count is the significant cost. Depending on the proving system, this is linearly proportional to proving time, so from a product standpoint it should be kept as low as possible. + +Writing efficient code for web apps and writing efficient Solidity already differ in key ways, and writing efficient circuits brings yet another set of considerations. It is a bit of a paradigm shift, like writing code for GPUs for the first time... + +For example, drawing a circle at (0, 0) of radius `r`: +- For a single CPU thread, +``` +for theta in 0..2*pi { + let x = r * cos(theta); + let y = r * sin(theta); + draw(x, y); +} // note: would do 0 - pi/2 and draw +ve/-ve x and y. +``` + +- For GPUs (simultaneous parallel calls with x, y across image), +``` +if (x^2 + y^2 = r^2) { + draw(x, y); +} +``` + +([Related](https://www.youtube.com/watch?v=-P28LKWTzrI)) + +Whilst this CPU -> GPU example does not translate to circuits exactly, it is intended to exemplify the difference in intuition when coding for different machine capabilities/constraints. + +### Context Takeaway + +For those coming from a primarily web app background, this article will explain what you need to consider when writing circuits. Furthermore, for those experienced in writing efficient machine code, prepare to shift what you think is efficient 😬 + +## Translating from Rust + +For some applications using Noir, existing code might be a convenient starting point from which to optimize the gate count. + +:::note +Many valuable functions and algorithms have been written in more established languages (C/C++), and converted to modern ones (like Rust). +::: + +Fortunately for Noir developers, when a particular function is needed, a Rust implementation can readily be converted to Noir with some key changes. While the compiler does a decent amount of optimizations, it won't be able to change code that has been optimized for clock cycles into code optimized for arithmetic gates. + +A few things to do when converting Rust code to Noir: +- `println!` is not a macro; use the `println` function (same for `assert_eq`) +- No early `return` in functions; constrain via assertions instead +- No passing by reference. Remove the `&` operator to pass by value (copy) +- No boolean operators (`&&`, `||`). Use bitwise operators (`&`, `|`) with boolean values +- No type `usize`. Use types `u8`, `u32`, `u64`, ... +- `main` return must be public, `pub` +- No `const`, use `global` +- Noir's LSP is your friend, so error messages should be informative enough to resolve syntax issues. + +## Writing efficient Noir for performant products + +The following points should help refine your intuition for efficient circuits over time. + +:::note +A Noir program makes a statement that can be verified. +::: + +It compiles to a structure that represents the calculation, and can assert results within the calculation at any stage (via the `constrain` keyword).
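+For instance, here is a minimal sketch of such a statement (this program is illustrative only; it is not taken from this PR or from the existing docs): it proves knowledge of two private factors of a public value, asserting an intermediate result inside the circuit. +``` +// Illustrative only: prove knowledge of two private factors of a public value. +fn main(x: Field, y: Field, product: pub Field) { + let computed = x * y; + // This assertion becomes a constraint in the compiled circuit. + assert(computed == product); +} +``` +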
+ +A Noir program compiles to an Abstract Circuit Intermediate Representation which is: + - A tree structure + - Leaves (inputs) are the `Field` type + - Nodes contain arithmetic operations to combine them (gates) + - The root is the final result (return value) + +:::tip +The command `nargo info` shows the program's circuit size, and is useful to compare the impact of changes made. +You can dig deeper and use the `--print-acir` param to take a closer look at individual ACIR opcodes, and the proving backend to see its gate count (e.g. for Barretenberg, `bb gates -b ./target/program.json`). +::: + +### Use the `Field` type + +Since the native type of values in circuits is `Field`, using it for variables in Noir means fewer gates spent converting them under the hood. + +:::tip +Where possible, use the `Field` type for values. Using smaller value types, and bit-packing strategies, will result in MORE gates. +::: + +**Note:** Remain mindful of overflow. Types with fewer bits may be used to limit the range of possible values prior to a calculation. + +### Use Arithmetic over non-arithmetic operations + +Since circuits are made of arithmetic gates, the cost of arithmetic operations tends to be one gate, whereas in procedural code they represent several clock cycles. + +Conversely, non-arithmetic operators are achieved with multiple gates, vs 1 clock cycle in procedural code. + +| (cost\op) | arithmetic <br/> (`*`, `+`) | bit-wise ops <br/> (eg `<`, `\|`, `>>`) | +| - | - | - | +| **cycles** | 10+ | 1 | +| **gates** | 1 | 10+ | + +Bit-wise operations (e.g. bit shifts `<<` and `>>`), albeit commonly used in general programming and especially for clock cycle optimizations, are on the contrary expensive in gates when performed within circuits. + +Translate away from bit shifts when writing constrained functions for the best performance. + +On the flip side, feel free to use bit shifts in unconstrained functions and tests if necessary, as they are executed outside of circuits and do not induce performance hits. + +### Use static over dynamic values + +Another general theme that manifests in different ways is that static reads are represented with fewer gates than dynamic ones. + +Reading from read-only memory (ROM) adds fewer gates than reading from random-access memory (RAM): 2 vs ~3.25, due to the additional bounds checks. Arrays of fixed length (albeit used at a lower capacity) will generate fewer gates than dynamic storage. + +Related to this, if an index used to access an array is not known at compile time (i.e. unknown until run time), then ROM will be converted to RAM, expanding the gate count. + +:::tip +Use arrays and indices that are known at compile time where possible. +Using `assert_constant(i);` before an index, `i`, is used in an array will give a compile error if `i` is NOT known at compile time. +::: + +### Leverage unconstrained execution + +Constrained verification can leverage unconstrained execution; this is especially useful for operations that are represented by many gates. +Use an [unconstrained function](../noir/concepts/unconstrained.md) to perform gate-heavy calculations, then verify and constrain the result. + +E.g. division generates more gates than multiplication, so calculating the quotient in an unconstrained function and then constraining that the product of the quotient and divisor (plus any remainder) equals the dividend is more efficient. + +Use `if is_unconstrained() { ... }` to conditionally execute code depending on whether it is called in an unconstrained or constrained way. + +## Advanced + +Unless you're well into the depths of gate optimization, this advanced section can be ignored. + +### Combine arithmetic operations + +A Noir program can be honed further by combining arithmetic operators in a way that makes the most of each constraint of the backend proving system. This applies in scenarios where the backend might not be doing this perfectly. + +E.g. the Barretenberg backend (current default for Noir) is a width-4 PLONKish constraint system: + $ w_1*w_2*q_m + w_1*q_1 + w_2*q_2 + w_3*q_3 + w_4*q_4 + q_c = 0 $ + +Here we see one occurrence of witnesses 1 and 2 ($w_1$, $w_2$) being multiplied together, added to witnesses 1-4 ($w_1$ .. $w_4$) each multiplied by a corresponding circuit constant ($q_1$ .. $q_4$), plus a final circuit constant, $q_c$. + +Use `nargo info --print-acir` to inspect the ACIR opcodes (and the proving backend for gates); this may reveal opportunities to amend the order of operations and reduce the number of constraints. + +#### Variable as witness vs expression + +If you've come this far and really know what you're doing at the equation level, a temporary lever (that will become unnecessary/useless over time) is `std::as_witness`. This informs the compiler to save a variable as a witness rather than an expression. + +The compiler will mostly be correct and optimal, but this may help in some near-term edge cases that have yet to be optimized.
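+A rough sketch of how this lever might be applied (illustrative only; the function and values are ours, and whether it helps depends on the backend and the surrounding expressions): +``` +fn main(x: Field, y: Field) -> pub Field { + // Without the hint, `sum` may be inlined as an expression wherever it is used. + let sum = x * x + y; + // Hint the compiler to materialize `sum` as a witness rather than an expression. + std::as_witness(sum); + sum * sum + sum +} +``` +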
+Note: When used incorrectly it will create **less** efficient circuits (higher gate count). + +## References +- Guillaume's ["`Cryptdoku`" talk](https://www.youtube.com/watch?v=MrQyzuogxgg) (Jun'23) +- Tips from Tom, Jake and Zac. +- [Idiomatic Noir](https://www.vlayer.xyz/blog/idiomatic-noir-part-1-collections) blog post diff --git a/docs/docs/getting_started/tooling/noir_codegen.md b/docs/docs/reference/noir_codegen.md similarity index 97% rename from docs/docs/getting_started/tooling/noir_codegen.md rename to docs/docs/reference/noir_codegen.md index f7505bef7ab..db8f07dc22e 100644 --- a/docs/docs/getting_started/tooling/noir_codegen.md +++ b/docs/docs/reference/noir_codegen.md @@ -33,7 +33,7 @@ yarn add @noir-lang/noir_codegen -D ``` ### Nargo library -Make sure you have Nargo, v0.25.0 or greater, installed. If you don't, follow the [installation guide](../installation/index.md). +Make sure you have Nargo, v0.25.0 or greater, installed. If you don't, follow the [installation guide](../getting_started/installation/index.md). If you're in a new project, make a `circuits` folder and create a new Noir library: diff --git a/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml b/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml deleted file mode 100644 index 8bdefbbbd21..00000000000 --- a/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "non_comptime_local_fn_call" -type = "bin" -authors = [""] -compiler_version = ">=0.23.0" - -[dependencies] diff --git a/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr b/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr deleted file mode 100644 index d75bb1a922a..00000000000 --- a/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr +++ /dev/null @@ -1,9 +0,0 @@ -fn main() { - comptime { - let _a = id(3); - } -} - -fn id(x: Field) -> Field { - x -} diff --git a/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr b/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr index 5c99f8c587e..2f2ca89cfb5 100644 --- a/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr +++ b/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr @@ -24,16 +24,16 @@ fn main() { } } -comptime struct TestHasher { +struct TestHasher { result: Field, } -comptime impl Hasher for TestHasher { - comptime fn finish(self) -> Field { +impl Hasher for TestHasher { + fn finish(self) -> Field { self.result } - comptime fn write(&mut self, input: Field) { + fn write(&mut self, input: Field) { self.result += input; } } diff --git a/test_programs/compile_success_empty/comptime_traits/src/main.nr b/test_programs/compile_success_empty/comptime_traits/src/main.nr index 8b1f81e6594..7d1e116dd0c 100644 --- a/test_programs/compile_success_empty/comptime_traits/src/main.nr +++ b/test_programs/compile_success_empty/comptime_traits/src/main.nr @@ -20,8 +20,8 @@ struct MyType { value: i32, } -comptime impl Neg for MyType { - comptime fn neg(self) -> Self { +impl Neg for MyType { + fn neg(self) -> Self { self } } diff --git a/test_programs/compile_success_empty/quoted_as_type/src/main.nr b/test_programs/compile_success_empty/quoted_as_type/src/main.nr index c48ac717bc0..e06294592ca 100644 --- a/test_programs/compile_success_empty/quoted_as_type/src/main.nr +++ b/test_programs/compile_success_empty/quoted_as_type/src/main.nr @@ -7,7 +7,7 @@ comptime fn macro() -> Quoted { 
quote { let foo: $typ = Foo {}; foo } } -comptime struct Foo {} +struct Foo {} // Ensure we call the Foo impl impl Foo { diff --git a/test_programs/execution_success/derive/Nargo.toml b/test_programs/execution_success/derive/Nargo.toml new file mode 100644 index 00000000000..f3846594305 --- /dev/null +++ b/test_programs/execution_success/derive/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "derive" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/derive/src/main.nr b/test_programs/execution_success/derive/src/main.nr new file mode 100644 index 00000000000..f226817fbaf --- /dev/null +++ b/test_programs/execution_success/derive/src/main.nr @@ -0,0 +1,51 @@ +use std::collections::umap::UHashMap; +use std::hash::BuildHasherDefault; +use std::hash::poseidon2::Poseidon2Hasher; + +#[my_derive(DoNothing)] +struct MyStruct { my_field: u32 } + +type DeriveFunction = fn(StructDefinition) -> Quoted; + +comptime mut global HANDLERS: UHashMap> = UHashMap::default(); + +comptime fn my_derive(s: StructDefinition, traits: [Quoted]) -> Quoted { + let mut result = quote {}; + + for trait_to_derive in traits { + let handler = HANDLERS.get(trait_to_derive.as_trait_constraint()); + assert(handler.is_some(), f"No derive function registered for `{trait_to_derive}`"); + + let trait_impl = handler.unwrap()(s); + result = quote { $result $trait_impl }; + } + + result +} + +unconstrained comptime fn my_derive_via(t: TraitDefinition, f: Quoted) { + HANDLERS.insert(t.as_trait_constraint(), std::meta::unquote!(f)); +} + +#[my_derive_via(derive_do_nothing)] +trait DoNothing { + fn do_nothing(self); +} + +comptime fn derive_do_nothing(s: StructDefinition) -> Quoted { + let typ = s.as_type(); + let generics = s.generics().join(quote {,}); + quote { + impl<$generics> DoNothing for $typ { + fn do_nothing(_self: Self) { + // Traits can't tell us what to do + println("something"); + } + } + } +} + +fn main() { + let s = MyStruct { my_field: 1 }; + s.do_nothing(); +} diff --git a/test_programs/execution_success/regression_5615/Nargo.toml b/test_programs/execution_success/regression_5615/Nargo.toml new file mode 100644 index 00000000000..738d99391a2 --- /dev/null +++ b/test_programs/execution_success/regression_5615/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_5615" +type = "bin" +authors = [""] +compiler_version = ">=0.32.0" + +[dependencies] diff --git a/test_programs/execution_success/regression_5615/src/main.nr b/test_programs/execution_success/regression_5615/src/main.nr new file mode 100644 index 00000000000..afb641e510d --- /dev/null +++ b/test_programs/execution_success/regression_5615/src/main.nr @@ -0,0 +1,12 @@ +use std::collections::umap::UHashMap; +use std::hash::BuildHasherDefault; +use std::hash::poseidon2::Poseidon2Hasher; + +unconstrained fn main() { + comptime + { + let mut map: UHashMap> = UHashMap::default(); + + map.insert(1, 2); + } +} diff --git a/tooling/lsp/src/requests/inlay_hint.rs b/tooling/lsp/src/requests/inlay_hint.rs index 3fc6a6752df..48d72fa0e9a 100644 --- a/tooling/lsp/src/requests/inlay_hint.rs +++ b/tooling/lsp/src/requests/inlay_hint.rs @@ -4,8 +4,8 @@ use std::future::{self, Future}; use async_lsp::ResponseError; use fm::{FileId, FileMap, PathString}; use lsp_types::{ - InlayHint, InlayHintKind, InlayHintLabel, InlayHintLabelPart, InlayHintParams, Position, - TextDocumentPositionParams, + InlayHint, InlayHintKind, InlayHintLabel, InlayHintLabelPart, InlayHintParams, Position, 
Range, + TextDocumentPositionParams, TextEdit, }; use noirc_errors::{Location, Span}; use noirc_frontend::{ @@ -173,7 +173,7 @@ impl<'a> InlayHintCollector<'a> { self.collect_in_expression(&assign_statement.expression); } StatementKind::For(for_loop_statement) => { - self.collect_in_ident(&for_loop_statement.identifier); + self.collect_in_ident(&for_loop_statement.identifier, false); self.collect_in_expression(&for_loop_statement.block); } StatementKind::Comptime(statement) => self.collect_in_statement(statement), @@ -276,7 +276,7 @@ impl<'a> InlayHintCollector<'a> { match pattern { Pattern::Identifier(ident) => { - self.collect_in_ident(ident); + self.collect_in_ident(ident, true); } Pattern::Mutable(pattern, _span, _is_synthesized) => { self.collect_in_pattern(pattern); @@ -294,7 +294,7 @@ impl<'a> InlayHintCollector<'a> { } } - fn collect_in_ident(&mut self, ident: &Ident) { + fn collect_in_ident(&mut self, ident: &Ident, editable: bool) { if !self.options.type_hints.enabled { return; } @@ -308,17 +308,17 @@ impl<'a> InlayHintCollector<'a> { let global_info = self.interner.get_global(global_id); let definition_id = global_info.definition_id; let typ = self.interner.definition_type(definition_id); - self.push_type_hint(lsp_location, &typ); + self.push_type_hint(lsp_location, &typ, editable); } ReferenceId::Local(definition_id) => { let typ = self.interner.definition_type(definition_id); - self.push_type_hint(lsp_location, &typ); + self.push_type_hint(lsp_location, &typ, editable); } ReferenceId::StructMember(struct_id, field_index) => { let struct_type = self.interner.get_struct(struct_id); let struct_type = struct_type.borrow(); let (_field_name, field_type) = struct_type.field_at(field_index); - self.push_type_hint(lsp_location, field_type); + self.push_type_hint(lsp_location, field_type, false); } ReferenceId::Module(_) | ReferenceId::Struct(_) @@ -331,7 +331,7 @@ impl<'a> InlayHintCollector<'a> { } } - fn push_type_hint(&mut self, location: lsp_types::Location, typ: &Type) { + fn push_type_hint(&mut self, location: lsp_types::Location, typ: &Type, editable: bool) { let position = location.range.end; let mut parts = Vec::new(); @@ -342,7 +342,14 @@ impl<'a> InlayHintCollector<'a> { position, label: InlayHintLabel::LabelParts(parts), kind: Some(InlayHintKind::TYPE), - text_edits: None, + text_edits: if editable { + Some(vec![TextEdit { + range: Range { start: location.range.end, end: location.range.end }, + new_text: format!(": {}", typ), + }]) + } else { + None + }, tooltip: None, padding_left: None, padding_right: None, @@ -756,8 +763,10 @@ mod inlay_hints_tests { let inlay_hints = get_inlay_hints(0, 3, type_hints()).await; assert_eq!(inlay_hints.len(), 1); + let position = Position { line: 1, character: 11 }; + let inlay_hint = &inlay_hints[0]; - assert_eq!(inlay_hint.position, Position { line: 1, character: 11 }); + assert_eq!(inlay_hint.position, position); if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { assert_eq!(labels.len(), 2); @@ -770,6 +779,14 @@ mod inlay_hints_tests { } else { panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); } + + assert_eq!( + inlay_hint.text_edits, + Some(vec![TextEdit { + range: Range { start: position, end: position }, + new_text: ": Field".to_string(), + }]) + ); } #[test] @@ -777,8 +794,10 @@ mod inlay_hints_tests { let inlay_hints = get_inlay_hints(12, 15, type_hints()).await; assert_eq!(inlay_hints.len(), 1); + let position = Position { line: 13, character: 11 }; + let inlay_hint = &inlay_hints[0]; - 
assert_eq!(inlay_hint.position, Position { line: 13, character: 11 }); + assert_eq!(inlay_hint.position, position); if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { assert_eq!(labels.len(), 2); @@ -798,6 +817,34 @@ mod inlay_hints_tests { } else { panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); } + + assert_eq!( + inlay_hint.text_edits, + Some(vec![TextEdit { + range: Range { start: position, end: position }, + new_text: ": Foo".to_string(), + }]) + ); + } + + #[test] + async fn test_type_inlay_hints_in_struct_member_pattern() { + let inlay_hints = get_inlay_hints(94, 96, type_hints()).await; + assert_eq!(inlay_hints.len(), 1); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, Position { line: 95, character: 24 }); + + if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { + assert_eq!(labels.len(), 2); + assert_eq!(labels[0].value, ": "); + assert_eq!(labels[0].location, None); + assert_eq!(labels[1].value, "i32"); + } else { + panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); + } + + assert_eq!(inlay_hint.text_edits, None); } #[test] @@ -816,6 +863,8 @@ mod inlay_hints_tests { } else { panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); } + + assert_eq!(inlay_hint.text_edits, None); } #[test] @@ -823,8 +872,10 @@ mod inlay_hints_tests { let inlay_hints = get_inlay_hints(19, 21, type_hints()).await; assert_eq!(inlay_hints.len(), 1); + let position = Position { line: 20, character: 10 }; + let inlay_hint = &inlay_hints[0]; - assert_eq!(inlay_hint.position, Position { line: 20, character: 10 }); + assert_eq!(inlay_hint.position, position); if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { assert_eq!(labels.len(), 2); @@ -834,6 +885,14 @@ mod inlay_hints_tests { } else { panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); } + + assert_eq!( + inlay_hint.text_edits, + Some(vec![TextEdit { + range: Range { start: position, end: position }, + new_text: ": Field".to_string(), + }]) + ); } #[test] @@ -855,6 +914,7 @@ mod inlay_hints_tests { let inlay_hint = &inlay_hints[0]; assert_eq!(inlay_hint.position, Position { line: 25, character: 12 }); + assert_eq!(inlay_hint.text_edits, None); if let InlayHintLabel::String(label) = &inlay_hint.label { assert_eq!(label, "one: "); } else { @@ -863,6 +923,7 @@ mod inlay_hints_tests { let inlay_hint = &inlay_hints[1]; assert_eq!(inlay_hint.position, Position { line: 25, character: 15 }); + assert_eq!(inlay_hint.text_edits, None); if let InlayHintLabel::String(label) = &inlay_hint.label { assert_eq!(label, "two: "); } else { @@ -877,6 +938,7 @@ mod inlay_hints_tests { let inlay_hint = &inlay_hints[0]; assert_eq!(inlay_hint.position, Position { line: 38, character: 18 }); + assert_eq!(inlay_hint.text_edits, None); if let InlayHintLabel::String(label) = &inlay_hint.label { assert_eq!(label, "one: "); } else { diff --git a/tooling/lsp/test_programs/inlay_hints/src/main.nr b/tooling/lsp/test_programs/inlay_hints/src/main.nr index 2b53f8de339..762bb6300f8 100644 --- a/tooling/lsp/test_programs/inlay_hints/src/main.nr +++ b/tooling/lsp/test_programs/inlay_hints/src/main.nr @@ -92,3 +92,6 @@ fn call_yet_another_function() { yet_another_function(some_name) // Should not show parameter names ("name" is a suffix of "some_name") } +fn struct_member_hint() { + let SomeStruct { one } = SomeStruct { one: 1 }; +} \ No newline at end of file