From 48995b4571e7cbe2c36c516d7a7ae9e543105132 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Pedro=20Sousa?= Date: Mon, 24 Apr 2023 19:02:30 +0100 Subject: [PATCH 01/66] chore: adding workflow to add issues to docs on docs needed label (#1178) * chore: adding workflow to add issues to docs on docs needed label * chore: adding suggestions * chore: adding suggestions * Update .github/workflows/label_docs.yml Co-authored-by: Blaine Bublitz * chore: adding suggestions * fixing accidental reversion of the gh token * Update .github/workflows/label_docs.yml Co-authored-by: Blaine Bublitz * pushing for CI --------- Co-authored-by: kevaundray Co-authored-by: Blaine Bublitz --- .github/workflows/label_docs.yml | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 .github/workflows/label_docs.yml diff --git a/.github/workflows/label_docs.yml b/.github/workflows/label_docs.yml new file mode 100644 index 00000000000..c7c20c7b157 --- /dev/null +++ b/.github/workflows/label_docs.yml @@ -0,0 +1,32 @@ +name: Notify Doc Needed + +on: + pull_request_target: + types: + - labeled + - unlabeled + +jobs: + dispatch: + runs-on: ubuntu-latest + if: github.event.label.name == 'doc needed' + + steps: + - uses: actions/checkout@v3 + - name: Set workflowId environment variable + id: set_workflow_id + run: | + if [[ "${{ github.event.action }}" == "labeled" ]]; then + echo "workflowId=new-migrated-issue.yml" >> $GITHUB_ENV + else + echo "workflowId=delete-migrated-issue.yml" >> $GITHUB_ENV + fi + + - name: Dispatch + uses: benc-uk/workflow-dispatch@v1 + with: + workflow: ${{ env.workflowId }} + repo: noir-lang/docs + ref: master + token: ${{ secrets.DOCS_REPO_TOKEN }} + inputs: '{ "pr_number": "${{ github.event.pull_request.number }}" }' From cca45a4980aebc041742be57f80a3428b26284cc Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 24 Apr 2023 15:38:55 -0400 Subject: [PATCH 02/66] chore(ssa refactor): Handle function parameters (#1203) * Add Context 
structs and start ssa gen pass * Fix block arguments * Fix clippy lint * Use the correct dfg * Rename contexts to highlight the inner contexts are shared rather than used directly * Correctly handle function parameters * Rename Nested to Tree; add comment --- .../src/ssa_refactor/ir/basic_block.rs | 19 +++- .../src/ssa_refactor/ir/dfg.rs | 36 +++++++- .../src/ssa_refactor/ir/function.rs | 32 ++----- .../src/ssa_refactor/ir/instruction.rs | 10 ++ .../src/ssa_refactor/ir/types.rs | 21 +++++ .../ssa_builder/function_builder.rs | 16 +++- .../src/ssa_refactor/ssa_gen/context.rs | 91 +++++++++++++++++-- .../src/ssa_refactor/ssa_gen/mod.rs | 42 ++++----- .../src/ssa_refactor/ssa_gen/value.rs | 33 ++++++- .../src/monomorphization/ast.rs | 4 +- 10 files changed, 237 insertions(+), 67 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index b11c4dc3f1c..431f1647863 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -35,7 +35,24 @@ pub(crate) struct BasicBlock { pub(crate) type BasicBlockId = Id; impl BasicBlock { - pub(super) fn new(parameters: Vec) -> Self { + pub(crate) fn new(parameters: Vec) -> Self { Self { parameters, instructions: Vec::new(), is_sealed: false, terminator: None } } + + pub(crate) fn parameters(&self) -> &[ValueId] { + &self.parameters + } + + pub(crate) fn add_parameter(&mut self, parameter: ValueId) { + self.parameters.push(parameter); + } + + /// Insert an instruction at the end of this block + pub(crate) fn insert_instruction(&mut self, instruction: InstructionId) { + self.instructions.push(instruction); + } + + pub(crate) fn set_terminator(&mut self, terminator: TerminatorInstruction) { + self.terminator = Some(terminator); + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 
ad6d614fec0..b456fd08ee4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -7,6 +7,8 @@ use super::{ value::{Value, ValueId}, }; +use iter_extended::vecmap; + #[derive(Debug, Default)] /// A convenience wrapper to store `Value`s. pub(crate) struct ValueList(Vec>); @@ -61,9 +63,31 @@ pub(crate) struct DataFlowGraph { } impl DataFlowGraph { - /// Creates a new `empty` basic block + /// Creates a new basic block with no parameters. + /// After being created, the block is unreachable in the current function + /// until another block is made to jump to it. pub(crate) fn new_block(&mut self) -> BasicBlockId { - todo!() + self.blocks.insert(BasicBlock::new(Vec::new())) + } + + /// Creates a new basic block with the given parameters. + /// After being created, the block is unreachable in the current function + /// until another block is made to jump to it. + pub(crate) fn new_block_with_parameters( + &mut self, + parameter_types: impl Iterator, + ) -> BasicBlockId { + self.blocks.insert_with_id(|entry_block| { + let parameters = vecmap(parameter_types.enumerate(), |(position, typ)| { + self.values.insert(Value::Param { block: entry_block, position, typ }) + }); + + BasicBlock::new(parameters) + }) + } + + pub(crate) fn block_parameters(&self, block: BasicBlockId) -> &[ValueId] { + self.blocks[block].parameters() } /// Inserts a new instruction into the DFG. 
@@ -149,6 +173,14 @@ impl DataFlowGraph { pub(crate) fn instruction_results(&self, instruction_id: InstructionId) -> &[ValueId] { self.results.get(&instruction_id).expect("expected a list of Values").as_slice() } + + pub(crate) fn add_block_parameter(&mut self, block_id: BasicBlockId, typ: Type) -> Id { + let block = &mut self.blocks[block_id]; + let position = block.parameters().len(); + let parameter = self.values.insert(Value::Param { block: block_id, position, typ }); + block.add_parameter(parameter); + parameter + } } #[cfg(test)] diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 2509a85f435..1abd6c85367 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -1,11 +1,9 @@ -use super::basic_block::{BasicBlock, BasicBlockId}; +use super::basic_block::BasicBlockId; use super::dfg::DataFlowGraph; use super::instruction::Instruction; -use super::map::{DenseMap, Id, SecondaryMap}; +use super::map::{Id, SecondaryMap}; use super::types::Type; -use super::value::Value; -use iter_extended::vecmap; use noirc_errors::Location; /// A function holds a list of instructions. @@ -16,35 +14,23 @@ use noirc_errors::Location; /// into the current function's context. #[derive(Debug)] pub(crate) struct Function { - /// Basic blocks associated to this particular function - basic_blocks: DenseMap, - /// Maps instructions to source locations source_locations: SecondaryMap, /// The first basic block in the function entry_block: BasicBlockId, - dfg: DataFlowGraph, + pub(crate) dfg: DataFlowGraph, } impl Function { - pub(crate) fn new(parameter_count: usize) -> Self { + /// Creates a new function with an automatically inserted entry block. + /// + /// Note that any parameters to the function must be manually added later. 
+ pub(crate) fn new() -> Self { let mut dfg = DataFlowGraph::default(); - let mut basic_blocks = DenseMap::default(); - - // The parameters for each function are stored as the block parameters - // of the function's entry block - let entry_block = basic_blocks.insert_with_id(|entry_block| { - // TODO: Give each parameter its correct type - let parameters = vecmap(0..parameter_count, |i| { - dfg.make_value(Value::Param { block: entry_block, position: i, typ: Type::Unit }) - }); - - BasicBlock::new(parameters) - }); - - Self { basic_blocks, source_locations: SecondaryMap::new(), entry_block, dfg } + let entry_block = dfg.new_block(); + Self { source_locations: SecondaryMap::new(), entry_block, dfg } } pub(crate) fn entry_block(&self) -> BasicBlockId { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 1d5089179d5..81a28b8407c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -129,10 +129,20 @@ pub(crate) enum TerminatorInstruction { else_destination: BasicBlockId, arguments: Vec, }, + /// Unconditional Jump /// /// Jumps to specified `destination` with `arguments` Jmp { destination: BasicBlockId, arguments: Vec }, + + /// Return from the current function with the given return values. + /// + /// All finished functions should have exactly 1 return instruction. + /// Functions with early returns should instead be structured to + /// unconditionally jump to a single exit block with the return values + /// as the block arguments. Then the exit block can terminate in a return + /// instruction returning these values. + Return { return_values: Vec }, } /// A binary instruction in the IR. 
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs index f2797423e30..e1f8e8a74d2 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -18,6 +18,27 @@ pub(crate) enum NumericType { pub(crate) enum Type { /// Represents numeric types in the IR, including field elements Numeric(NumericType), + + /// A reference to some value, such as an array + Reference, + + /// A function that may be called directly + Function, + /// The Unit type with a single value Unit, } + +impl Type { + pub(crate) fn signed(bit_size: u32) -> Type { + Type::Numeric(NumericType::Signed { bit_size }) + } + + pub(crate) fn unsigned(bit_size: u32) -> Type { + Type::Numeric(NumericType::Unsigned { bit_size }) + } + + pub(crate) fn field() -> Type { + Type::Numeric(NumericType::NativeField) + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index 8d90a95332e..5e82226d3be 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -1,6 +1,8 @@ use crate::ssa_refactor::ir::{ basic_block::BasicBlockId, function::{Function, FunctionId}, + types::Type, + value::ValueId, }; use super::SharedBuilderContext; @@ -24,8 +26,8 @@ pub(crate) struct FunctionBuilder<'ssa> { } impl<'ssa> FunctionBuilder<'ssa> { - pub(crate) fn new(parameters: usize, context: &'ssa SharedBuilderContext) -> Self { - let new_function = Function::new(parameters); + pub(crate) fn new(context: &'ssa SharedBuilderContext) -> Self { + let new_function = Function::new(); let current_block = new_function.entry_block(); Self { @@ -38,12 +40,11 @@ impl<'ssa> FunctionBuilder<'ssa> { } /// Finish the current function and create a new function - pub(crate) fn new_function(&mut self, 
parameters: usize) { - let new_function = Function::new(parameters); + pub(crate) fn new_function(&mut self) { + let new_function = Function::new(); let old_function = std::mem::replace(&mut self.current_function, new_function); self.finished_functions.push((self.current_function_id, old_function)); - self.current_function_id = self.global_context.next_function(); } @@ -51,4 +52,9 @@ impl<'ssa> FunctionBuilder<'ssa> { self.finished_functions.push((self.current_function_id, self.current_function)); self.finished_functions } + + pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { + let entry = self.current_function.entry_block(); + self.current_function.dfg.add_block_parameter(entry, typ) + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 94fedb7b4cf..02bfee8a87f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -1,21 +1,24 @@ use std::collections::HashMap; use std::sync::{Mutex, RwLock}; -use noirc_frontend::monomorphization::ast::{self, LocalId}; +use iter_extended::vecmap; +use noirc_frontend::monomorphization::ast::{self, LocalId, Parameters}; use noirc_frontend::monomorphization::ast::{FuncId, Program}; +use noirc_frontend::Signedness; +use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ssa_builder::SharedBuilderContext; use crate::ssa_refactor::{ ir::function::FunctionId as IrFunctionId, ssa_builder::function_builder::FunctionBuilder, }; -use super::value::Value; +use super::value::{Tree, Values}; // TODO: Make this a threadsafe queue so we can compile functions in parallel type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; pub(super) struct FunctionContext<'a> { - definitions: HashMap, + definitions: HashMap, function_builder: FunctionBuilder<'a>, shared_context: &'a SharedContext, } @@ -29,22 +32,90 @@ pub(super) struct SharedContext { impl<'a> 
FunctionContext<'a> { pub(super) fn new( - parameter_count: usize, + parameters: &Parameters, shared_context: &'a SharedContext, shared_builder_context: &'a SharedBuilderContext, ) -> Self { - Self { + let mut this = Self { definitions: HashMap::new(), - function_builder: FunctionBuilder::new(parameter_count, shared_builder_context), + function_builder: FunctionBuilder::new(shared_builder_context), shared_context, + }; + this.add_parameters_to_scope(parameters); + this + } + + pub(super) fn new_function(&mut self, parameters: &Parameters) { + self.definitions.clear(); + self.function_builder.new_function(); + self.add_parameters_to_scope(parameters); + } + + /// Add each parameter to the current scope, and return the list of parameter types. + /// + /// The returned parameter type list will be flattened, so any struct parameters will + /// be returned as one entry for each field (recursively). + fn add_parameters_to_scope(&mut self, parameters: &Parameters) { + for (id, _, _, typ) in parameters { + self.add_parameter_to_scope(*id, typ); + } + } + + /// Adds a "single" parameter to scope. + /// + /// Single is in quotes here because in the case of tuple parameters, the tuple is flattened + /// into a new parameter for each field recursively. + fn add_parameter_to_scope(&mut self, parameter_id: LocalId, parameter_type: &ast::Type) { + // Add a separate parameter for each field type in 'parameter_type' + let parameter_value = self + .map_type(parameter_type, |this, typ| this.function_builder.add_parameter(typ).into()); + + self.definitions.insert(parameter_id, parameter_value); + } + + /// Maps the given type to a Tree of the result type. + /// + /// This can be used to (for example) flatten a tuple type, creating + /// and returning a new parameter for each field type. 
+ pub(super) fn map_type( + &mut self, + typ: &ast::Type, + mut f: impl FnMut(&mut Self, Type) -> T, + ) -> Tree { + self.map_type_helper(typ, &mut f) + } + + // This helper is needed because we need to take f by mutable reference, + // otherwise we cannot move it multiple times each loop of vecmap. + fn map_type_helper( + &mut self, + typ: &ast::Type, + f: &mut impl FnMut(&mut Self, Type) -> T, + ) -> Tree { + match typ { + ast::Type::Tuple(fields) => { + Tree::Branch(vecmap(fields, |field| self.map_type_helper(field, f))) + } + other => Tree::Leaf(f(self, Self::convert_non_tuple_type(other))), } } - pub(super) fn new_function(&mut self, parameters: impl ExactSizeIterator) { - self.function_builder.new_function(parameters.len()); + pub(super) fn convert_non_tuple_type(typ: &ast::Type) -> Type { + match typ { + ast::Type::Field => Type::field(), + ast::Type::Array(_, _) => Type::Reference, + ast::Type::Integer(Signedness::Signed, bits) => Type::signed(*bits), + ast::Type::Integer(Signedness::Unsigned, bits) => Type::unsigned(*bits), + ast::Type::Bool => Type::unsigned(1), + ast::Type::String(_) => Type::Reference, + ast::Type::Unit => Type::Unit, + ast::Type::Tuple(_) => panic!("convert_non_tuple_type called on a tuple: {typ}"), + ast::Type::Function(_, _) => Type::Function, - for (_i, _parameter) in parameters.enumerate() { - todo!("Add block param to definitions") + // How should we represent Vecs? + // Are they a struct of array + length + capacity? + // Or are they just references? 
+ ast::Type::Vec(_) => Type::Reference, } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 1da65fafd48..c340b45eb9b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -5,7 +5,7 @@ use context::SharedContext; use noirc_errors::Location; use noirc_frontend::monomorphization::ast::{self, Expression, Program}; -use self::{context::FunctionContext, value::Value}; +use self::{context::FunctionContext, value::Values}; use super::ssa_builder::SharedBuilderContext; @@ -14,22 +14,20 @@ pub(crate) fn generate_ssa(program: Program) { let builder_context = SharedBuilderContext::default(); let main = context.program.main(); - // TODO struct parameter counting - let parameter_count = main.parameters.len(); - let mut function_context = FunctionContext::new(parameter_count, &context, &builder_context); + let mut function_context = FunctionContext::new(&main.parameters, &context, &builder_context); function_context.codegen_expression(&main.body); while let Some((src_function_id, _new_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; // TODO: Need to ensure/assert the new function's id == new_id - function_context.new_function(function.parameters.iter().map(|(id, ..)| *id)); + function_context.new_function(&function.parameters); function_context.codegen_expression(&function.body); } } impl<'a> FunctionContext<'a> { - fn codegen_expression(&mut self, expr: &Expression) -> Value { + fn codegen_expression(&mut self, expr: &Expression) -> Values { match expr { Expression::Ident(ident) => self.codegen_ident(ident), Expression::Literal(literal) => self.codegen_literal(literal), @@ -54,67 +52,67 @@ impl<'a> FunctionContext<'a> { } } - fn codegen_ident(&mut self, _ident: &ast::Ident) -> Value { + fn codegen_ident(&mut self, _ident: &ast::Ident) -> Values { todo!() } - fn 
codegen_literal(&mut self, _literal: &ast::Literal) -> Value { + fn codegen_literal(&mut self, _literal: &ast::Literal) -> Values { todo!() } - fn codegen_block(&mut self, _block: &[Expression]) -> Value { + fn codegen_block(&mut self, _block: &[Expression]) -> Values { todo!() } - fn codegen_unary(&mut self, _unary: &ast::Unary) -> Value { + fn codegen_unary(&mut self, _unary: &ast::Unary) -> Values { todo!() } - fn codegen_binary(&mut self, _binary: &ast::Binary) -> Value { + fn codegen_binary(&mut self, _binary: &ast::Binary) -> Values { todo!() } - fn codegen_index(&mut self, _index: &ast::Index) -> Value { + fn codegen_index(&mut self, _index: &ast::Index) -> Values { todo!() } - fn codegen_cast(&mut self, _cast: &ast::Cast) -> Value { + fn codegen_cast(&mut self, _cast: &ast::Cast) -> Values { todo!() } - fn codegen_for(&mut self, _for_expr: &ast::For) -> Value { + fn codegen_for(&mut self, _for_expr: &ast::For) -> Values { todo!() } - fn codegen_if(&mut self, _if_expr: &ast::If) -> Value { + fn codegen_if(&mut self, _if_expr: &ast::If) -> Values { todo!() } - fn codegen_tuple(&mut self, _tuple: &[Expression]) -> Value { + fn codegen_tuple(&mut self, _tuple: &[Expression]) -> Values { todo!() } - fn codegen_extract_tuple_field(&mut self, _tuple: &Expression, _index: usize) -> Value { + fn codegen_extract_tuple_field(&mut self, _tuple: &Expression, _index: usize) -> Values { todo!() } - fn codegen_call(&mut self, _call: &ast::Call) -> Value { + fn codegen_call(&mut self, _call: &ast::Call) -> Values { todo!() } - fn codegen_let(&mut self, _let_expr: &ast::Let) -> Value { + fn codegen_let(&mut self, _let_expr: &ast::Let) -> Values { todo!() } - fn codegen_constrain(&mut self, _constrain: &Expression, _location: Location) -> Value { + fn codegen_constrain(&mut self, _constrain: &Expression, _location: Location) -> Values { todo!() } - fn codegen_assign(&mut self, _assign: &ast::Assign) -> Value { + fn codegen_assign(&mut self, _assign: &ast::Assign) -> Values { 
todo!() } - fn codegen_semi(&mut self, _semi: &Expression) -> Value { + fn codegen_semi(&mut self, _semi: &Expression) -> Values { todo!() } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 785ae3cd8f7..4b41c6ae102 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -1,13 +1,40 @@ use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; -use crate::ssa_refactor::ir::value::ValueId; +use crate::ssa_refactor::ir::value::ValueId as IrValueId; + +pub(super) enum Tree { + Branch(Vec>), + Leaf(T), +} #[derive(Debug, Clone)] pub(super) enum Value { - Normal(ValueId), + Normal(IrValueId), Function(IrFunctionId), - Tuple(Vec), /// Lazily inserting unit values helps prevent cluttering the IR with too many /// unit literals. Unit, } + +pub(super) type Values = Tree; + +impl Tree { + pub(super) fn flatten(self) -> Vec { + match self { + Tree::Branch(values) => values.into_iter().flat_map(Tree::flatten).collect(), + Tree::Leaf(value) => vec![value], + } + } +} + +impl From for Values { + fn from(id: IrValueId) -> Self { + Self::Leaf(Value::Normal(id)) + } +} + +impl From for Value { + fn from(id: IrValueId) -> Self { + Value::Normal(id) + } +} diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index 6a2b97ae19d..e4339c8e367 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -175,12 +175,14 @@ pub enum LValue { MemberAccess { object: Box, field_index: usize }, } +pub type Parameters = Vec<(LocalId, /*mutable:*/ bool, /*name:*/ String, Type)>; + #[derive(Debug, Clone)] pub struct Function { pub id: FuncId, pub name: String, - pub parameters: Vec<(LocalId, /*mutable:*/ bool, /*name:*/ String, Type)>, + pub parameters: Parameters, pub body: Expression, pub return_type: 
Type, From ac87a806a4aa70eddca22587f6cfcb68c8c992e7 Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Mon, 24 Apr 2023 21:20:45 +0100 Subject: [PATCH 03/66] chore(ci): add cache for Nix workflow (#1210) * chore(ci): Add cache for Nix workflow * align lockfile with other projects * Include both flake.lock and cargo.lock in hash --- .github/workflows/test.yml | 28 +++++++++++++++++++++++++++- flake.lock | 24 ++++++++++++------------ flake.nix | 3 +++ 3 files changed, 42 insertions(+), 13 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 220985d8003..29f58ad519c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -30,6 +30,32 @@ jobs: nix_path: nixpkgs=channel:nixos-22.11 github_access_token: ${{ secrets.GITHUB_TOKEN }} + - uses: cachix/cachix-action@v12 + with: + name: barretenberg + + - name: Restore nix store cache + id: nix-store-cache + uses: actions/cache@v3 + with: + path: /tmp/nix-cache + key: ${{ runner.os }}-flake-${{ hashFiles('*.lock') }} + + # Based on https://github.com/marigold-dev/deku/blob/b5016f0cf4bf6ac48db9111b70dd7fb49b969dfd/.github/workflows/build.yml#L26 + - name: Copy cache into nix store + if: steps.nix-store-cache.outputs.cache-hit == 'true' + # We don't check the signature because we're the one that created the cache + run: | + for narinfo in /tmp/nix-cache/*.narinfo; do + path=$(head -n 1 "$narinfo" | awk '{print $2}') + nix copy --no-check-sigs --from "file:///tmp/nix-cache" "$path" + done + - name: Run `nix flake check` run: | - nix flake check + nix flake check -L + + - name: Export cache from nix store + if: steps.nix-store-cache.outputs.cache-hit != 'true' + run: | + nix copy --to "file:///tmp/nix-cache?compression=zstd¶llel-compression=true" .#cargo-artifacts diff --git a/flake.lock b/flake.lock index 04dbc188a52..7c01326f86d 100644 --- a/flake.lock +++ b/flake.lock @@ -10,11 +10,11 @@ ] }, "locked": { - "lastModified": 1682019675, - "narHash": 
"sha256-KZ/VL/u81z2sFTdwfxvUFR+ftqf3+2AA0gR9kkgKxe4=", + "lastModified": 1682345890, + "narHash": "sha256-ZsInK9Iy81MaCugouU3ifa5Vw2GKlJK9MxCU/LF8bIw=", "owner": "AztecProtocol", "repo": "barretenberg", - "rev": "a38e3611590e085e5f25c322757871fb048aa3d7", + "rev": "87aeb375d7b434e0faf47abb79f97753ab760987", "type": "github" }, "original": { @@ -39,11 +39,11 @@ ] }, "locked": { - "lastModified": 1681680516, - "narHash": "sha256-EB8Adaeg4zgcYDJn9sR6UMjN/OHdIiMMK19+3LmmXQY=", + "lastModified": 1681177078, + "narHash": "sha256-ZNIjBDou2GOabcpctiQykEQVkI8BDwk7TyvlWlI4myE=", "owner": "ipetkov", "repo": "crane", - "rev": "54b63c8eae4c50172cb50b612946ff1d2bc1c75c", + "rev": "0c9f468ff00576577d83f5019a66c557ede5acf6", "type": "github" }, "original": { @@ -88,11 +88,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1681932375, - "narHash": "sha256-tSXbYmpnKSSWpzOrs27ie8X3I0yqKA6AuCzCYNtwbCU=", + "lastModified": 1681269223, + "narHash": "sha256-i6OeI2f7qGvmLfD07l1Az5iBL+bFeP0RHixisWtpUGo=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "3d302c67ab8647327dba84fbdb443cdbf0e82744", + "rev": "87edbd74246ccdfa64503f334ed86fa04010bab9", "type": "github" }, "original": { @@ -122,11 +122,11 @@ ] }, "locked": { - "lastModified": 1681957132, - "narHash": "sha256-52GaHyeLyyiT0u4OL3uGbo0vsUMKm33Z3zLkPyK/ZRY=", + "lastModified": 1681352318, + "narHash": "sha256-+kwy7bTsuW8GYrRqWRQ8T5hg6duZb5IJiHlKo1J+v9g=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "4771640d46c214d702512a8ece591f582ae507fa", + "rev": "aeaa11c65a5c5cebaa51652353ab3c497b9a7bbf", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 74f19f612f2..fc8e5fa6868 100644 --- a/flake.nix +++ b/flake.nix @@ -165,6 +165,9 @@ packages.default = noir; + # We expose the `cargo-artifacts` derivation so we can cache our cargo dependencies in CI + packages.cargo-artifacts = cargoArtifacts; + # TODO(#1197): Look into installable apps with Nix flakes # apps.default = flake-utils.lib.mkApp { drv = nargo; }; 
From dc3fb4806564acb6fd6ec46fcdc68dc336da96ba Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 24 Apr 2023 18:19:28 -0400 Subject: [PATCH 04/66] chore(ssa refactor): Handle codegen for literals (#1209) * Add Context structs and start ssa gen pass * Fix block arguments * Fix clippy lint * Use the correct dfg * Rename contexts to highlight the inner contexts are shared rather than used directly * Correctly handle function parameters * Rename Nested to Tree; add comment * Add codegen for literals * PR feedback * chore(ssa refactor): Add debug printing for the new ssa ir (#1211) Implement debug printing for the new ssa ir --- crates/noirc_evaluator/src/ssa_refactor/ir.rs | 2 + .../src/ssa_refactor/ir/basic_block.rs | 22 ++++ .../src/ssa_refactor/ir/constant.rs | 56 +++++++++ .../src/ssa_refactor/ir/dfg.rs | 76 ++++++++++-- .../src/ssa_refactor/ir/function.rs | 15 ++- .../src/ssa_refactor/ir/instruction.rs | 60 ++++++--- .../src/ssa_refactor/ir/map.rs | 54 ++++++++ .../src/ssa_refactor/ir/printer.rs | 115 ++++++++++++++++++ .../src/ssa_refactor/ir/types.rs | 21 ++++ .../src/ssa_refactor/ir/value.rs | 5 +- .../ssa_builder/function_builder.rs | 59 ++++++++- .../src/ssa_refactor/ssa_gen/context.rs | 33 ++--- .../src/ssa_refactor/ssa_gen/mod.rs | 65 +++++++++- .../src/ssa_refactor/ssa_gen/value.rs | 43 ++++++- 14 files changed, 566 insertions(+), 60 deletions(-) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs index ce63bdc7238..851b86e511f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -1,7 +1,9 @@ pub(crate) mod basic_block; +pub(crate) mod constant; pub(crate) mod dfg; pub(crate) mod function; pub(crate) mod instruction; pub(crate) mod map; +pub(crate) mod printer; pub(crate) mod types; pub(crate) mod 
value; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index 431f1647863..13d1b3ca6f8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -52,7 +52,29 @@ impl BasicBlock { self.instructions.push(instruction); } + pub(crate) fn instructions(&self) -> &[InstructionId] { + &self.instructions + } + pub(crate) fn set_terminator(&mut self, terminator: TerminatorInstruction) { self.terminator = Some(terminator); } + + pub(crate) fn terminator(&self) -> Option<&TerminatorInstruction> { + self.terminator.as_ref() + } + + /// Iterate over all the successors of the currently block, as determined by + /// the blocks jumped to in the terminator instruction. If there is no terminator + /// instruction yet, this will iterate 0 times. + pub(crate) fn successors(&self) -> impl ExactSizeIterator { + match &self.terminator { + Some(TerminatorInstruction::Jmp { destination, .. }) => vec![*destination].into_iter(), + Some(TerminatorInstruction::JmpIf { then_destination, else_destination, .. }) => { + vec![*then_destination, *else_destination].into_iter() + } + Some(TerminatorInstruction::Return { .. }) => vec![].into_iter(), + None => vec![].into_iter(), + } + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs new file mode 100644 index 00000000000..6d5538d3410 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs @@ -0,0 +1,56 @@ +use acvm::FieldElement; + +use super::map::Id; + +/// Represents a numeric constant in Ssa. Constants themselves are +/// uniqued in the DataFlowGraph and immutable. +/// +/// This is just a thin wrapper around FieldElement so that +/// we can use Id without it getting confused +/// with a possible future use of Id. 
+#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub(crate) struct NumericConstant(FieldElement); + +impl NumericConstant { + pub(crate) fn new(value: FieldElement) -> Self { + Self(value) + } + + pub(crate) fn value(&self) -> &FieldElement { + &self.0 + } +} + +pub(crate) type NumericConstantId = Id; + +impl std::ops::Add for NumericConstant { + type Output = NumericConstant; + + fn add(self, rhs: Self) -> Self::Output { + Self::new(self.0 + rhs.0) + } +} + +impl std::ops::Sub for NumericConstant { + type Output = NumericConstant; + + fn sub(self, rhs: Self) -> Self::Output { + Self::new(self.0 - rhs.0) + } +} + +impl std::ops::Mul for NumericConstant { + type Output = NumericConstant; + + fn mul(self, rhs: Self) -> Self::Output { + Self::new(self.0 * rhs.0) + } +} + +impl std::ops::Div for NumericConstant { + type Output = NumericConstant; + + fn div(self, rhs: Self) -> Self::Output { + Self::new(self.0 / rhs.0) + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index b456fd08ee4..f92cae79b75 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -1,12 +1,14 @@ use super::{ basic_block::{BasicBlock, BasicBlockId}, + constant::{NumericConstant, NumericConstantId}, function::Signature, instruction::{Instruction, InstructionId}, - map::{DenseMap, Id, SecondaryMap}, + map::{DenseMap, Id, SecondaryMap, TwoWayMap}, types::Type, value::{Value, ValueId}, }; +use acvm::FieldElement; use iter_extended::vecmap; #[derive(Debug, Default)] @@ -20,6 +22,7 @@ impl ValueList { self.0.push(value); self.len() - 1 } + /// Returns the number of values in the list. fn len(&self) -> usize { self.0.len() @@ -29,6 +32,7 @@ impl ValueList { fn clear(&mut self) { self.0.clear(); } + /// Returns the ValueId's as a slice. pub(crate) fn as_slice(&self) -> &[ValueId] { &self.0 @@ -55,6 +59,11 @@ pub(crate) struct DataFlowGraph { /// function. 
values: DenseMap, + /// Storage for all constants used within a function. + /// Each constant is unique, attempting to insert the same constant + /// twice will return the same ConstantId. + constants: TwoWayMap, + /// Function signatures of external methods signatures: DenseMap, @@ -91,27 +100,35 @@ impl DataFlowGraph { } /// Inserts a new instruction into the DFG. + /// This does not add the instruction to the block or populate the instruction's result list pub(crate) fn make_instruction(&mut self, instruction_data: Instruction) -> InstructionId { let id = self.instructions.insert(instruction_data); - // Create a new vector to store the potential results for the instruction. self.results.insert(id, Default::default()); id } + /// Insert a value into the dfg's storage and return an id to reference it. + /// Until the value is used in an instruction it is unreachable. pub(crate) fn make_value(&mut self, value: Value) -> ValueId { self.values.insert(value) } - /// Attaches results to the instruction. + /// Creates a new constant value, or returns the Id to an existing one if + /// one already exists. + pub(crate) fn make_constant(&mut self, value: FieldElement, typ: Type) -> ValueId { + let constant = self.constants.insert(NumericConstant::new(value)); + self.values.insert(Value::NumericConstant { constant, typ }) + } + + /// Attaches results to the instruction, clearing any previous results. /// - /// Returns the number of results that this instruction - /// produces. + /// Returns the results of the instruction pub(crate) fn make_instruction_results( &mut self, instruction_id: InstructionId, ctrl_typevar: Type, - ) -> usize { + ) -> &[ValueId] { // Clear all of the results instructions associated with this // instruction. 
self.results.get_mut(&instruction_id).expect("all instructions should have a `result` allocation when instruction was added to the DFG").clear(); @@ -119,13 +136,14 @@ impl DataFlowGraph { // Get all of the types that this instruction produces // and append them as results. let typs = self.instruction_result_types(instruction_id, ctrl_typevar); - let num_typs = typs.len(); for typ in typs { self.append_result(instruction_id, typ); } - num_typs + self.results.get_mut(&instruction_id) + .expect("all instructions should have a `result` allocation when instruction was added to the DFG") + .as_slice() } /// Return the result types of this instruction. @@ -181,6 +199,42 @@ impl DataFlowGraph { block.add_parameter(parameter); parameter } + + pub(crate) fn insert_instruction_in_block( + &mut self, + block: BasicBlockId, + instruction: InstructionId, + ) { + self.blocks[block].insert_instruction(instruction); + } +} + +impl std::ops::Index for DataFlowGraph { + type Output = Instruction; + fn index(&self, id: InstructionId) -> &Self::Output { + &self.instructions[id] + } +} + +impl std::ops::Index for DataFlowGraph { + type Output = Value; + fn index(&self, id: ValueId) -> &Self::Output { + &self.values[id] + } +} + +impl std::ops::Index for DataFlowGraph { + type Output = NumericConstant; + fn index(&self, id: NumericConstantId) -> &Self::Output { + &self.constants[id] + } +} + +impl std::ops::Index for DataFlowGraph { + type Output = BasicBlock; + fn index(&self, id: BasicBlockId) -> &Self::Output { + &self.blocks[id] + } } #[cfg(test)] @@ -190,19 +244,17 @@ mod tests { instruction::Instruction, types::{NumericType, Type}, }; - use acvm::FieldElement; #[test] fn make_instruction() { let mut dfg = DataFlowGraph::default(); - let ins = Instruction::Immediate { value: FieldElement::from(0u128) }; + let ins = Instruction::Allocate { size: 20 }; let ins_id = dfg.make_instruction(ins); let num_results = - dfg.make_instruction_results(ins_id, 
Type::Numeric(NumericType::NativeField)); + dfg.make_instruction_results(ins_id, Type::Numeric(NumericType::NativeField)).len(); let results = dfg.instruction_results(ins_id); - assert_eq!(results.len(), num_results); } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 1abd6c85367..63cd31142c4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -18,7 +18,10 @@ pub(crate) struct Function { source_locations: SecondaryMap, /// The first basic block in the function - entry_block: BasicBlockId, + pub(super) entry_block: BasicBlockId, + + /// Name of the function for debugging only + pub(super) name: String, pub(crate) dfg: DataFlowGraph, } @@ -27,10 +30,10 @@ impl Function { /// Creates a new function with an automatically inserted entry block. /// /// Note that any parameters to the function must be manually added later. - pub(crate) fn new() -> Self { + pub(crate) fn new(name: String) -> Self { let mut dfg = DataFlowGraph::default(); let entry_block = dfg.new_block(); - Self { source_locations: SecondaryMap::new(), entry_block, dfg } + Self { name, source_locations: SecondaryMap::new(), entry_block, dfg } } pub(crate) fn entry_block(&self) -> BasicBlockId { @@ -47,6 +50,12 @@ pub(crate) struct Signature { pub(crate) returns: Vec, } +impl std::fmt::Display for Function { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + super::printer::display_function(self, f) + } +} + #[test] fn sign_smoke() { let mut signature = Signature::default(); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 81a28b8407c..442f1dbd47e 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,5 +1,3 @@ -use acvm::FieldElement; - use super::{ 
basic_block::BasicBlockId, function::FunctionId, map::Id, types::Type, value::ValueId, }; @@ -17,6 +15,12 @@ pub(crate) type InstructionId = Id; /// of this is println. pub(crate) struct IntrinsicOpcodes; +impl std::fmt::Display for IntrinsicOpcodes { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!("intrinsics have no opcodes yet") + } +} + #[derive(Debug, PartialEq, Eq, Hash, Clone)] /// Instructions are used to perform tasks. /// The instructions that the IR is able to specify are listed below. @@ -38,18 +42,24 @@ pub(crate) enum Instruction { /// Performs a function call with a list of its arguments. Call { func: FunctionId, arguments: Vec }, + /// Performs a call to an intrinsic function and stores the /// results in `return_arguments`. Intrinsic { func: IntrinsicOpcodes, arguments: Vec }, + /// Allocates a region of memory. Note that this is not concerned with + /// the type of memory, the type of element is determined when loading this memory. + /// + /// `size` is the size of the region to be allocated by the number of FieldElements it + /// contains. Note that non-numeric types like Functions and References are counted as 1 field + /// each. + Allocate { size: u32 }, + /// Loads a value from memory. - Load(ValueId), + Load { address: ValueId }, /// Writes a value to memory. - Store { destination: ValueId, value: ValueId }, - - /// Stores an Immediate value - Immediate { value: FieldElement }, + Store { address: ValueId, value: ValueId }, } impl Instruction { @@ -67,28 +77,31 @@ impl Instruction { // This also returns 0, but we could get it a compile time, // since we know the signatures for the intrinsics Instruction::Intrinsic { .. } => 0, - Instruction::Load(_) => 1, + Instruction::Allocate { .. } => 1, + Instruction::Load { .. } => 1, Instruction::Store { .. } => 0, - Instruction::Immediate { .. 
} => 1, } } /// Returns the number of arguments required for a call pub(crate) fn num_fixed_arguments(&self) -> usize { + // Match-all fields syntax (..) is avoided on most cases of this match to ensure that + // if an extra argument is ever added to any of these variants, an error + // is issued pointing to this spot to update it here as well. match self { Instruction::Binary(_) => 2, - Instruction::Cast(..) => 1, + Instruction::Cast(_, _) => 1, Instruction::Not(_) => 1, - Instruction::Truncate { .. } => 1, + Instruction::Truncate { value: _, bit_size: _, max_bit_size: _ } => 1, Instruction::Constrain(_) => 1, // This returns 0 as the arguments depend on the function being called Instruction::Call { .. } => 0, // This also returns 0, but we could get it a compile time, // since we know the function definition for the intrinsics Instruction::Intrinsic { .. } => 0, - Instruction::Load(_) => 1, - Instruction::Store { .. } => 2, - Instruction::Immediate { .. } => 0, + Instruction::Allocate { size: _ } => 1, + Instruction::Load { address: _ } => 1, + Instruction::Store { address: _, value: _ } => 2, } } @@ -102,9 +115,9 @@ impl Instruction { Instruction::Constrain(_) => vec![], Instruction::Call { .. } => vec![], Instruction::Intrinsic { .. } => vec![], - Instruction::Load(_) => vec![ctrl_typevar], + Instruction::Allocate { .. } => vec![Type::Reference], + Instruction::Load { .. } => vec![ctrl_typevar], Instruction::Store { .. } => vec![], - Instruction::Immediate { .. } => vec![], } } } @@ -182,5 +195,18 @@ pub(crate) enum BinaryOp { /// Checks whether two types are equal. /// Returns true if the types were not equal and /// false otherwise. 
- Ne, + Neq, +} + +impl std::fmt::Display for BinaryOp { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + BinaryOp::Add => write!(f, "add"), + BinaryOp::Sub => write!(f, "sub"), + BinaryOp::Mul => write!(f, "mul"), + BinaryOp::Div => write!(f, "div"), + BinaryOp::Eq => write!(f, "eq"), + BinaryOp::Neq => write!(f, "neq"), + } + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index 53a7db3a5d5..bb526076e3b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -1,5 +1,6 @@ use std::{ collections::HashMap, + hash::Hash, sync::atomic::{AtomicUsize, Ordering}, }; @@ -68,6 +69,12 @@ impl std::fmt::Debug for Id { } } +impl std::fmt::Display for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "${}", self.index) + } +} + /// A DenseMap is a Vec wrapper where each element corresponds /// to a unique ID that can be used to access the element. No direct /// access to indices is provided. Since IDs must be stable and correspond @@ -186,6 +193,53 @@ impl std::ops::IndexMut> for SparseMap { } } +/// A TwoWayMap is a map from both key to value and value to key. +/// This is accomplished by keeping the map bijective - for every +/// value there is exactly one key and vice-versa. Any duplicate values +/// are prevented in the call to insert. +#[derive(Debug)] +pub(crate) struct TwoWayMap { + key_to_value: HashMap, T>, + value_to_key: HashMap>, +} + +impl TwoWayMap { + /// Returns the number of elements in the map. + pub(crate) fn len(&self) -> usize { + self.key_to_value.len() + } + + /// Adds an element to the map. + /// Returns the identifier/reference to that element. 
+ pub(crate) fn insert(&mut self, element: T) -> Id { + if let Some(existing) = self.value_to_key.get(&element) { + return *existing; + } + + let id = Id::new(self.key_to_value.len()); + self.key_to_value.insert(id, element.clone()); + self.value_to_key.insert(element, id); + id + } +} + +impl Default for TwoWayMap { + fn default() -> Self { + Self { key_to_value: HashMap::new(), value_to_key: HashMap::new() } + } +} + +// Note that there is no impl for IndexMut>, +// if we allowed mutable access to map elements they may be +// mutated such that elements are no longer unique +impl std::ops::Index> for TwoWayMap { + type Output = T; + + fn index(&self, id: Id) -> &Self::Output { + &self.key_to_value[&id] + } +} + /// A SecondaryMap is for storing secondary data for a given key. Since this /// map is for secondary data, it will not return fresh Ids for data, instead /// it expects users to provide these ids in order to associate existing ids with diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs new file mode 100644 index 00000000000..1a7737e97b0 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -0,0 +1,115 @@ +//! This file is for pretty-printing the SSA IR in a human-readable form for debugging. 
+use std::fmt::{Formatter, Result}; + +use iter_extended::vecmap; + +use super::{ + basic_block::BasicBlockId, + function::Function, + instruction::{Instruction, InstructionId, TerminatorInstruction}, + value::ValueId, +}; + +pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { + writeln!(f, "fn {} {{", function.name)?; + display_block_with_successors(function, function.entry_block, f)?; + write!(f, "}}") +} + +pub(crate) fn display_block_with_successors( + function: &Function, + block_id: BasicBlockId, + f: &mut Formatter, +) -> Result { + display_block(function, block_id, f)?; + + for successor in function.dfg[block_id].successors() { + display_block(function, successor, f)?; + } + Ok(()) +} + +pub(crate) fn display_block( + function: &Function, + block_id: BasicBlockId, + f: &mut Formatter, +) -> Result { + let block = &function.dfg[block_id]; + + writeln!(f, "{}({}):", block_id, value_list(block.parameters()))?; + + for instruction in block.instructions() { + display_instruction(function, *instruction, f)?; + } + + display_terminator(block.terminator(), f) +} + +fn value_list(values: &[ValueId]) -> String { + vecmap(values, ToString::to_string).join(", ") +} + +pub(crate) fn display_terminator( + terminator: Option<&TerminatorInstruction>, + f: &mut Formatter, +) -> Result { + match terminator { + Some(TerminatorInstruction::Jmp { destination, arguments }) => { + writeln!(f, " jmp {}({})", destination, value_list(arguments)) + } + Some(TerminatorInstruction::JmpIf { + condition, + arguments, + then_destination, + else_destination, + }) => { + let args = value_list(arguments); + writeln!( + f, + " jmpif {}({}) then: {}, else: {}", + condition, args, then_destination, else_destination + ) + } + Some(TerminatorInstruction::Return { return_values }) => { + writeln!(f, " return {}", value_list(return_values)) + } + None => writeln!(f, " (no terminator instruction)"), + } +} + +pub(crate) fn display_instruction( + function: &Function, + 
instruction: InstructionId, + f: &mut Formatter, +) -> Result { + // instructions are always indented within a function + write!(f, " ")?; + + let results = function.dfg.instruction_results(instruction); + if !results.is_empty() { + write!(f, "{} = ", value_list(results))?; + } + + match &function.dfg[instruction] { + Instruction::Binary(binary) => { + writeln!(f, "{} {}, {}", binary.operator, binary.lhs, binary.rhs) + } + Instruction::Cast(value, typ) => writeln!(f, "cast {value} as {typ}"), + Instruction::Not(value) => writeln!(f, "not {value}"), + Instruction::Truncate { value, bit_size, max_bit_size } => { + writeln!(f, "truncate {value} to {bit_size} bits, max_bit_size: {max_bit_size}") + } + Instruction::Constrain(value) => { + writeln!(f, "constrain {value}") + } + Instruction::Call { func, arguments } => { + writeln!(f, "call {func}({})", value_list(arguments)) + } + Instruction::Intrinsic { func, arguments } => { + writeln!(f, "intrinsic {func}({})", value_list(arguments)) + } + Instruction::Allocate { size } => writeln!(f, "alloc {size} fields"), + Instruction::Load { address } => writeln!(f, "load {address}"), + Instruction::Store { address, value } => writeln!(f, "store {value} at {address}"), + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs index e1f8e8a74d2..888d7d128d1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -42,3 +42,24 @@ impl Type { Type::Numeric(NumericType::NativeField) } } + +impl std::fmt::Display for Type { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Type::Numeric(numeric) => numeric.fmt(f), + Type::Reference => write!(f, "reference"), + Type::Function => write!(f, "function"), + Type::Unit => write!(f, "unit"), + } + } +} + +impl std::fmt::Display for NumericType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + 
match self { + NumericType::Signed { bit_size } => write!(f, "i{bit_size}"), + NumericType::Unsigned { bit_size } => write!(f, "u{bit_size}"), + NumericType::NativeField => write!(f, "Field"), + } + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index 38ca8b12c40..537eabb0cab 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -1,6 +1,6 @@ use crate::ssa_refactor::ir::basic_block::BasicBlockId; -use super::{instruction::InstructionId, map::Id, types::Type}; +use super::{constant::NumericConstantId, instruction::InstructionId, map::Id, types::Type}; pub(crate) type ValueId = Id; @@ -24,4 +24,7 @@ pub(crate) enum Value { /// /// position -- the index of this Value in the block parameters list Param { block: BasicBlockId, position: usize, typ: Type }, + + /// This Value originates from a numeric constant + NumericConstant { constant: NumericConstantId, typ: Type }, } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index 5e82226d3be..c76d2943abe 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -1,6 +1,9 @@ +use acvm::FieldElement; + use crate::ssa_refactor::ir::{ basic_block::BasicBlockId, function::{Function, FunctionId}, + instruction::{Binary, BinaryOp, Instruction, InstructionId}, types::Type, value::ValueId, }; @@ -26,8 +29,8 @@ pub(crate) struct FunctionBuilder<'ssa> { } impl<'ssa> FunctionBuilder<'ssa> { - pub(crate) fn new(context: &'ssa SharedBuilderContext) -> Self { - let new_function = Function::new(); + pub(crate) fn new(function_name: String, context: &'ssa SharedBuilderContext) -> Self { + let new_function = Function::new(function_name); let current_block = 
new_function.entry_block(); Self { @@ -40,8 +43,8 @@ impl<'ssa> FunctionBuilder<'ssa> { } /// Finish the current function and create a new function - pub(crate) fn new_function(&mut self) { - let new_function = Function::new(); + pub(crate) fn new_function(&mut self, name: String) { + let new_function = Function::new(name); let old_function = std::mem::replace(&mut self.current_function, new_function); self.finished_functions.push((self.current_function_id, old_function)); @@ -57,4 +60,52 @@ impl<'ssa> FunctionBuilder<'ssa> { let entry = self.current_function.entry_block(); self.current_function.dfg.add_block_parameter(entry, typ) } + + /// Insert a numeric constant into the current function + pub(crate) fn numeric_constant(&mut self, value: FieldElement, typ: Type) -> ValueId { + self.current_function.dfg.make_constant(value, typ) + } + + /// Insert a numeric constant into the current function of type Field + pub(crate) fn field_constant(&mut self, value: impl Into) -> ValueId { + self.numeric_constant(value.into(), Type::field()) + } + + fn insert_instruction(&mut self, instruction: Instruction) -> InstructionId { + let id = self.current_function.dfg.make_instruction(instruction); + self.current_function.dfg.insert_instruction_in_block(self.current_block, id); + id + } + + /// Insert an allocate instruction at the end of the current block, allocating the + /// given amount of field elements. Returns the result of the allocate instruction, + /// which is always a Reference to the allocated data. + pub(crate) fn insert_allocate(&mut self, size_to_allocate: u32) -> ValueId { + let id = self.insert_instruction(Instruction::Allocate { size: size_to_allocate }); + self.current_function.dfg.make_instruction_results(id, Type::Reference)[0] + } + + /// Insert a Load instruction at the end of the current block, loading from the given address + /// which should point to a previous Allocate instruction. Note that this is limited to loading + /// a single value. 
Loading multiple values (such as a tuple) will require multiple loads. + /// Returns the element that was loaded. + pub(crate) fn insert_load(&mut self, address: ValueId, type_to_load: Type) -> ValueId { + let id = self.insert_instruction(Instruction::Load { address }); + self.current_function.dfg.make_instruction_results(id, type_to_load)[0] + } + + /// Insert a Store instruction at the end of the current block, storing the given element + /// at the given address. Expects that the address points to a previous Allocate instruction. + pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { + self.insert_instruction(Instruction::Store { address, value }); + } + + /// Insert a Store instruction at the end of the current block, storing the given element + /// at the given address. Expects that the address points to a previous Allocate instruction. + /// Returns the result of the add instruction. + pub(crate) fn insert_add(&mut self, lhs: ValueId, rhs: ValueId, typ: Type) -> ValueId { + let operator = BinaryOp::Add; + let id = self.insert_instruction(Instruction::Binary(Binary { lhs, rhs, operator })); + self.current_function.dfg.make_instruction_results(id, typ)[0] + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 02bfee8a87f..32133feea13 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -19,7 +19,7 @@ type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; pub(super) struct FunctionContext<'a> { definitions: HashMap, - function_builder: FunctionBuilder<'a>, + pub(super) builder: FunctionBuilder<'a>, shared_context: &'a SharedContext, } @@ -32,22 +32,23 @@ pub(super) struct SharedContext { impl<'a> FunctionContext<'a> { pub(super) fn new( + function_name: String, parameters: &Parameters, shared_context: &'a SharedContext, shared_builder_context: &'a 
SharedBuilderContext, ) -> Self { let mut this = Self { definitions: HashMap::new(), - function_builder: FunctionBuilder::new(shared_builder_context), + builder: FunctionBuilder::new(function_name, shared_builder_context), shared_context, }; this.add_parameters_to_scope(parameters); this } - pub(super) fn new_function(&mut self, parameters: &Parameters) { + pub(super) fn new_function(&mut self, name: String, parameters: &Parameters) { self.definitions.clear(); - self.function_builder.new_function(); + self.builder.new_function(name); self.add_parameters_to_scope(parameters); } @@ -67,8 +68,8 @@ impl<'a> FunctionContext<'a> { /// into a new parameter for each field recursively. fn add_parameter_to_scope(&mut self, parameter_id: LocalId, parameter_type: &ast::Type) { // Add a separate parameter for each field type in 'parameter_type' - let parameter_value = self - .map_type(parameter_type, |this, typ| this.function_builder.add_parameter(typ).into()); + let parameter_value = + self.map_type(parameter_type, |this, typ| this.builder.add_parameter(typ).into()); self.definitions.insert(parameter_id, parameter_value); } @@ -82,24 +83,28 @@ impl<'a> FunctionContext<'a> { typ: &ast::Type, mut f: impl FnMut(&mut Self, Type) -> T, ) -> Tree { - self.map_type_helper(typ, &mut f) + Self::map_type_helper(typ, &mut |typ| f(self, typ)) } // This helper is needed because we need to take f by mutable reference, // otherwise we cannot move it multiple times each loop of vecmap. 
- fn map_type_helper( - &mut self, - typ: &ast::Type, - f: &mut impl FnMut(&mut Self, Type) -> T, - ) -> Tree { + fn map_type_helper(typ: &ast::Type, f: &mut impl FnMut(Type) -> T) -> Tree { match typ { ast::Type::Tuple(fields) => { - Tree::Branch(vecmap(fields, |field| self.map_type_helper(field, f))) + Tree::Branch(vecmap(fields, |field| Self::map_type_helper(field, f))) } - other => Tree::Leaf(f(self, Self::convert_non_tuple_type(other))), + other => Tree::Leaf(f(Self::convert_non_tuple_type(other))), } } + /// Convert a monomorphized type to an SSA type, preserving the structure + /// of any tuples within. + pub(super) fn convert_type(typ: &ast::Type) -> Tree { + // Do nothing in the closure here - map_type_helper already calls + // convert_non_tuple_type internally. + Self::map_type_helper(typ, &mut |x| x) + } + pub(super) fn convert_non_tuple_type(typ: &ast::Type) -> Type { match typ { ast::Type::Field => Type::field(), diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index c340b45eb9b..2f9c6646282 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -1,27 +1,33 @@ mod context; mod value; +use acvm::FieldElement; use context::SharedContext; +use iter_extended::vecmap; use noirc_errors::Location; use noirc_frontend::monomorphization::ast::{self, Expression, Program}; -use self::{context::FunctionContext, value::Values}; +use self::{ + context::FunctionContext, + value::{Tree, Values}, +}; -use super::ssa_builder::SharedBuilderContext; +use super::{ir::types::Type, ssa_builder::SharedBuilderContext}; pub(crate) fn generate_ssa(program: Program) { let context = SharedContext::new(program); let builder_context = SharedBuilderContext::default(); let main = context.program.main(); + let mut function_context = + FunctionContext::new(main.name.clone(), &main.parameters, &context, &builder_context); - let mut 
function_context = FunctionContext::new(&main.parameters, &context, &builder_context); function_context.codegen_expression(&main.body); while let Some((src_function_id, _new_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; // TODO: Need to ensure/assert the new function's id == new_id - function_context.new_function(&function.parameters); + function_context.new_function(function.name.clone(), &function.parameters); function_context.codegen_expression(&function.body); } } @@ -56,8 +62,55 @@ impl<'a> FunctionContext<'a> { todo!() } - fn codegen_literal(&mut self, _literal: &ast::Literal) -> Values { - todo!() + fn codegen_literal(&mut self, literal: &ast::Literal) -> Values { + match literal { + ast::Literal::Array(array) => { + let elements = vecmap(&array.contents, |element| self.codegen_expression(element)); + let element_type = Self::convert_type(&array.element_type); + self.codegen_array(elements, element_type) + } + ast::Literal::Integer(value, typ) => { + let typ = Self::convert_non_tuple_type(typ); + self.builder.numeric_constant(*value, typ).into() + } + ast::Literal::Bool(value) => { + // Booleans are represented as u1s with 0 = false, 1 = true + let typ = Type::unsigned(1); + let value = FieldElement::from(*value as u128); + self.builder.numeric_constant(value, typ).into() + } + ast::Literal::Str(string) => { + let elements = vecmap(string.as_bytes(), |byte| { + let value = FieldElement::from(*byte as u128); + self.builder.numeric_constant(value, Type::field()).into() + }); + self.codegen_array(elements, Tree::Leaf(Type::field())) + } + } + } + + fn codegen_array(&mut self, elements: Vec, element_type: Tree) -> Values { + let size = element_type.size_of_type() * elements.len(); + let array = self.builder.insert_allocate(size.try_into().unwrap_or_else(|_| { + panic!("Cannot allocate {size} bytes for array, it does not fit into a u32") + })); + + // Now we must manually store all the elements into the array + 
let mut i = 0; + for element in elements { + element.for_each(|value| { + let address = if i == 0 { + array + } else { + let offset = self.builder.numeric_constant((i as u128).into(), Type::field()); + self.builder.insert_add(array, offset, Type::field()) + }; + self.builder.insert_store(address, value.eval()); + i += 1; + }); + } + + array.into() } fn codegen_block(&mut self, _block: &[Expression]) -> Values { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 4b41c6ae102..c3911d367c1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -1,4 +1,5 @@ use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; +use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; pub(super) enum Tree { @@ -10,10 +11,18 @@ pub(super) enum Tree { pub(super) enum Value { Normal(IrValueId), Function(IrFunctionId), +} - /// Lazily inserting unit values helps prevent cluttering the IR with too many - /// unit literals. - Unit, +impl Value { + /// Evaluate a value, returning an IrValue from it. + /// This has no effect on Value::Normal, but any variables will be updated with their latest + /// use. + pub(super) fn eval(self) -> IrValueId { + match self { + Value::Normal(value) => value, + Value::Function(_) => panic!("Tried to evaluate a function value"), + } + } } pub(super) type Values = Tree; @@ -25,6 +34,25 @@ impl Tree { Tree::Leaf(value) => vec![value], } } + + pub(super) fn count_leaves(&self) -> usize { + match self { + Tree::Branch(trees) => trees.iter().map(|tree| tree.count_leaves()).sum(), + Tree::Leaf(_) => 1, + } + } + + /// Iterates over each Leaf node, calling f on each value within. 
+ pub(super) fn for_each(self, mut f: impl FnMut(T)) { + self.for_each_helper(&mut f); + } + + fn for_each_helper(self, f: &mut impl FnMut(T)) { + match self { + Tree::Branch(trees) => trees.into_iter().for_each(|tree| tree.for_each_helper(f)), + Tree::Leaf(value) => f(value), + } + } } impl From for Values { @@ -38,3 +66,12 @@ impl From for Value { Value::Normal(id) } } + +// Specialize this impl just to give a better name for this function +impl Tree { + /// Returns the size of the type in terms of the number of FieldElements it contains. + /// Non-field types like functions and references are also counted as 1 FieldElement. + pub(super) fn size_of_type(&self) -> usize { + self.count_leaves() + } +} From f3fe1218bd4d41c0d459ea7af0105ad45f14e9e3 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Tue, 25 Apr 2023 14:49:49 +0100 Subject: [PATCH 05/66] chore(ssa): Add intial control flow graph (#1200) * Add Context structs and start ssa gen pass * Fix block arguments * Fix clippy lint * chore(ssa): cfg * Use the correct dfg * Rename contexts to highlight the inner contexts are shared rather than used directly * Correctly handle function parameters * Rename Nested to Tree; add comment * chore(ssa refactor): fix up merge regressions * chore(ssa refactor): tidy up * chore(ssa refactor): rm iterator type aliases * chore(ssa refactor): handle return inst gets blocks via dfg * chore(ssa refactor): cfg tests * chore(ssa refactor): add cfg test comments * chore(ssa refactor): cfg - merge related fixes * chore(ssa refactor): fix cfg tests --------- Co-authored-by: Jake Fecher --- crates/noirc_evaluator/src/ssa_refactor/ir.rs | 2 + .../ssa_refactor/ir/basic_block_visitors.rs | 23 ++ .../src/ssa_refactor/ir/cfg.rs | 251 ++++++++++++++++++ .../src/ssa_refactor/ir/dfg.rs | 17 ++ .../src/ssa_refactor/ir/map.rs | 8 + 5 files changed, 301 insertions(+) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs 
create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs index 851b86e511f..1a1ca9eab89 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -1,4 +1,6 @@ pub(crate) mod basic_block; +pub(crate) mod basic_block_visitors; +pub(crate) mod cfg; pub(crate) mod constant; pub(crate) mod dfg; pub(crate) mod function; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs new file mode 100644 index 00000000000..e0d5dc1b3df --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs @@ -0,0 +1,23 @@ +use super::{ + basic_block::{BasicBlock, BasicBlockId}, + instruction::TerminatorInstruction, +}; + +/// Visit all successors of a block with a given visitor closure. The closure +/// arguments are the branch instruction that is used to reach the successor, +/// and the id of the successor block itself. +pub(crate) fn visit_block_succs(basic_block: &BasicBlock, mut visit: F) { + match basic_block + .terminator() + .expect("ICE: No terminator indicates block is still under construction.") + { + TerminatorInstruction::Jmp { destination, .. } => visit(*destination), + TerminatorInstruction::JmpIf { then_destination, else_destination, .. } => { + visit(*then_destination); + visit(*else_destination); + } + TerminatorInstruction::Return { .. 
} => { + // The last block of the control flow - no successors + } + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs new file mode 100644 index 00000000000..05b64e30ed8 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs @@ -0,0 +1,251 @@ +use std::collections::{HashMap, HashSet}; + +use super::{ + basic_block::{BasicBlock, BasicBlockId}, + basic_block_visitors, + function::Function, +}; + +/// A container for the successors and predecessors of some Block. +#[derive(Clone, Default)] +struct CfgNode { + /// Set of blocks that containing jumps that target this block. + /// The predecessor set has no meaningful order. + pub(crate) predecessors: HashSet, + + /// Set of blocks that are the targets of jumps in this block. + /// The successors set has no meaningful order. + pub(crate) successors: HashSet, +} + +/// The Control Flow Graph maintains a mapping of blocks to their predecessors +/// and successors where predecessors are basic blocks and successors are +/// basic blocks. +pub(crate) struct ControlFlowGraph { + data: HashMap, +} + +impl ControlFlowGraph { + /// Allocate and compute the control flow graph for `func`. 
+ pub(crate) fn with_function(func: &Function) -> Self { + let mut cfg = ControlFlowGraph { data: HashMap::new() }; + cfg.compute(func); + cfg + } + + fn compute(&mut self, func: &Function) { + for (basic_block_id, basic_block) in func.dfg.basic_blocks_iter() { + self.compute_block(basic_block_id, basic_block); + } + } + + fn compute_block(&mut self, basic_block_id: BasicBlockId, basic_block: &BasicBlock) { + basic_block_visitors::visit_block_succs(basic_block, |dest| { + self.add_edge(basic_block_id, dest); + }); + } + + fn invalidate_block_successors(&mut self, basic_block_id: BasicBlockId) { + let node = self + .data + .get_mut(&basic_block_id) + .expect("ICE: Attempted to invalidate cfg node successors for non-existent node."); + let old_successors = node.successors.clone(); + node.successors.clear(); + for successor_id in old_successors { + self.data + .get_mut(&successor_id) + .expect("ICE: Cfg node successor doesn't exist.") + .predecessors + .remove(&basic_block_id); + } + } + + /// Recompute the control flow graph of `block`. + /// + /// This is for use after modifying instructions within a specific block. It recomputes all edges + /// from `basic_block_id` while leaving edges to `basic_block_id` intact. 
+ pub(crate) fn recompute_block(&mut self, func: &Function, basic_block_id: BasicBlockId) { + self.invalidate_block_successors(basic_block_id); + let basic_block = &func.dfg[basic_block_id]; + self.compute_block(basic_block_id, basic_block); + } + + fn add_edge(&mut self, from: BasicBlockId, to: BasicBlockId) { + let predecessor_node = self.data.entry(from).or_default(); + assert!( + predecessor_node.successors.len() < 2, + "ICE: A cfg node cannot have more than two successors" + ); + predecessor_node.successors.insert(to); + let successor_node = self.data.entry(to).or_default(); + assert!( + successor_node.predecessors.len() < 2, + "ICE: A cfg node cannot have more than two predecessors" + ); + successor_node.predecessors.insert(from); + } + + /// Get an iterator over the CFG predecessors to `basic_block_id`. + pub(crate) fn pred_iter( + &self, + basic_block_id: BasicBlockId, + ) -> impl ExactSizeIterator + '_ { + self.data + .get(&basic_block_id) + .expect("ICE: Attempted to iterate predecessors of block not found within cfg.") + .predecessors + .iter() + .copied() + } + + /// Get an iterator over the CFG successors to `basic_block_id`. 
+ pub(crate) fn succ_iter( + &self, + basic_block_id: BasicBlockId, + ) -> impl ExactSizeIterator + '_ { + self.data + .get(&basic_block_id) + .expect("ICE: Attempted to iterate successors of block not found within cfg.") + .successors + .iter() + .copied() + } +} + +#[cfg(test)] +mod tests { + use crate::ssa_refactor::ir::{instruction::TerminatorInstruction, types::Type}; + + use super::{super::function::Function, ControlFlowGraph}; + + #[test] + fn empty() { + let mut func = Function::new("func".into()); + let block_id = func.entry_block(); + func.dfg[block_id].set_terminator(TerminatorInstruction::Return { return_values: vec![] }); + + ControlFlowGraph::with_function(&func); + } + + #[test] + fn jumps() { + // Build function of form + // fn func { + // block0(cond: u1): + // jmpif cond(), then: block2, else: block1 + // block1(): + // jmpif cond(), then: block1, else: block2 + // block2(): + // return + // } + let mut func = Function::new("func".into()); + let block0_id = func.entry_block(); + let cond = func.dfg.add_block_parameter(block0_id, Type::unsigned(1)); + let block1_id = func.dfg.new_block(); + let block2_id = func.dfg.new_block(); + + func.dfg[block0_id].set_terminator(TerminatorInstruction::JmpIf { + condition: cond, + then_destination: block2_id, + else_destination: block1_id, + arguments: vec![], + }); + func.dfg[block1_id].set_terminator(TerminatorInstruction::JmpIf { + condition: cond, + then_destination: block1_id, + else_destination: block2_id, + arguments: vec![], + }); + func.dfg[block2_id].set_terminator(TerminatorInstruction::Return { return_values: vec![] }); + + let mut cfg = ControlFlowGraph::with_function(&func); + + { + let block0_predecessors = cfg.pred_iter(block0_id).collect::>(); + let block1_predecessors = cfg.pred_iter(block1_id).collect::>(); + let block2_predecessors = cfg.pred_iter(block2_id).collect::>(); + + let block0_successors = cfg.succ_iter(block0_id).collect::>(); + let block1_successors = 
cfg.succ_iter(block1_id).collect::>(); + let block2_successors = cfg.succ_iter(block2_id).collect::>(); + + assert_eq!(block0_predecessors.len(), 0); + assert_eq!(block1_predecessors.len(), 2); + assert_eq!(block2_predecessors.len(), 2); + + assert_eq!(block1_predecessors.contains(&block0_id), true); + assert_eq!(block1_predecessors.contains(&block1_id), true); + assert_eq!(block2_predecessors.contains(&block0_id), true); + assert_eq!(block2_predecessors.contains(&block1_id), true); + + assert_eq!(block0_successors.len(), 2); + assert_eq!(block1_successors.len(), 2); + assert_eq!(block2_successors.len(), 0); + + assert_eq!(block0_successors.contains(&block1_id), true); + assert_eq!(block0_successors.contains(&block2_id), true); + assert_eq!(block1_successors.contains(&block1_id), true); + assert_eq!(block1_successors.contains(&block2_id), true); + } + + // Modify function to form: + // fn func { + // block0(cond: u1): + // jmpif cond(), then: block1, else: ret_block + // block1(): + // jmpif cond(), then: block1, else: block2 + // block2(): + // jmp ret_block + // ret_block(): + // return + // } + let ret_block_id = func.dfg.new_block(); + func.dfg[ret_block_id] + .set_terminator(TerminatorInstruction::Return { return_values: vec![] }); + func.dfg[block2_id].set_terminator(TerminatorInstruction::Jmp { + destination: ret_block_id, + arguments: vec![], + }); + func.dfg[block0_id].set_terminator(TerminatorInstruction::JmpIf { + condition: cond, + then_destination: block1_id, + else_destination: ret_block_id, + arguments: vec![], + }); + + // Recompute new and changed blocks + cfg.recompute_block(&mut func, block0_id); + cfg.recompute_block(&mut func, block2_id); + cfg.recompute_block(&mut func, ret_block_id); + + { + let block0_predecessors = cfg.pred_iter(block0_id).collect::>(); + let block1_predecessors = cfg.pred_iter(block1_id).collect::>(); + let block2_predecessors = cfg.pred_iter(block2_id).collect::>(); + + let block0_successors = 
cfg.succ_iter(block0_id).collect::>(); + let block1_successors = cfg.succ_iter(block1_id).collect::>(); + let block2_successors = cfg.succ_iter(block2_id).collect::>(); + + assert_eq!(block0_predecessors.len(), 0); + assert_eq!(block1_predecessors.len(), 2); + assert_eq!(block2_predecessors.len(), 1); + + assert_eq!(block1_predecessors.contains(&block0_id), true); + assert_eq!(block1_predecessors.contains(&block1_id), true); + assert_eq!(block2_predecessors.contains(&block0_id), false); + assert_eq!(block2_predecessors.contains(&block1_id), true); + + assert_eq!(block0_successors.len(), 2); + assert_eq!(block1_successors.len(), 2); + assert_eq!(block2_successors.len(), 1); + + assert_eq!(block0_successors.contains(&block1_id), true); + assert_eq!(block0_successors.contains(&ret_block_id), true); + assert_eq!(block1_successors.contains(&block1_id), true); + assert_eq!(block1_successors.contains(&block2_id), true); + assert_eq!(block2_successors.contains(&ret_block_id), true); + } + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index f92cae79b75..c21fc2c3f35 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -95,6 +95,16 @@ impl DataFlowGraph { }) } + /// Get an iterator over references to each basic block within the dfg, paired with the basic + /// block's id. + /// + /// The pairs are order by id, which is not guaranteed to be meaningful. + pub(crate) fn basic_blocks_iter( + &self, + ) -> impl ExactSizeIterator { + self.blocks.iter() + } + pub(crate) fn block_parameters(&self, block: BasicBlockId) -> &[ValueId] { self.blocks[block].parameters() } @@ -237,6 +247,13 @@ impl std::ops::Index for DataFlowGraph { } } +impl std::ops::IndexMut for DataFlowGraph { + /// Get a mutable reference to a function's basic block for the given id. 
+ fn index_mut(&mut self, id: BasicBlockId) -> &mut BasicBlock { + &mut self.blocks[id] + } +} + #[cfg(test)] mod tests { use super::DataFlowGraph; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index bb526076e3b..5937b374726 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -106,6 +106,14 @@ impl DenseMap { self.storage.push(f(id)); id } + + /// Gets an iterator to a reference to each element in the dense map paired with its id. + /// + /// The id-element pairs are ordered by the numeric values of the ids. + pub(crate) fn iter(&self) -> impl ExactSizeIterator, &T)> { + let ids_iter = (0..self.storage.len()).into_iter().map(|idx| Id::new(idx)); + ids_iter.zip(self.storage.iter()) + } } impl Default for DenseMap { From 573966db3e71d2cc69e744e87052bd5c9448a2e2 Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Tue, 25 Apr 2023 16:40:54 +0100 Subject: [PATCH 06/66] chore: add RUST_BACKTRACE environment variable to nix config (#1216) --- flake.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/flake.nix b/flake.nix index fc8e5fa6868..28859ebb2b9 100644 --- a/flake.nix +++ b/flake.nix @@ -79,6 +79,9 @@ # hidden from the developer - i.e. 
when they see the command being run via `nix flake check` RUST_TEST_THREADS = "1"; + # We enable backtraces on any failure for help with debugging + RUST_BACKTRACE = "1"; + # We set the environment variable because barretenberg must be compiled in a special way for wasm BARRETENBERG_BIN_DIR = "${pkgs.barretenberg-wasm}/bin"; From 27b8bf8ca6a60ba5882525f645ace2fdde4b2a41 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 25 Apr 2023 14:09:33 -0400 Subject: [PATCH 07/66] chore(ssa refactor): Implement ssa-gen for binary, block, tuple, extract-tuple-field, and semi expressions (#1217) * Implement binary instructions * Cleanup PR --- .../src/ssa_refactor/ir/instruction.rs | 51 ++++++++----- .../ssa_builder/function_builder.rs | 21 +++-- .../src/ssa_refactor/ssa_gen/context.rs | 76 +++++++++++++++++++ .../src/ssa_refactor/ssa_gen/mod.rs | 50 +++++++++--- .../src/ssa_refactor/ssa_gen/value.rs | 1 + 5 files changed, 165 insertions(+), 34 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 442f1dbd47e..9b5aeb9388c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -170,32 +170,43 @@ pub(crate) struct Binary { } /// Binary Operations allowed in the IR. +/// Aside from the comparison operators (Eq and Lt), all operators +/// will return the same type as their operands. +/// The operand types must match for all binary operators. +/// All binary operators are also only for numeric types. To implement +/// e.g. equality for a compound type like a struct, one must add a +/// separate Eq operation for each field and combine them later with And. #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub(crate) enum BinaryOp { - /// Addition of two types. - /// The result will have the same type as - /// the operands. + /// Addition of lhs + rhs. Add, - /// Subtraction of two types. 
- /// The result will have the same type as - /// the operands. + /// Subtraction of lhs - rhs. Sub, - /// Multiplication of two types. - /// The result will have the same type as - /// the operands. + /// Multiplication of lhs * rhs. Mul, - /// Division of two types. - /// The result will have the same type as - /// the operands. + /// Division of lhs / rhs. Div, + /// Modulus of lhs % rhs. + Mod, /// Checks whether two types are equal. /// Returns true if the types were equal and /// false otherwise. Eq, - /// Checks whether two types are equal. - /// Returns true if the types were not equal and - /// false otherwise. - Neq, + /// Checks whether the lhs is less than the rhs. + /// All other comparison operators should be translated + /// to less than. For example (a > b) = (b < a) = !(a >= b) = !(b <= a). + /// The result will always be a u1. + Lt, + /// Bitwise and (&) + And, + /// Bitwise or (|) + Or, + /// Bitwise xor (^) + Xor, + /// Shift lhs left by rhs bits (<<) + Shl, + /// Shift lhs right by rhs bits (>>) + Shr, } impl std::fmt::Display for BinaryOp { @@ -206,7 +217,13 @@ impl std::fmt::Display for BinaryOp { BinaryOp::Mul => write!(f, "mul"), BinaryOp::Div => write!(f, "div"), BinaryOp::Eq => write!(f, "eq"), - BinaryOp::Neq => write!(f, "neq"), + BinaryOp::Mod => write!(f, "mod"), + BinaryOp::Lt => write!(f, "lt"), + BinaryOp::And => write!(f, "and"), + BinaryOp::Or => write!(f, "or"), + BinaryOp::Xor => write!(f, "xor"), + BinaryOp::Shl => write!(f, "shl"), + BinaryOp::Shr => write!(f, "shr"), } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index c76d2943abe..7911aa2988a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -100,12 +100,23 @@ impl<'ssa> FunctionBuilder<'ssa> { self.insert_instruction(Instruction::Store { 
address, value }); } - /// Insert a Store instruction at the end of the current block, storing the given element - /// at the given address. Expects that the address points to a previous Allocate instruction. - /// Returns the result of the add instruction. - pub(crate) fn insert_add(&mut self, lhs: ValueId, rhs: ValueId, typ: Type) -> ValueId { - let operator = BinaryOp::Add; + /// Insert a binary instruction at the end of the current block. + /// Returns the result of the binary instruction. + pub(crate) fn insert_binary( + &mut self, + lhs: ValueId, + operator: BinaryOp, + rhs: ValueId, + typ: Type, + ) -> ValueId { let id = self.insert_instruction(Instruction::Binary(Binary { lhs, rhs, operator })); self.current_function.dfg.make_instruction_results(id, typ)[0] } + + /// Insert a not instruction at the end of the current block. + /// Returns the result of the instruction. + pub(crate) fn insert_not(&mut self, rhs: ValueId, typ: Type) -> ValueId { + let id = self.insert_instruction(Instruction::Not(rhs)); + self.current_function.dfg.make_instruction_results(id, typ)[0] + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 32133feea13..8f7b4e3de9a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -6,7 +6,9 @@ use noirc_frontend::monomorphization::ast::{self, LocalId, Parameters}; use noirc_frontend::monomorphization::ast::{FuncId, Program}; use noirc_frontend::Signedness; +use crate::ssa_refactor::ir::instruction::BinaryOp; use crate::ssa_refactor::ir::types::Type; +use crate::ssa_refactor::ir::value::ValueId; use crate::ssa_refactor::ssa_builder::SharedBuilderContext; use crate::ssa_refactor::{ ir::function::FunctionId as IrFunctionId, ssa_builder::function_builder::FunctionBuilder, @@ -123,6 +125,80 @@ impl<'a> FunctionContext<'a> { ast::Type::Vec(_) => Type::Reference, } } + + /// 
Insert a unit constant into the current function if not already + /// present, and return its value + pub(super) fn unit_value(&mut self) -> Values { + self.builder.numeric_constant(0u128.into(), Type::Unit).into() + } + + /// Insert a binary instruction at the end of the current block. + /// Converts the form of the binary instruction as necessary + /// (e.g. swapping arguments, inserting a not) to represent it in the IR. + /// For example, (a <= b) is represented as !(b < a) + pub(super) fn insert_binary( + &mut self, + mut lhs: ValueId, + operator: noirc_frontend::BinaryOpKind, + mut rhs: ValueId, + ) -> Values { + let op = convert_operator(operator); + + if operator_requires_swapped_operands(operator) { + std::mem::swap(&mut lhs, &mut rhs); + } + + // TODO: Rework how types are stored. + // They should be on values rather than on instruction results + let typ = Type::field(); + let mut result = self.builder.insert_binary(lhs, op, rhs, typ); + + if operator_requires_not(operator) { + result = self.builder.insert_not(result, typ); + } + result.into() + } +} + +/// True if the given operator cannot be encoded directly and needs +/// to be represented as !(some other operator) +fn operator_requires_not(op: noirc_frontend::BinaryOpKind) -> bool { + use noirc_frontend::BinaryOpKind::*; + matches!(op, NotEqual | LessEqual | GreaterEqual) +} + +/// True if the given operator cannot be encoded directly and needs +/// to have its lhs and rhs swapped to be represented with another operator. +/// Example: (a > b) needs to be represented as (b < a) +fn operator_requires_swapped_operands(op: noirc_frontend::BinaryOpKind) -> bool { + use noirc_frontend::BinaryOpKind::*; + matches!(op, Greater | LessEqual) +} + +/// Converts the given operator to the appropriate BinaryOp. +/// Take care when using this to insert a binary instruction: this requires +/// checking operator_requires_not and operator_requires_swapped_operands +/// to represent the full operation correctly. 
+fn convert_operator(op: noirc_frontend::BinaryOpKind) -> BinaryOp { + use noirc_frontend::BinaryOpKind; + match op { + BinaryOpKind::Add => BinaryOp::Add, + BinaryOpKind::Subtract => BinaryOp::Sub, + BinaryOpKind::Multiply => BinaryOp::Mul, + BinaryOpKind::Divide => BinaryOp::Div, + BinaryOpKind::Modulo => BinaryOp::Mod, + BinaryOpKind::Equal => BinaryOp::Eq, + BinaryOpKind::NotEqual => BinaryOp::Eq, // Requires not + BinaryOpKind::Less => BinaryOp::Lt, + BinaryOpKind::Greater => BinaryOp::Lt, // Requires operand swap + BinaryOpKind::LessEqual => BinaryOp::Lt, // Requires operand swap and not + BinaryOpKind::GreaterEqual => BinaryOp::Lt, // Requires not + BinaryOpKind::And => BinaryOp::And, + BinaryOpKind::Or => BinaryOp::Or, + BinaryOpKind::Xor => BinaryOp::Xor, + BinaryOpKind::ShiftRight => BinaryOp::Shr, + BinaryOpKind::ShiftLeft => BinaryOp::Shl, + } } impl SharedContext { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 2f9c6646282..3b469ad9664 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -12,7 +12,10 @@ use self::{ value::{Tree, Values}, }; -use super::{ir::types::Type, ssa_builder::SharedBuilderContext}; +use super::{ + ir::{instruction::BinaryOp, types::Type, value::ValueId}, + ssa_builder::SharedBuilderContext, +}; pub(crate) fn generate_ssa(program: Program) { let context = SharedContext::new(program); @@ -58,6 +61,17 @@ impl<'a> FunctionContext<'a> { } } + /// Codegen any non-tuple expression so that we can unwrap the Values + /// tree to return a single value for use with most SSA instructions. 
+ fn codegen_non_tuple_expression(&mut self, expr: &Expression) -> ValueId { + match self.codegen_expression(expr) { + Tree::Branch(branches) => { + panic!("codegen_non_tuple_expression called on tuple {branches:?}") + } + Tree::Leaf(value) => value.eval(), + } + } + fn codegen_ident(&mut self, _ident: &ast::Ident) -> Values { todo!() } @@ -103,7 +117,7 @@ impl<'a> FunctionContext<'a> { array } else { let offset = self.builder.numeric_constant((i as u128).into(), Type::field()); - self.builder.insert_add(array, offset, Type::field()) + self.builder.insert_binary(array, BinaryOp::Add, offset, Type::field()) }; self.builder.insert_store(address, value.eval()); i += 1; @@ -113,16 +127,22 @@ impl<'a> FunctionContext<'a> { array.into() } - fn codegen_block(&mut self, _block: &[Expression]) -> Values { - todo!() + fn codegen_block(&mut self, block: &[Expression]) -> Values { + let mut result = self.unit_value(); + for expr in block { + result = self.codegen_expression(expr); + } + result } fn codegen_unary(&mut self, _unary: &ast::Unary) -> Values { todo!() } - fn codegen_binary(&mut self, _binary: &ast::Binary) -> Values { - todo!() + fn codegen_binary(&mut self, binary: &ast::Binary) -> Values { + let lhs = self.codegen_non_tuple_expression(&binary.lhs); + let rhs = self.codegen_non_tuple_expression(&binary.rhs); + self.insert_binary(lhs, binary.operator, rhs) } fn codegen_index(&mut self, _index: &ast::Index) -> Values { @@ -141,12 +161,17 @@ impl<'a> FunctionContext<'a> { todo!() } - fn codegen_tuple(&mut self, _tuple: &[Expression]) -> Values { - todo!() + fn codegen_tuple(&mut self, tuple: &[Expression]) -> Values { + Tree::Branch(vecmap(tuple, |expr| self.codegen_expression(expr))) } - fn codegen_extract_tuple_field(&mut self, _tuple: &Expression, _index: usize) -> Values { - todo!() + fn codegen_extract_tuple_field(&mut self, tuple: &Expression, index: usize) -> Values { + match self.codegen_expression(tuple) { + Tree::Branch(mut trees) => trees.remove(index), + 
Tree::Leaf(value) => { + unreachable!("Tried to extract tuple index {index} from non-tuple {value:?}") + } + } } fn codegen_call(&mut self, _call: &ast::Call) -> Values { @@ -165,7 +190,8 @@ impl<'a> FunctionContext<'a> { todo!() } - fn codegen_semi(&mut self, _semi: &Expression) -> Values { - todo!() + fn codegen_semi(&mut self, expr: &Expression) -> Values { + self.codegen_expression(expr); + self.unit_value() } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index c3911d367c1..83a5d15c904 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -2,6 +2,7 @@ use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; +#[derive(Debug)] pub(super) enum Tree { Branch(Vec>), Leaf(T), From c5457f4c201246fda0d8e0a1f0f08245cf26c3a7 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 25 Apr 2023 15:13:47 -0400 Subject: [PATCH 08/66] chore(ssa refactor): Update how instruction result types are retrieved (#1222) * Implement binary instructions * Cleanup PR * Change how instruction result types are handled * Reorganize make_instruction flow a bit --- .../src/ssa_refactor/ir/dfg.rs | 75 ++++++++++--------- .../src/ssa_refactor/ir/instruction.rs | 49 +++++++++--- .../src/ssa_refactor/ir/types.rs | 4 + .../src/ssa_refactor/ir/value.rs | 10 +++ .../ssa_builder/function_builder.rs | 30 ++++---- .../src/ssa_refactor/ssa_gen/context.rs | 7 +- .../src/ssa_refactor/ssa_gen/mod.rs | 4 +- 7 files changed, 111 insertions(+), 68 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index c21fc2c3f35..54ffd5a05f6 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -2,7 +2,7 @@ use 
super::{ basic_block::{BasicBlock, BasicBlockId}, constant::{NumericConstant, NumericConstantId}, function::Signature, - instruction::{Instruction, InstructionId}, + instruction::{Instruction, InstructionId, InstructionResultType}, map::{DenseMap, Id, SecondaryMap, TwoWayMap}, types::Type, value::{Value, ValueId}, @@ -110,11 +110,19 @@ impl DataFlowGraph { } /// Inserts a new instruction into the DFG. - /// This does not add the instruction to the block or populate the instruction's result list - pub(crate) fn make_instruction(&mut self, instruction_data: Instruction) -> InstructionId { + /// This does not add the instruction to the block. + /// Returns the id of the new instruction and its results. + /// + /// Populates the instruction's results with the given ctrl_typevars if the instruction + /// is a Load, Call, or Intrinsic. Otherwise the instruction's results will be known + /// by the instruction itself and None can safely be passed for this parameter. + pub(crate) fn make_instruction( + &mut self, + instruction_data: Instruction, + ctrl_typevars: Option>, + ) -> InstructionId { let id = self.instructions.insert(instruction_data); - // Create a new vector to store the potential results for the instruction. - self.results.insert(id, Default::default()); + self.make_instruction_results(id, ctrl_typevars); id } @@ -134,46 +142,51 @@ impl DataFlowGraph { /// Attaches results to the instruction, clearing any previous results. /// /// Returns the results of the instruction - pub(crate) fn make_instruction_results( + fn make_instruction_results( &mut self, instruction_id: InstructionId, - ctrl_typevar: Type, - ) -> &[ValueId] { - // Clear all of the results instructions associated with this - // instruction. 
- self.results.get_mut(&instruction_id).expect("all instructions should have a `result` allocation when instruction was added to the DFG").clear(); + ctrl_typevars: Option>, + ) { + self.results.insert(instruction_id, Default::default()); // Get all of the types that this instruction produces // and append them as results. - let typs = self.instruction_result_types(instruction_id, ctrl_typevar); + let typs = self.instruction_result_types(instruction_id, ctrl_typevars); for typ in typs { self.append_result(instruction_id, typ); } - - self.results.get_mut(&instruction_id) - .expect("all instructions should have a `result` allocation when instruction was added to the DFG") - .as_slice() } /// Return the result types of this instruction. /// - /// For example, an addition instruction will return - /// one type which is the type of the operands involved. - /// This is the `ctrl_typevar` in this case. + /// In the case of Load, Call, and Intrinsic, the function's result + /// type may be unknown. In this case, the given ctrl_typevars are returned instead. + /// ctrl_typevars is taken in as an Option since it is common to omit them when getting + /// the type of an instruction that does not require them. Compared to passing an empty Vec, + /// Option has the benefit of panicking if it is accidentally used for a Call instruction, + /// rather than silently returning the empty Vec and continuing. fn instruction_result_types( &self, instruction_id: InstructionId, - ctrl_typevar: Type, + ctrl_typevars: Option>, ) -> Vec { - // Check if it is a call instruction. If so, we don't support that yet - let ins_data = &self.instructions[instruction_id]; - match ins_data { - Instruction::Call { .. 
} => todo!("function calls are not supported yet"), - ins => ins.return_types(ctrl_typevar), + let instruction = &self.instructions[instruction_id]; + match instruction.result_type() { + InstructionResultType::Known(typ) => vec![typ], + InstructionResultType::Operand(value) => vec![self.type_of_value(value)], + InstructionResultType::None => vec![], + InstructionResultType::Unknown => { + ctrl_typevars.expect("Control typevars required but not given") + } } } + /// Returns the type of a given value + pub(crate) fn type_of_value(&self, value: ValueId) -> Type { + self.values[value].get_type() + } + /// Appends a result type to the instruction. pub(crate) fn append_result(&mut self, instruction_id: InstructionId, typ: Type) -> ValueId { let results = self.results.get_mut(&instruction_id).unwrap(); @@ -257,21 +270,15 @@ impl std::ops::IndexMut for DataFlowGraph { #[cfg(test)] mod tests { use super::DataFlowGraph; - use crate::ssa_refactor::ir::{ - instruction::Instruction, - types::{NumericType, Type}, - }; + use crate::ssa_refactor::ir::instruction::Instruction; #[test] fn make_instruction() { let mut dfg = DataFlowGraph::default(); let ins = Instruction::Allocate { size: 20 }; - let ins_id = dfg.make_instruction(ins); - - let num_results = - dfg.make_instruction_results(ins_id, Type::Numeric(NumericType::NativeField)).len(); + let ins_id = dfg.make_instruction(ins, None); let results = dfg.instruction_results(ins_id); - assert_eq!(results.len(), num_results); + assert_eq!(results.len(), 1); } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 9b5aeb9388c..dcab6e04006 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -105,23 +105,39 @@ impl Instruction { } } - /// Returns the types that this instruction will return. 
- pub(crate) fn return_types(&self, ctrl_typevar: Type) -> Vec { + /// Returns the type that this instruction will return. + pub(crate) fn result_type(&self) -> InstructionResultType { match self { - Instruction::Binary(_) => vec![ctrl_typevar], - Instruction::Cast(_, typ) => vec![*typ], - Instruction::Not(_) => vec![ctrl_typevar], - Instruction::Truncate { .. } => vec![ctrl_typevar], - Instruction::Constrain(_) => vec![], - Instruction::Call { .. } => vec![], - Instruction::Intrinsic { .. } => vec![], - Instruction::Allocate { .. } => vec![Type::Reference], - Instruction::Load { .. } => vec![ctrl_typevar], - Instruction::Store { .. } => vec![], + Instruction::Binary(binary) => binary.result_type(), + Instruction::Cast(_, typ) => InstructionResultType::Known(*typ), + Instruction::Allocate { .. } => InstructionResultType::Known(Type::Reference), + Instruction::Not(value) | Instruction::Truncate { value, .. } => { + InstructionResultType::Operand(*value) + } + Instruction::Constrain(_) | Instruction::Store { .. } => InstructionResultType::None, + Instruction::Load { .. } | Instruction::Call { .. } | Instruction::Intrinsic { .. } => { + InstructionResultType::Unknown + } } } } +/// The possible return values for Instruction::return_types +pub(crate) enum InstructionResultType { + /// The result type of this instruction matches that of this operand + Operand(ValueId), + + /// The result type of this instruction is known to be this type - independent of its operands. + Known(Type), + + /// The result type of this function is unknown and separate from its operand types. + /// This occurs for function and intrinsic calls. + Unknown, + + /// This instruction does not return any results. 
+ None, +} + /// These are operations which can exit a basic block /// ie control flow type operations /// @@ -169,6 +185,15 @@ pub(crate) struct Binary { pub(crate) operator: BinaryOp, } +impl Binary { + pub(crate) fn result_type(&self) -> InstructionResultType { + match self.operator { + BinaryOp::Eq | BinaryOp::Lt => InstructionResultType::Known(Type::bool()), + _ => InstructionResultType::Operand(self.lhs), + } + } +} + /// Binary Operations allowed in the IR. /// Aside from the comparison operators (Eq and Lt), all operators /// will return the same type as their operands. diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs index 888d7d128d1..8a0f825a117 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -38,6 +38,10 @@ impl Type { Type::Numeric(NumericType::Unsigned { bit_size }) } + pub(crate) fn bool() -> Type { + Type::unsigned(1) + } + pub(crate) fn field() -> Type { Type::Numeric(NumericType::NativeField) } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index 537eabb0cab..a559522fadd 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -28,3 +28,13 @@ pub(crate) enum Value { /// This Value originates from a numeric constant NumericConstant { constant: NumericConstantId, typ: Type }, } + +impl Value { + pub(crate) fn get_type(&self) -> Type { + match self { + Value::Instruction { typ, .. } => *typ, + Value::Param { typ, .. } => *typ, + Value::NumericConstant { typ, .. 
} => *typ, + } + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index 7911aa2988a..b30ff11c2e1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -3,7 +3,7 @@ use acvm::FieldElement; use crate::ssa_refactor::ir::{ basic_block::BasicBlockId, function::{Function, FunctionId}, - instruction::{Binary, BinaryOp, Instruction, InstructionId}, + instruction::{Binary, BinaryOp, Instruction}, types::Type, value::ValueId, }; @@ -71,18 +71,21 @@ impl<'ssa> FunctionBuilder<'ssa> { self.numeric_constant(value.into(), Type::field()) } - fn insert_instruction(&mut self, instruction: Instruction) -> InstructionId { - let id = self.current_function.dfg.make_instruction(instruction); + fn insert_instruction( + &mut self, + instruction: Instruction, + ctrl_typevars: Option>, + ) -> &[ValueId] { + let id = self.current_function.dfg.make_instruction(instruction, ctrl_typevars); self.current_function.dfg.insert_instruction_in_block(self.current_block, id); - id + self.current_function.dfg.instruction_results(id) } /// Insert an allocate instruction at the end of the current block, allocating the /// given amount of field elements. Returns the result of the allocate instruction, /// which is always a Reference to the allocated data. pub(crate) fn insert_allocate(&mut self, size_to_allocate: u32) -> ValueId { - let id = self.insert_instruction(Instruction::Allocate { size: size_to_allocate }); - self.current_function.dfg.make_instruction_results(id, Type::Reference)[0] + self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None)[0] } /// Insert a Load instruction at the end of the current block, loading from the given address @@ -90,14 +93,13 @@ impl<'ssa> FunctionBuilder<'ssa> { /// a single value. 
Loading multiple values (such as a tuple) will require multiple loads. /// Returns the element that was loaded. pub(crate) fn insert_load(&mut self, address: ValueId, type_to_load: Type) -> ValueId { - let id = self.insert_instruction(Instruction::Load { address }); - self.current_function.dfg.make_instruction_results(id, type_to_load)[0] + self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] } /// Insert a Store instruction at the end of the current block, storing the given element /// at the given address. Expects that the address points to a previous Allocate instruction. pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { - self.insert_instruction(Instruction::Store { address, value }); + self.insert_instruction(Instruction::Store { address, value }, None); } /// Insert a binary instruction at the end of the current block. @@ -107,16 +109,14 @@ impl<'ssa> FunctionBuilder<'ssa> { lhs: ValueId, operator: BinaryOp, rhs: ValueId, - typ: Type, ) -> ValueId { - let id = self.insert_instruction(Instruction::Binary(Binary { lhs, rhs, operator })); - self.current_function.dfg.make_instruction_results(id, typ)[0] + let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); + self.insert_instruction(instruction, None)[0] } /// Insert a not instruction at the end of the current block. /// Returns the result of the instruction. 
- pub(crate) fn insert_not(&mut self, rhs: ValueId, typ: Type) -> ValueId { - let id = self.insert_instruction(Instruction::Not(rhs)); - self.current_function.dfg.make_instruction_results(id, typ)[0] + pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { + self.insert_instruction(Instruction::Not(rhs), None)[0] } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 8f7b4e3de9a..f76a6675077 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -148,13 +148,10 @@ impl<'a> FunctionContext<'a> { std::mem::swap(&mut lhs, &mut rhs); } - // TODO: Rework how types are stored. - // They should be on values rather than on instruction results - let typ = Type::field(); - let mut result = self.builder.insert_binary(lhs, op, rhs, typ); + let mut result = self.builder.insert_binary(lhs, op, rhs); if operator_requires_not(operator) { - result = self.builder.insert_not(result, typ); + result = self.builder.insert_not(result); } result.into() } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 3b469ad9664..553b5eb2218 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -116,8 +116,8 @@ impl<'a> FunctionContext<'a> { let address = if i == 0 { array } else { - let offset = self.builder.numeric_constant((i as u128).into(), Type::field()); - self.builder.insert_binary(array, BinaryOp::Add, offset, Type::field()) + let offset = self.builder.field_constant(i as u128); + self.builder.insert_binary(array, BinaryOp::Add, offset) }; self.builder.insert_store(address, value.eval()); i += 1; From a10182e46c1bb2bd37237ff86f214fd11295624c Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 25 Apr 2023 21:56:57 
+0100 Subject: [PATCH 09/66] chore(nargo): update panic message to suggest searching for similar issues (#1224) chore: update panic message to suggest searching for similar issues --- crates/nargo_cli/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/nargo_cli/src/main.rs b/crates/nargo_cli/src/main.rs index bd3f1352bda..a73785c64c6 100644 --- a/crates/nargo_cli/src/main.rs +++ b/crates/nargo_cli/src/main.rs @@ -7,7 +7,7 @@ fn main() -> eyre::Result<()> { // Register a panic hook to display more readable panic messages to end-users let (panic_hook, _) = HookBuilder::default() .display_env_section(false) - .panic_section("This is a bug. Consider opening an issue at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml") + .panic_section("This is a bug. We may have already fixed this in newer versions of Nargo so try searching for similar issues at https://github.com/noir-lang/noir/issues/.\nIf there isn't an open issue for this bug, consider opening one at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml") .into_hooks(); panic_hook.install(); From 3a65f304c25e8239f9735ce1e6dee29d7eecc244 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Wed, 26 Apr 2023 18:19:45 +0100 Subject: [PATCH 10/66] feat(noir): added `distinct` keyword (#1219) * feat(noir): added `distinct` keyword for preventing witness overlap in program input and output * chore(noir): dedup within output also. 
Futhermore: - reorder keywords (distinct first) - add test case - fix up comments & typos - tidy up * chore(hir): hoist "main" constant for reuse * chore(noir): comment typo Co-authored-by: jfecher --------- Co-authored-by: jfecher --- crates/noirc_abi/src/lib.rs | 24 ++++++++++++++++++ crates/noirc_evaluator/src/lib.rs | 20 +++++++++++++-- .../src/ssa/acir_gen/operations/return.rs | 12 ++++++++- crates/noirc_frontend/src/ast/expression.rs | 1 + crates/noirc_frontend/src/hir/def_map/mod.rs | 5 ++-- .../src/hir/resolution/errors.rs | 14 +++++++++++ .../src/hir/resolution/resolver.rs | 22 ++++++++++++++-- .../noirc_frontend/src/hir/type_check/mod.rs | 1 + crates/noirc_frontend/src/hir_def/function.rs | 4 ++- crates/noirc_frontend/src/lexer/token.rs | 3 +++ .../src/monomorphization/ast.rs | 13 ++++++++-- .../src/monomorphization/mod.rs | 5 ++-- crates/noirc_frontend/src/parser/parser.rs | 25 +++++++++++++++---- 13 files changed, 132 insertions(+), 17 deletions(-) diff --git a/crates/noirc_abi/src/lib.rs b/crates/noirc_abi/src/lib.rs index dbd935dcde0..191128b9407 100644 --- a/crates/noirc_abi/src/lib.rs +++ b/crates/noirc_abi/src/lib.rs @@ -85,6 +85,30 @@ impl std::fmt::Display for AbiVisibility { } } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +/// Represents whether the return value should compromise of unique witness indices such that no +/// index occurs within the program's abi more than once. +/// +/// This is useful for application stacks that require an uniform abi across across multiple +/// circuits. When index duplication is allowed, the compiler may identify that a public input +/// reaches the output unaltered and is thus referenced directly, causing the input and output +/// witness indices to overlap. Similarly, repetitions of copied values in the output may be +/// optimized away. 
+pub enum AbiDistinctness { + Distinct, + DuplicationAllowed, +} + +impl std::fmt::Display for AbiDistinctness { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + AbiDistinctness::Distinct => write!(f, "distinct"), + AbiDistinctness::DuplicationAllowed => write!(f, "duplication-allowed"), + } + } +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] pub enum Sign { diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 8b3cbb009a9..64a02061b0f 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -54,6 +54,9 @@ pub struct Evaluator { // and increasing as for `public_parameters`. We then use a `Vec` rather // than a `BTreeSet` to preserve this order for the ABI. return_values: Vec, + // If true, indicates that the resulting ACIR should enforce that all inputs and outputs are + // comprised of unique witness indices by having extra constraints if necessary. + return_is_distinct: bool, opcodes: Vec, } @@ -102,6 +105,11 @@ pub fn create_circuit( } impl Evaluator { + // Returns true if the `witness_index` appears in the program's input parameters. + fn is_abi_input(&self, witness_index: Witness) -> bool { + witness_index.as_usize() <= self.num_witnesses_abi_len + } + // Returns true if the `witness_index` // was created in the ABI as a private input. // @@ -111,11 +119,17 @@ impl Evaluator { // If the `witness_index` is more than the `num_witnesses_abi_len` // then it was created after the ABI was processed and is therefore // an intermediate variable. 
- let is_intermediate_variable = witness_index.as_usize() > self.num_witnesses_abi_len; let is_public_input = self.public_parameters.contains(&witness_index); - !is_intermediate_variable && !is_public_input + self.is_abi_input(witness_index) && !is_public_input + } + + // True if the main function return has the `distinct` keyword and this particular witness + // index has already occurred elsewhere in the abi's inputs and outputs. + fn should_proxy_witness_for_abi_output(&self, witness_index: Witness) -> bool { + self.return_is_distinct + && (self.is_abi_input(witness_index) || self.return_values.contains(&witness_index)) } // Creates a new Witness index @@ -139,6 +153,8 @@ impl Evaluator { enable_logging: bool, show_output: bool, ) -> Result<(), RuntimeError> { + self.return_is_distinct = + program.return_distinctness == noirc_abi::AbiDistinctness::Distinct; let mut ir_gen = IrGenerator::new(program); self.parse_abi_alt(&mut ir_gen); diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs index 3269af06d16..6aaa3b2fbbd 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/return.rs @@ -1,3 +1,5 @@ +use acvm::acir::native_types::Expression; + use crate::{ errors::RuntimeErrorKind, ssa::{ @@ -46,7 +48,15 @@ pub(crate) fn evaluate( "we do not allow private ABI inputs to be returned as public outputs", ))); } - evaluator.return_values.push(witness); + // Check if the outputted witness needs separating from an existing occurrence in the + // abi. This behavior stems from usage of the `distinct` keyword. 
+ let return_witness = if evaluator.should_proxy_witness_for_abi_output(witness) { + let proxy_constraint = Expression::from(witness); + evaluator.create_intermediate_variable(proxy_constraint) + } else { + witness + }; + evaluator.return_values.push(return_witness); } } diff --git a/crates/noirc_frontend/src/ast/expression.rs b/crates/noirc_frontend/src/ast/expression.rs index ac6161ddac1..9be6f715a14 100644 --- a/crates/noirc_frontend/src/ast/expression.rs +++ b/crates/noirc_frontend/src/ast/expression.rs @@ -325,6 +325,7 @@ pub struct FunctionDefinition { pub span: Span, pub return_type: UnresolvedType, pub return_visibility: noirc_abi::AbiVisibility, + pub return_distinctness: noirc_abi::AbiDistinctness, } /// Describes the types of smart contract functions that are allowed. diff --git a/crates/noirc_frontend/src/hir/def_map/mod.rs b/crates/noirc_frontend/src/hir/def_map/mod.rs index 25e0488a7b6..fdaf2dd3acc 100644 --- a/crates/noirc_frontend/src/hir/def_map/mod.rs +++ b/crates/noirc_frontend/src/hir/def_map/mod.rs @@ -18,6 +18,9 @@ pub use module_data::*; mod namespace; pub use namespace::*; +/// The name that is used for a non-contract program's entry-point function. 
+pub const MAIN_FUNCTION: &str = "main"; + // XXX: Ultimately, we want to constrain an index to be of a certain type just like in RA /// Lets first check if this is offered by any external crate /// XXX: RA has made this a crate on crates.io @@ -104,8 +107,6 @@ impl CrateDefMap { /// Find the main function for this crate pub fn main_function(&self) -> Option { - const MAIN_FUNCTION: &str = "main"; - let root_module = &self.modules()[self.root.0]; // This function accepts an Ident, so we attach a dummy span to diff --git a/crates/noirc_frontend/src/hir/resolution/errors.rs b/crates/noirc_frontend/src/hir/resolution/errors.rs index 9406474a226..c57e4c890d2 100644 --- a/crates/noirc_frontend/src/hir/resolution/errors.rs +++ b/crates/noirc_frontend/src/hir/resolution/errors.rs @@ -32,6 +32,8 @@ pub enum ResolverError { UnnecessaryPub { ident: Ident }, #[error("Required 'pub', main function must return public value")] NecessaryPub { ident: Ident }, + #[error("'distinct' keyword can only be used with main method")] + DistinctNotAllowed { ident: Ident }, #[error("Expected const value where non-constant value was used")] ExpectedComptimeVariable { name: String, span: Span }, #[error("Missing expression for declared constant")] @@ -176,6 +178,18 @@ impl From for Diagnostic { diag.add_note("The `pub` keyword is mandatory for the entry-point function return type because the verifier cannot retrieve private witness and thus the function will not be able to return a 'priv' value".to_owned()); diag } + ResolverError::DistinctNotAllowed { ident } => { + let name = &ident.0.contents; + + let mut diag = Diagnostic::simple_error( + format!("Invalid `distinct` keyword on return type of function {name}"), + "Invalid distinct on return type".to_string(), + ident.0.span(), + ); + + diag.add_note("The `distinct` keyword is only valid when used on the main function of a program, as its only purpose is to ensure that all witness indices that occur in the abi are unique".to_owned()); + diag 
+ } ResolverError::ExpectedComptimeVariable { name, span } => Diagnostic::simple_error( format!("expected constant variable where non-constant variable {name} was used"), "expected const variable".to_string(), diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index cfb354498ab..98cf5993edf 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -22,7 +22,7 @@ use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::rc::Rc; use crate::graph::CrateId; -use crate::hir::def_map::{ModuleDefId, TryFromModuleDefId}; +use crate::hir::def_map::{ModuleDefId, TryFromModuleDefId, MAIN_FUNCTION}; use crate::hir_def::stmt::{HirAssignStatement, HirLValue, HirPattern}; use crate::node_interner::{ DefinitionId, DefinitionKind, ExprId, FuncId, NodeInterner, StmtId, StructId, @@ -637,6 +637,12 @@ impl<'a> Resolver<'a> { self.push_err(ResolverError::NecessaryPub { ident: func.name_ident().clone() }); } + if !self.distinct_allowed(func) + && func.def.return_distinctness != noirc_abi::AbiDistinctness::DuplicationAllowed + { + self.push_err(ResolverError::DistinctNotAllowed { ident: func.name_ident().clone() }); + } + if attributes == Some(Attribute::Test) && !parameters.is_empty() { self.push_err(ResolverError::TestFunctionHasParameters { span: func.name_ident().span(), @@ -661,6 +667,7 @@ impl<'a> Resolver<'a> { typ, parameters: parameters.into(), return_visibility: func.def.return_visibility, + return_distinctness: func.def.return_distinctness, has_body: !func.def.body.is_empty(), } } @@ -670,7 +677,18 @@ impl<'a> Resolver<'a> { if self.in_contract() { !func.def.is_unconstrained && !func.def.is_open } else { - func.name() == "main" + func.name() == MAIN_FUNCTION + } + } + + /// True if the `distinct` keyword is allowed on a function's return type + fn distinct_allowed(&self, func: &NoirFunction) -> bool { + if self.in_contract() { 
+ // "open" and "unconstrained" functions are compiled to brillig and thus duplication of + // witness indices in their abis is not a concern. + !func.def.is_unconstrained && !func.def.is_open + } else { + func.name() == MAIN_FUNCTION } } diff --git a/crates/noirc_frontend/src/hir/type_check/mod.rs b/crates/noirc_frontend/src/hir/type_check/mod.rs index 97b1c71a0bc..5ebac6de9a3 100644 --- a/crates/noirc_frontend/src/hir/type_check/mod.rs +++ b/crates/noirc_frontend/src/hir/type_check/mod.rs @@ -219,6 +219,7 @@ mod test { ] .into(), return_visibility: noirc_abi::AbiVisibility::Private, + return_distinctness: noirc_abi::AbiDistinctness::DuplicationAllowed, has_body: true, }; interner.push_fn_meta(func_meta, func_id); diff --git a/crates/noirc_frontend/src/hir_def/function.rs b/crates/noirc_frontend/src/hir_def/function.rs index a9fafffe159..1f7399e5547 100644 --- a/crates/noirc_frontend/src/hir_def/function.rs +++ b/crates/noirc_frontend/src/hir_def/function.rs @@ -1,5 +1,5 @@ use iter_extended::vecmap; -use noirc_abi::{AbiParameter, AbiType, AbiVisibility}; +use noirc_abi::{AbiDistinctness, AbiParameter, AbiType, AbiVisibility}; use noirc_errors::{Location, Span}; use super::expr::{HirBlockExpression, HirExpression, HirIdent}; @@ -131,6 +131,8 @@ pub struct FuncMeta { pub return_visibility: AbiVisibility, + pub return_distinctness: AbiDistinctness, + /// The type of this function. Either a Type::Function /// or a Type::Forall for generic functions. 
pub typ: Type, diff --git a/crates/noirc_frontend/src/lexer/token.rs b/crates/noirc_frontend/src/lexer/token.rs index 0df1fc39938..6b021a3dcbb 100644 --- a/crates/noirc_frontend/src/lexer/token.rs +++ b/crates/noirc_frontend/src/lexer/token.rs @@ -421,6 +421,7 @@ pub enum Keyword { Contract, Crate, Dep, + Distinct, Else, Field, Fn, @@ -454,6 +455,7 @@ impl fmt::Display for Keyword { Keyword::Contract => write!(f, "contract"), Keyword::Crate => write!(f, "crate"), Keyword::Dep => write!(f, "dep"), + Keyword::Distinct => write!(f, "distinct"), Keyword::Else => write!(f, "else"), Keyword::Field => write!(f, "Field"), Keyword::Fn => write!(f, "fn"), @@ -490,6 +492,7 @@ impl Keyword { "contract" => Keyword::Contract, "crate" => Keyword::Crate, "dep" => Keyword::Dep, + "distinct" => Keyword::Distinct, "else" => Keyword::Else, "Field" => Keyword::Field, "fn" => Keyword::Fn, diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index e4339c8e367..04aec9a6726 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -221,11 +221,20 @@ impl Type { pub struct Program { pub functions: Vec, pub main_function_signature: FunctionSignature, + /// Indicates whether witness indices are allowed to reoccur in the ABI of the resulting ACIR. + /// + /// Note: this has no impact on monomorphization, and is simply attached here for ease of + /// forwarding to the next phase. 
+ pub return_distinctness: noirc_abi::AbiDistinctness, } impl Program { - pub fn new(functions: Vec, main_function_signature: FunctionSignature) -> Program { - Program { functions, main_function_signature } + pub fn new( + functions: Vec, + main_function_signature: FunctionSignature, + return_distinctness: noirc_abi::AbiDistinctness, + ) -> Program { + Program { functions, main_function_signature, return_distinctness } } pub fn main(&self) -> &Function { diff --git a/crates/noirc_frontend/src/monomorphization/mod.rs b/crates/noirc_frontend/src/monomorphization/mod.rs index bfce292d2eb..79c9bab7d8a 100644 --- a/crates/noirc_frontend/src/monomorphization/mod.rs +++ b/crates/noirc_frontend/src/monomorphization/mod.rs @@ -17,7 +17,7 @@ use std::collections::{BTreeMap, HashMap, VecDeque}; use crate::{ hir_def::{ expr::*, - function::{Param, Parameters}, + function::{FuncMeta, Param, Parameters}, stmt::{HirAssignStatement, HirLValue, HirLetStatement, HirPattern, HirStatement}, }, node_interner::{self, DefinitionKind, NodeInterner, StmtId}, @@ -88,7 +88,8 @@ pub fn monomorphize(main: node_interner::FuncId, interner: &NodeInterner) -> Pro } let functions = vecmap(monomorphizer.finished_functions, |(_, f)| f); - Program::new(functions, function_sig) + let FuncMeta { return_distinctness, .. } = interner.function_meta(&main); + Program::new(functions, function_sig, return_distinctness) } impl<'interner> Monomorphizer<'interner> { diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index f4793d06368..065b6362fb4 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -40,7 +40,7 @@ use crate::{ use chumsky::prelude::*; use iter_extended::vecmap; -use noirc_abi::AbiVisibility; +use noirc_abi::{AbiDistinctness, AbiVisibility}; use noirc_errors::{CustomDiagnostic, Span, Spanned}; /// Entry function for the parser - also handles lexing internally. 
@@ -162,7 +162,7 @@ fn function_definition(allow_self: bool) -> impl NoirParser { |( ( ((((attribute, (is_unconstrained, is_open)), name), generics), parameters), - (return_visibility, return_type), + ((return_distinctness, return_visibility), return_type), ), body, )| { @@ -177,6 +177,7 @@ fn function_definition(allow_self: bool) -> impl NoirParser { body, return_type, return_visibility, + return_distinctness, } .into() }, @@ -235,12 +236,18 @@ fn lambda_return_type() -> impl NoirParser { .map(|ret| ret.unwrap_or(UnresolvedType::Unspecified)) } -fn function_return_type() -> impl NoirParser<(AbiVisibility, UnresolvedType)> { +fn function_return_type() -> impl NoirParser<((AbiDistinctness, AbiVisibility), UnresolvedType)> { just(Token::Arrow) - .ignore_then(optional_visibility()) + .ignore_then(optional_distinctness()) + .then(optional_visibility()) .then(parse_type()) .or_not() - .map(|ret| ret.unwrap_or((AbiVisibility::Private, UnresolvedType::Unit))) + .map(|ret| { + ret.unwrap_or(( + (AbiDistinctness::DuplicationAllowed, AbiVisibility::Private), + UnresolvedType::Unit, + )) + }) } fn attribute() -> impl NoirParser { @@ -554,6 +561,13 @@ fn optional_visibility() -> impl NoirParser { }) } +fn optional_distinctness() -> impl NoirParser { + keyword(Keyword::Distinct).or_not().map(|opt| match opt { + Some(_) => AbiDistinctness::Distinct, + None => AbiDistinctness::DuplicationAllowed, + }) +} + fn maybe_comp_time() -> impl NoirParser { keyword(Keyword::CompTime).or_not().map(|opt| match opt { Some(_) => CompTime::Yes(None), @@ -1257,6 +1271,7 @@ mod test { "fn f(f: pub Field, y : Field, z : comptime Field) -> u8 { x + a }", "fn func_name(f: Field, y : pub Field, z : pub [u8;5],) {}", "fn func_name(x: [Field], y : [Field;2],y : pub [Field;2], z : pub [u8;5]) {}", + "fn main(x: pub u8, y: pub u8) -> distinct pub [u8; 2] { [x, y] }" ], ); From e551e55e4ed16c5dfb6e05f66389674d9a737fc5 Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 26 Apr 2023 13:34:55 -0400 Subject: 
[PATCH 11/66] chore(ssa refactor): Implement ssa-gen for indexing, cast, constrain, if, unary (#1225) * Implement ssa-gen for if * Satisfy the clippy gods --- .../src/ssa_refactor/ir/cfg.rs | 23 ++-- .../src/ssa_refactor/ir/constant.rs | 4 +- .../src/ssa_refactor/ir/dfg.rs | 24 ++++- .../src/ssa_refactor/ir/function.rs | 2 +- .../src/ssa_refactor/ir/instruction.rs | 11 +- .../src/ssa_refactor/ir/printer.rs | 12 +-- .../ssa_builder/function_builder.rs | 101 ++++++++++++++++-- .../src/ssa_refactor/ssa_gen/context.rs | 11 +- .../src/ssa_refactor/ssa_gen/mod.rs | 99 ++++++++++++----- .../src/ssa_refactor/ssa_gen/value.rs | 10 ++ .../src/monomorphization/ast.rs | 1 + .../src/monomorphization/mod.rs | 2 +- 12 files changed, 230 insertions(+), 70 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs index 05b64e30ed8..3e469361c37 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs @@ -133,29 +133,27 @@ mod tests { // Build function of form // fn func { // block0(cond: u1): - // jmpif cond(), then: block2, else: block1 + // jmpif cond, then: block2, else: block1 // block1(): - // jmpif cond(), then: block1, else: block2 + // jmpif cond, then: block1, else: block2 // block2(): - // return + // return () // } let mut func = Function::new("func".into()); let block0_id = func.entry_block(); let cond = func.dfg.add_block_parameter(block0_id, Type::unsigned(1)); - let block1_id = func.dfg.new_block(); - let block2_id = func.dfg.new_block(); + let block1_id = func.dfg.make_block(); + let block2_id = func.dfg.make_block(); func.dfg[block0_id].set_terminator(TerminatorInstruction::JmpIf { condition: cond, then_destination: block2_id, else_destination: block1_id, - arguments: vec![], }); func.dfg[block1_id].set_terminator(TerminatorInstruction::JmpIf { condition: cond, then_destination: block1_id, else_destination: block2_id, - arguments: 
vec![], }); func.dfg[block2_id].set_terminator(TerminatorInstruction::Return { return_values: vec![] }); @@ -192,15 +190,15 @@ mod tests { // Modify function to form: // fn func { // block0(cond: u1): - // jmpif cond(), then: block1, else: ret_block + // jmpif cond, then: block1, else: ret_block // block1(): - // jmpif cond(), then: block1, else: block2 + // jmpif cond, then: block1, else: block2 // block2(): - // jmp ret_block + // jmp ret_block() // ret_block(): - // return + // return () // } - let ret_block_id = func.dfg.new_block(); + let ret_block_id = func.dfg.make_block(); func.dfg[ret_block_id] .set_terminator(TerminatorInstruction::Return { return_values: vec![] }); func.dfg[block2_id].set_terminator(TerminatorInstruction::Jmp { @@ -211,7 +209,6 @@ mod tests { condition: cond, then_destination: block1_id, else_destination: ret_block_id, - arguments: vec![], }); // Recompute new and changed blocks diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs index 6d5538d3410..4c793a144da 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs @@ -16,8 +16,8 @@ impl NumericConstant { Self(value) } - pub(crate) fn value(&self) -> &FieldElement { - &self.0 + pub(crate) fn value(&self) -> FieldElement { + self.0 } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 54ffd5a05f6..8acce876d90 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -2,7 +2,7 @@ use super::{ basic_block::{BasicBlock, BasicBlockId}, constant::{NumericConstant, NumericConstantId}, function::Signature, - instruction::{Instruction, InstructionId, InstructionResultType}, + instruction::{Instruction, InstructionId, InstructionResultType, TerminatorInstruction}, map::{DenseMap, Id, SecondaryMap, TwoWayMap}, types::Type, 
value::{Value, ValueId}, @@ -75,14 +75,14 @@ impl DataFlowGraph { /// Creates a new basic block with no parameters. /// After being created, the block is unreachable in the current function /// until another block is made to jump to it. - pub(crate) fn new_block(&mut self) -> BasicBlockId { + pub(crate) fn make_block(&mut self) -> BasicBlockId { self.blocks.insert(BasicBlock::new(Vec::new())) } /// Creates a new basic block with the given parameters. /// After being created, the block is unreachable in the current function /// until another block is made to jump to it. - pub(crate) fn new_block_with_parameters( + pub(crate) fn make_block_with_parameters( &mut self, parameter_types: impl Iterator, ) -> BasicBlockId { @@ -230,6 +230,24 @@ impl DataFlowGraph { ) { self.blocks[block].insert_instruction(instruction); } + + /// Returns the field element represented by this value if it is a numeric constant. + /// Returns None if the given value is not a numeric constant. + pub(crate) fn get_numeric_constant(&self, value: Id) -> Option { + match self.values[value] { + Value::NumericConstant { constant, .. } => Some(self[constant].value()), + _ => None, + } + } + + /// Sets the terminator instruction for the given basic block + pub(crate) fn set_block_terminator( + &mut self, + block: BasicBlockId, + terminator: TerminatorInstruction, + ) { + self.blocks[block].set_terminator(terminator); + } } impl std::ops::Index for DataFlowGraph { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 63cd31142c4..1a735726029 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -32,7 +32,7 @@ impl Function { /// Note that any parameters to the function must be manually added later. 
pub(crate) fn new(name: String) -> Self { let mut dfg = DataFlowGraph::default(); - let entry_block = dfg.new_block(); + let entry_block = dfg.make_block(); Self { name, source_locations: SecondaryMap::new(), entry_block, dfg } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index dcab6e04006..11c6b8dc05f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -150,14 +150,9 @@ pub(crate) enum TerminatorInstruction { /// /// Jump If /// - /// If the condition is true: jump to the specified `then_destination` with `arguments`. - /// Otherwise, jump to the specified `else_destination` with `arguments`. - JmpIf { - condition: ValueId, - then_destination: BasicBlockId, - else_destination: BasicBlockId, - arguments: Vec, - }, + /// If the condition is true: jump to the specified `then_destination`. + /// Otherwise, jump to the specified `else_destination`. 
+ JmpIf { condition: ValueId, then_destination: BasicBlockId, else_destination: BasicBlockId }, /// Unconditional Jump /// diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index 1a7737e97b0..a711482e08c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -57,17 +57,11 @@ pub(crate) fn display_terminator( Some(TerminatorInstruction::Jmp { destination, arguments }) => { writeln!(f, " jmp {}({})", destination, value_list(arguments)) } - Some(TerminatorInstruction::JmpIf { - condition, - arguments, - then_destination, - else_destination, - }) => { - let args = value_list(arguments); + Some(TerminatorInstruction::JmpIf { condition, then_destination, else_destination }) => { writeln!( f, - " jmpif {}({}) then: {}, else: {}", - condition, args, then_destination, else_destination + " jmpif {} then: {}, else: {}", + condition, then_destination, else_destination ) } Some(TerminatorInstruction::Return { return_values }) => { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index b30ff11c2e1..c0a94be6f80 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -3,7 +3,7 @@ use acvm::FieldElement; use crate::ssa_refactor::ir::{ basic_block::BasicBlockId, function::{Function, FunctionId}, - instruction::{Binary, BinaryOp, Instruction}, + instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, types::Type, value::ValueId, }; @@ -62,8 +62,12 @@ impl<'ssa> FunctionBuilder<'ssa> { } /// Insert a numeric constant into the current function - pub(crate) fn numeric_constant(&mut self, value: FieldElement, typ: Type) -> ValueId { - self.current_function.dfg.make_constant(value, typ) + pub(crate) fn 
numeric_constant( + &mut self, + value: impl Into, + typ: Type, + ) -> ValueId { + self.current_function.dfg.make_constant(value.into(), typ) } /// Insert a numeric constant into the current function of type Field @@ -71,6 +75,19 @@ impl<'ssa> FunctionBuilder<'ssa> { self.numeric_constant(value.into(), Type::field()) } + pub(crate) fn type_of_value(&self, value: ValueId) -> Type { + self.current_function.dfg.type_of_value(value) + } + + pub(crate) fn insert_block(&mut self) -> BasicBlockId { + self.current_function.dfg.make_block() + } + + pub(crate) fn add_block_parameter(&mut self, block: BasicBlockId, typ: Type) -> ValueId { + self.current_function.dfg.add_block_parameter(block, typ) + } + + /// Inserts a new instruction at the end of the current block and returns its results fn insert_instruction( &mut self, instruction: Instruction, @@ -81,6 +98,13 @@ impl<'ssa> FunctionBuilder<'ssa> { self.current_function.dfg.instruction_results(id) } + /// Switch to inserting instructions in the given block. + /// Expects the given block to be within the same function. If you want to insert + /// instructions into a new function, call new_function instead. + pub(crate) fn switch_to_block(&mut self, block: BasicBlockId) { + self.current_block = block; + } + /// Insert an allocate instruction at the end of the current block, allocating the /// given amount of field elements. Returns the result of the allocate instruction, /// which is always a Reference to the allocated data. @@ -88,16 +112,31 @@ impl<'ssa> FunctionBuilder<'ssa> { self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None)[0] } - /// Insert a Load instruction at the end of the current block, loading from the given address - /// which should point to a previous Allocate instruction. Note that this is limited to loading - /// a single value. Loading multiple values (such as a tuple) will require multiple loads. 
+ /// Insert a Load instruction at the end of the current block, loading from the given offset + /// of the given address which should point to a previous Allocate instruction. Note that + /// this is limited to loading a single value. Loading multiple values (such as a tuple) + /// will require multiple loads. + /// 'offset' is in units of FieldElements here. So loading the fourth FieldElement stored in + /// an array will have an offset of 3. /// Returns the element that was loaded. - pub(crate) fn insert_load(&mut self, address: ValueId, type_to_load: Type) -> ValueId { + pub(crate) fn insert_load( + &mut self, + mut address: ValueId, + offset: ValueId, + type_to_load: Type, + ) -> ValueId { + if let Some(offset) = self.current_function.dfg.get_numeric_constant(offset) { + if !offset.is_zero() { + let offset = self.field_constant(offset); + address = self.insert_binary(address, BinaryOp::Add, offset); + } + }; self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] } /// Insert a Store instruction at the end of the current block, storing the given element - /// at the given address. Expects that the address points to a previous Allocate instruction. + /// at the given address. Expects that the address points somewhere + /// within a previous Allocate instruction. pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { self.insert_instruction(Instruction::Store { address, value }, None); } @@ -119,4 +158,50 @@ impl<'ssa> FunctionBuilder<'ssa> { pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { self.insert_instruction(Instruction::Not(rhs), None)[0] } + + /// Insert a cast instruction at the end of the current block. + /// Returns the result of the cast instruction. + pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { + self.insert_instruction(Instruction::Cast(value, typ), None)[0] + } + + /// Insert a constrain instruction at the end of the current block. 
+ pub(crate) fn insert_constrain(&mut self, boolean: ValueId) { + self.insert_instruction(Instruction::Constrain(boolean), None); + } + + /// Terminates the current block with the given terminator instruction + fn terminate_block_with(&mut self, terminator: TerminatorInstruction) { + self.current_function.dfg.set_block_terminator(self.current_block, terminator); + } + + /// Terminate the current block with a jmp instruction to jmp to the given + /// block with the given arguments. + pub(crate) fn terminate_with_jmp( + &mut self, + destination: BasicBlockId, + arguments: Vec, + ) { + self.terminate_block_with(TerminatorInstruction::Jmp { destination, arguments }); + } + + /// Terminate the current block with a jmpif instruction to jmp with the given arguments + /// block with the given arguments. + pub(crate) fn terminate_with_jmpif( + &mut self, + condition: ValueId, + then_destination: BasicBlockId, + else_destination: BasicBlockId, + ) { + self.terminate_block_with(TerminatorInstruction::JmpIf { + condition, + then_destination, + else_destination, + }); + } + + /// Terminate the current block with a return instruction + pub(crate) fn terminate_with_return(&mut self, return_values: Vec) { + self.terminate_block_with(TerminatorInstruction::Return { return_values }); + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index f76a6675077..30855b8fdc8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -129,7 +129,7 @@ impl<'a> FunctionContext<'a> { /// Insert a unit constant into the current function if not already /// present, and return its value pub(super) fn unit_value(&mut self) -> Values { - self.builder.numeric_constant(0u128.into(), Type::Unit).into() + self.builder.numeric_constant(0u128, Type::Unit).into() } /// Insert a binary instruction at the end of the current block. 
@@ -155,6 +155,15 @@ impl<'a> FunctionContext<'a> { } result.into() } + + /// Create a const offset of an address for an array load or store + pub(super) fn make_offset(&mut self, mut address: ValueId, offset: u128) -> ValueId { + if offset != 0 { + let offset = self.builder.field_constant(offset); + address = self.builder.insert_binary(address, BinaryOp::Add, offset); + } + address + } } /// True if the given operator cannot be encoded directly and needs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 553b5eb2218..04fb88d76d0 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -1,7 +1,6 @@ mod context; mod value; -use acvm::FieldElement; use context::SharedContext; use iter_extended::vecmap; use noirc_errors::Location; @@ -88,15 +87,11 @@ impl<'a> FunctionContext<'a> { self.builder.numeric_constant(*value, typ).into() } ast::Literal::Bool(value) => { - // Booleans are represented as u1s with 0 = false, 1 = true - let typ = Type::unsigned(1); - let value = FieldElement::from(*value as u128); - self.builder.numeric_constant(value, typ).into() + self.builder.numeric_constant(*value as u128, Type::bool()).into() } ast::Literal::Str(string) => { let elements = vecmap(string.as_bytes(), |byte| { - let value = FieldElement::from(*byte as u128); - self.builder.numeric_constant(value, Type::field()).into() + self.builder.numeric_constant(*byte as u128, Type::field()).into() }); self.codegen_array(elements, Tree::Leaf(Type::field())) } @@ -110,15 +105,10 @@ impl<'a> FunctionContext<'a> { })); // Now we must manually store all the elements into the array - let mut i = 0; + let mut i = 0u128; for element in elements { element.for_each(|value| { - let address = if i == 0 { - array - } else { - let offset = self.builder.field_constant(i as u128); - self.builder.insert_binary(array, BinaryOp::Add, offset) - }; + let 
address = self.make_offset(array, i); self.builder.insert_store(address, value.eval()); i += 1; }); @@ -135,8 +125,16 @@ impl<'a> FunctionContext<'a> { result } - fn codegen_unary(&mut self, _unary: &ast::Unary) -> Values { - todo!() + fn codegen_unary(&mut self, unary: &ast::Unary) -> Values { + let rhs = self.codegen_non_tuple_expression(&unary.rhs); + match unary.operator { + noirc_frontend::UnaryOp::Not => self.builder.insert_not(rhs).into(), + noirc_frontend::UnaryOp::Minus => { + let typ = self.builder.type_of_value(rhs); + let zero = self.builder.numeric_constant(0u128, typ); + self.builder.insert_binary(zero, BinaryOp::Sub, rhs).into() + } + } } fn codegen_binary(&mut self, binary: &ast::Binary) -> Values { @@ -145,20 +143,71 @@ impl<'a> FunctionContext<'a> { self.insert_binary(lhs, binary.operator, rhs) } - fn codegen_index(&mut self, _index: &ast::Index) -> Values { - todo!() + fn codegen_index(&mut self, index: &ast::Index) -> Values { + let array = self.codegen_non_tuple_expression(&index.collection); + let base_offset = self.codegen_non_tuple_expression(&index.index); + + // base_index = base_offset * type_size + let type_size = Self::convert_type(&index.element_type).size_of_type(); + let type_size = self.builder.field_constant(type_size as u128); + let base_index = self.builder.insert_binary(base_offset, BinaryOp::Mul, type_size); + + let mut field_index = 0u128; + self.map_type(&index.element_type, |ctx, typ| { + let offset = ctx.make_offset(base_index, field_index); + field_index += 1; + ctx.builder.insert_load(array, offset, typ).into() + }) } - fn codegen_cast(&mut self, _cast: &ast::Cast) -> Values { - todo!() + fn codegen_cast(&mut self, cast: &ast::Cast) -> Values { + let lhs = self.codegen_non_tuple_expression(&cast.lhs); + let typ = Self::convert_non_tuple_type(&cast.r#type); + self.builder.insert_cast(lhs, typ).into() } fn codegen_for(&mut self, _for_expr: &ast::For) -> Values { todo!() } - fn codegen_if(&mut self, _if_expr: &ast::If) -> 
Values { - todo!() + fn codegen_if(&mut self, if_expr: &ast::If) -> Values { + let condition = self.codegen_non_tuple_expression(&if_expr.condition); + + let then_block = self.builder.insert_block(); + let else_block = self.builder.insert_block(); + + self.builder.terminate_with_jmpif(condition, then_block, else_block); + + self.builder.switch_to_block(then_block); + let then_value = self.codegen_expression(&if_expr.consequence); + + let mut result = self.unit_value(); + + if let Some(alternative) = &if_expr.alternative { + self.builder.switch_to_block(else_block); + let else_value = self.codegen_expression(alternative); + + let end_block = self.builder.insert_block(); + + // Create block arguments for the end block as needed to branch to + // with our then and else value. + result = self.map_type(&if_expr.typ, |ctx, typ| { + ctx.builder.add_block_parameter(end_block, typ).into() + }); + + self.builder.terminate_with_jmp(end_block, else_value.into_value_list()); + + // Must also set the then block to jmp to the end now + self.builder.switch_to_block(then_block); + self.builder.terminate_with_jmp(end_block, then_value.into_value_list()); + self.builder.switch_to_block(end_block); + } else { + // In the case we have no 'else', the 'else' block is actually the end block. 
+ self.builder.terminate_with_jmp(else_block, vec![]); + self.builder.switch_to_block(else_block); + } + + result } fn codegen_tuple(&mut self, tuple: &[Expression]) -> Values { @@ -182,8 +231,10 @@ impl<'a> FunctionContext<'a> { todo!() } - fn codegen_constrain(&mut self, _constrain: &Expression, _location: Location) -> Values { - todo!() + fn codegen_constrain(&mut self, expr: &Expression, _location: Location) -> Values { + let boolean = self.codegen_non_tuple_expression(expr); + self.builder.insert_constrain(boolean); + self.unit_value() } fn codegen_assign(&mut self, _assign: &ast::Assign) -> Values { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 83a5d15c904..31a93374940 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -1,3 +1,5 @@ +use iter_extended::vecmap; + use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; @@ -76,3 +78,11 @@ impl Tree { self.count_leaves() } } + +impl Tree { + /// Flattens and evaluates this Tree into a list of ir values + /// for return statements, branching instructions, or function parameters. 
+ pub(super) fn into_value_list(self) -> Vec { + vecmap(self.flatten(), Value::eval) + } +} diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index 04aec9a6726..bad88885749 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -131,6 +131,7 @@ pub struct Call { pub struct Index { pub collection: Box, pub index: Box, + pub element_type: Type, pub location: Location, } diff --git a/crates/noirc_frontend/src/monomorphization/mod.rs b/crates/noirc_frontend/src/monomorphization/mod.rs index 79c9bab7d8a..3c3c602d132 100644 --- a/crates/noirc_frontend/src/monomorphization/mod.rs +++ b/crates/noirc_frontend/src/monomorphization/mod.rs @@ -412,7 +412,7 @@ impl<'interner> Monomorphizer<'interner> { | ast::Type::Bool | ast::Type::Unit | ast::Type::Function(_, _) => { - ast::Expression::Index(ast::Index { collection, index, location }) + ast::Expression::Index(ast::Index { collection, index, element_type, location }) } ast::Type::Tuple(elements) => { From 0dc2cac5bc26d277a0e6377fd774e0ec9c8d3531 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Wed, 26 Apr 2023 19:52:25 +0200 Subject: [PATCH 12/66] feat(noir): added assert keyword (#1227) * feat(keyword): added assert keyword * test(keyword): added integration test * fix(parser): separate parser for assertion * test(parser): fix test * style(parser): fix whitespaces * refactor(keyword): assert use constrain parser * feat(parser): give assertions function form * fix(lexer): update the basic test * style: label the whole assertion statement --- .../tests/test_data/assert/Nargo.toml | 5 ++ .../tests/test_data/assert/Prover.toml | 1 + .../tests/test_data/assert/src/main.nr | 3 ++ crates/noirc_frontend/src/lexer/lexer.rs | 10 ++++ crates/noirc_frontend/src/lexer/token.rs | 3 ++ crates/noirc_frontend/src/parser/parser.rs | 53 +++++++++++++++++++ 6 files changed, 75 
insertions(+) create mode 100644 crates/nargo_cli/tests/test_data/assert/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/assert/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/assert/src/main.nr diff --git a/crates/nargo_cli/tests/test_data/assert/Nargo.toml b/crates/nargo_cli/tests/test_data/assert/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- /dev/null +++ b/crates/nargo_cli/tests/test_data/assert/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/assert/Prover.toml b/crates/nargo_cli/tests/test_data/assert/Prover.toml new file mode 100644 index 00000000000..4dd6b405159 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/assert/Prover.toml @@ -0,0 +1 @@ +x = "1" diff --git a/crates/nargo_cli/tests/test_data/assert/src/main.nr b/crates/nargo_cli/tests/test_data/assert/src/main.nr new file mode 100644 index 00000000000..00e94414c0b --- /dev/null +++ b/crates/nargo_cli/tests/test_data/assert/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field) { + assert(x == 1); +} diff --git a/crates/noirc_frontend/src/lexer/lexer.rs b/crates/noirc_frontend/src/lexer/lexer.rs index c1ff328a3ed..5e0d99cfed9 100644 --- a/crates/noirc_frontend/src/lexer/lexer.rs +++ b/crates/noirc_frontend/src/lexer/lexer.rs @@ -560,6 +560,7 @@ fn test_basic_language_syntax() { x * y; }; constrain mul(five, ten) == 50; + assert(ten + five == 15); "; let expected = vec![ @@ -601,6 +602,15 @@ fn test_basic_language_syntax() { Token::Equal, Token::Int(50_i128.into()), Token::Semicolon, + Token::Keyword(Keyword::Assert), + Token::LeftParen, + Token::Ident("ten".to_string()), + Token::Plus, + Token::Ident("five".to_string()), + Token::Equal, + Token::Int(15_i128.into()), + Token::RightParen, + Token::Semicolon, Token::EOF, ]; let mut lexer = Lexer::new(input); diff --git a/crates/noirc_frontend/src/lexer/token.rs 
b/crates/noirc_frontend/src/lexer/token.rs index 6b021a3dcbb..bfcd0f4be51 100644 --- a/crates/noirc_frontend/src/lexer/token.rs +++ b/crates/noirc_frontend/src/lexer/token.rs @@ -414,6 +414,7 @@ impl AsRef for Attribute { #[cfg_attr(test, derive(strum_macros::EnumIter))] pub enum Keyword { As, + Assert, Bool, Char, CompTime, @@ -448,6 +449,7 @@ impl fmt::Display for Keyword { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Keyword::As => write!(f, "as"), + Keyword::Assert => write!(f, "assert"), Keyword::Bool => write!(f, "bool"), Keyword::Char => write!(f, "char"), Keyword::CompTime => write!(f, "comptime"), @@ -485,6 +487,7 @@ impl Keyword { pub(crate) fn lookup_keyword(word: &str) -> Option { let keyword = match word { "as" => Keyword::As, + "assert" => Keyword::Assert, "bool" => Keyword::Bool, "char" => Keyword::Char, "comptime" => Keyword::CompTime, diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 065b6362fb4..15ed0d74222 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -435,6 +435,7 @@ where { choice(( constrain(expr_parser.clone()), + assertion(expr_parser.clone()), declaration(expr_parser.clone()), assignment(expr_parser.clone()), expr_parser.map(Statement::Expression), @@ -449,6 +450,15 @@ where .map(|expr| Statement::Constrain(ConstrainStatement(expr))) } +fn assertion<'a, P>(expr_parser: P) -> impl NoirParser + 'a +where + P: ExprParser + 'a, +{ + ignore_then_commit(keyword(Keyword::Assert), parenthesized(expr_parser)) + .labelled("statement") + .map(|expr| Statement::Constrain(ConstrainStatement(expr))) +} + fn declaration<'a, P>(expr_parser: P) -> impl NoirParser + 'a where P: ExprParser + 'a, @@ -1228,6 +1238,47 @@ mod test { ); } + #[test] + fn parse_assert() { + parse_with(assertion(expression()), "assert(x == y)").unwrap(); + + // Currently we disallow constrain statements where the outer infix operator + // 
produces a value. This would require an implicit `==` which + // may not be intuitive to the user. + // + // If this is deemed useful, one would either apply a transformation + // or interpret it with an `==` in the evaluator + let disallowed_operators = vec![ + BinaryOpKind::And, + BinaryOpKind::Subtract, + BinaryOpKind::Divide, + BinaryOpKind::Multiply, + BinaryOpKind::Or, + ]; + + for operator in disallowed_operators { + let src = format!("assert(x {} y);", operator.as_string()); + parse_with(assertion(expression()), &src).unwrap_err(); + } + + // These are general cases which should always work. + // + // The first case is the most noteworthy. It contains two `==` + // The first (inner) `==` is a predicate which returns 0/1 + // The outer layer is an infix `==` which is + // associated with the Constrain statement + parse_all( + assertion(expression()), + vec![ + "assert(((x + y) == k) + z == y)", + "assert((x + !y) == y)", + "assert((x ^ y) == y)", + "assert((x ^ y) == (y + m))", + "assert(x + x ^ x == y | m)", + ], + ); + } + #[test] fn parse_let() { // Why is it valid to specify a let declaration as having type u8? 
@@ -1483,7 +1534,9 @@ mod test { ("let", 3, "let $error: unspecified = Error"), ("foo = one two three", 1, "foo = plain::one"), ("constrain", 1, "constrain Error"), + ("assert", 1, "constrain Error"), ("constrain x ==", 1, "constrain (plain::x == Error)"), + ("assert(x ==)", 1, "constrain (plain::x == Error)"), ]; let show_errors = |v| vecmap(v, ToString::to_string).join("\n"); From 407cecbcd410b59bae20907c2c62b512d41a5cb9 Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 26 Apr 2023 16:27:11 -0400 Subject: [PATCH 13/66] chore(ssa refactor): Fix recursive printing of blocks (#1230) * Implement ssa-gen for if * Satisfy the clippy gods * Fix printing bug * Print constants directly * chore(ssa refactor): Implement for loops (#1233) Impl for loops --- .../src/ssa_refactor/ir/dfg.rs | 11 ++- .../src/ssa_refactor/ir/map.rs | 16 +++- .../src/ssa_refactor/ir/printer.rs | 63 +++++++++----- .../src/ssa_refactor/ssa_gen/context.rs | 5 ++ .../src/ssa_refactor/ssa_gen/mod.rs | 30 ++++++- crates/noirc_frontend/src/parser/parser.rs | 82 +++++++++---------- 6 files changed, 142 insertions(+), 65 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 8acce876d90..9b713eee06e 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -234,8 +234,17 @@ impl DataFlowGraph { /// Returns the field element represented by this value if it is a numeric constant. /// Returns None if the given value is not a numeric constant. pub(crate) fn get_numeric_constant(&self, value: Id) -> Option { + self.get_numeric_constant_with_type(value).map(|(value, _typ)| value) + } + + /// Returns the field element and type represented by this value if it is a numeric constant. + /// Returns None if the given value is not a numeric constant. 
+ pub(crate) fn get_numeric_constant_with_type( + &self, + value: Id, + ) -> Option<(FieldElement, Type)> { match self.values[value] { - Value::NumericConstant { constant, .. } => Some(self[constant].value()), + Value::NumericConstant { constant, typ } => Some((self[constant].value(), typ)), _ => None, } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index 5937b374726..24b30241293 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -69,9 +69,21 @@ impl std::fmt::Debug for Id { } } -impl std::fmt::Display for Id { +impl std::fmt::Display for Id { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "${}", self.index) + write!(f, "b{}", self.index) + } +} + +impl std::fmt::Display for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "v{}", self.index) + } +} + +impl std::fmt::Display for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "f{}", self.index) } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index a711482e08c..57c573c7bd4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -1,5 +1,8 @@ //! This file is for pretty-printing the SSA IR in a human-readable form for debugging. 
-use std::fmt::{Formatter, Result}; +use std::{ + collections::HashSet, + fmt::{Formatter, Result}, +}; use iter_extended::vecmap; @@ -12,19 +15,26 @@ use super::{ pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { writeln!(f, "fn {} {{", function.name)?; - display_block_with_successors(function, function.entry_block, f)?; + display_block_with_successors(function, function.entry_block, &mut HashSet::new(), f)?; write!(f, "}}") } +/// Displays a block followed by all of its successors recursively. +/// This uses a HashSet to keep track of the visited blocks. Otherwise, +/// there would be infinite recursion for any loops in the IR. pub(crate) fn display_block_with_successors( function: &Function, block_id: BasicBlockId, + visited: &mut HashSet, f: &mut Formatter, ) -> Result { display_block(function, block_id, f)?; + visited.insert(block_id); for successor in function.dfg[block_id].successors() { - display_block(function, successor, f)?; + if !visited.contains(&successor) { + display_block_with_successors(function, successor, visited, f)?; + } } Ok(()) } @@ -36,26 +46,36 @@ pub(crate) fn display_block( ) -> Result { let block = &function.dfg[block_id]; - writeln!(f, "{}({}):", block_id, value_list(block.parameters()))?; + writeln!(f, " {}({}):", block_id, value_list(function, block.parameters()))?; for instruction in block.instructions() { display_instruction(function, *instruction, f)?; } - display_terminator(block.terminator(), f) + display_terminator(function, block.terminator(), f) +} + +/// Specialize displaying value ids so that if they refer to constants we +/// print the constant directly +fn value(function: &Function, id: ValueId) -> String { + match function.dfg.get_numeric_constant_with_type(id) { + Some((value, typ)) => format!("{} {}", value, typ), + None => id.to_string(), + } } -fn value_list(values: &[ValueId]) -> String { - vecmap(values, ToString::to_string).join(", ") +fn value_list(function: &Function, values: 
&[ValueId]) -> String { + vecmap(values, |id| value(function, *id)).join(", ") } pub(crate) fn display_terminator( + function: &Function, terminator: Option<&TerminatorInstruction>, f: &mut Formatter, ) -> Result { match terminator { Some(TerminatorInstruction::Jmp { destination, arguments }) => { - writeln!(f, " jmp {}({})", destination, value_list(arguments)) + writeln!(f, " jmp {}({})", destination, value_list(function, arguments)) } Some(TerminatorInstruction::JmpIf { condition, then_destination, else_destination }) => { writeln!( @@ -65,7 +85,7 @@ pub(crate) fn display_terminator( ) } Some(TerminatorInstruction::Return { return_values }) => { - writeln!(f, " return {}", value_list(return_values)) + writeln!(f, " return {}", value_list(function, return_values)) } None => writeln!(f, " (no terminator instruction)"), } @@ -81,29 +101,34 @@ pub(crate) fn display_instruction( let results = function.dfg.instruction_results(instruction); if !results.is_empty() { - write!(f, "{} = ", value_list(results))?; + write!(f, "{} = ", value_list(function, results))?; } + let show = |id| value(function, id); + match &function.dfg[instruction] { Instruction::Binary(binary) => { - writeln!(f, "{} {}, {}", binary.operator, binary.lhs, binary.rhs) + writeln!(f, "{} {}, {}", binary.operator, show(binary.lhs), show(binary.rhs)) } - Instruction::Cast(value, typ) => writeln!(f, "cast {value} as {typ}"), - Instruction::Not(value) => writeln!(f, "not {value}"), + Instruction::Cast(lhs, typ) => writeln!(f, "cast {} as {typ}", show(*lhs)), + Instruction::Not(rhs) => writeln!(f, "not {}", show(*rhs)), Instruction::Truncate { value, bit_size, max_bit_size } => { - writeln!(f, "truncate {value} to {bit_size} bits, max_bit_size: {max_bit_size}") + let value = show(*value); + writeln!(f, "truncate {value} to {bit_size} bits, max_bit_size: {max_bit_size}",) } Instruction::Constrain(value) => { - writeln!(f, "constrain {value}") + writeln!(f, "constrain {}", show(*value)) } Instruction::Call { 
func, arguments } => { - writeln!(f, "call {func}({})", value_list(arguments)) + writeln!(f, "call {func}({})", value_list(function, arguments)) } Instruction::Intrinsic { func, arguments } => { - writeln!(f, "intrinsic {func}({})", value_list(arguments)) + writeln!(f, "intrinsic {func}({})", value_list(function, arguments)) } Instruction::Allocate { size } => writeln!(f, "alloc {size} fields"), - Instruction::Load { address } => writeln!(f, "load {address}"), - Instruction::Store { address, value } => writeln!(f, "store {value} at {address}"), + Instruction::Load { address } => writeln!(f, "load {}", show(*address)), + Instruction::Store { address, value } => { + writeln!(f, "store {} at {}", show(*address), show(*value)) + } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 30855b8fdc8..48175ebb52b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -164,6 +164,11 @@ impl<'a> FunctionContext<'a> { } address } + + pub(super) fn define(&mut self, id: LocalId, value: Values) { + let existing = self.definitions.insert(id, value); + assert!(existing.is_none(), "Variable {id:?} was defined twice in ssa-gen pass"); + } } /// True if the given operator cannot be encoded directly and needs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 04fb88d76d0..f8faf8eeeb4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -166,8 +166,34 @@ impl<'a> FunctionContext<'a> { self.builder.insert_cast(lhs, typ).into() } - fn codegen_for(&mut self, _for_expr: &ast::For) -> Values { - todo!() + fn codegen_for(&mut self, for_expr: &ast::For) -> Values { + let loop_entry = self.builder.insert_block(); + let loop_body = self.builder.insert_block(); + let 
loop_end = self.builder.insert_block(); + + // this is the 'i' in `for i in start .. end { block }` + let loop_index = self.builder.add_block_parameter(loop_entry, Type::field()); + + let start_index = self.codegen_non_tuple_expression(&for_expr.start_range); + let end_index = self.codegen_non_tuple_expression(&for_expr.end_range); + + self.builder.terminate_with_jmp(loop_entry, vec![start_index]); + + // Compile the loop entry block + self.builder.switch_to_block(loop_entry); + let jump_condition = self.builder.insert_binary(loop_index, BinaryOp::Lt, end_index); + self.builder.terminate_with_jmpif(jump_condition, loop_body, loop_end); + + // Compile the loop body + self.builder.switch_to_block(loop_body); + self.define(for_expr.index_variable, loop_index.into()); + self.codegen_expression(&for_expr.block); + let new_loop_index = self.make_offset(loop_index, 1); + self.builder.terminate_with_jmp(loop_entry, vec![new_loop_index]); + + // Finish by switching back to the end of the loop + self.builder.switch_to_block(loop_end); + self.unit_value() } fn codegen_if(&mut self, if_expr: &ast::If) -> Values { diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 15ed0d74222..575a9403ea8 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -1238,46 +1238,46 @@ mod test { ); } - #[test] - fn parse_assert() { - parse_with(assertion(expression()), "assert(x == y)").unwrap(); - - // Currently we disallow constrain statements where the outer infix operator - // produces a value. This would require an implicit `==` which - // may not be intuitive to the user. 
- // - // If this is deemed useful, one would either apply a transformation - // or interpret it with an `==` in the evaluator - let disallowed_operators = vec![ - BinaryOpKind::And, - BinaryOpKind::Subtract, - BinaryOpKind::Divide, - BinaryOpKind::Multiply, - BinaryOpKind::Or, - ]; - - for operator in disallowed_operators { - let src = format!("assert(x {} y);", operator.as_string()); - parse_with(assertion(expression()), &src).unwrap_err(); - } - - // These are general cases which should always work. - // - // The first case is the most noteworthy. It contains two `==` - // The first (inner) `==` is a predicate which returns 0/1 - // The outer layer is an infix `==` which is - // associated with the Constrain statement - parse_all( - assertion(expression()), - vec![ - "assert(((x + y) == k) + z == y)", - "assert((x + !y) == y)", - "assert((x ^ y) == y)", - "assert((x ^ y) == (y + m))", - "assert(x + x ^ x == y | m)", - ], - ); - } + #[test] + fn parse_assert() { + parse_with(assertion(expression()), "assert(x == y)").unwrap(); + + // Currently we disallow constrain statements where the outer infix operator + // produces a value. This would require an implicit `==` which + // may not be intuitive to the user. + // + // If this is deemed useful, one would either apply a transformation + // or interpret it with an `==` in the evaluator + let disallowed_operators = vec![ + BinaryOpKind::And, + BinaryOpKind::Subtract, + BinaryOpKind::Divide, + BinaryOpKind::Multiply, + BinaryOpKind::Or, + ]; + + for operator in disallowed_operators { + let src = format!("assert(x {} y);", operator.as_string()); + parse_with(assertion(expression()), &src).unwrap_err(); + } + + // These are general cases which should always work. + // + // The first case is the most noteworthy. 
It contains two `==` + // The first (inner) `==` is a predicate which returns 0/1 + // The outer layer is an infix `==` which is + // associated with the Constrain statement + parse_all( + assertion(expression()), + vec![ + "assert(((x + y) == k) + z == y)", + "assert((x + !y) == y)", + "assert((x ^ y) == y)", + "assert((x ^ y) == (y + m))", + "assert(x + x ^ x == y | m)", + ], + ); + } #[test] fn parse_let() { @@ -1322,7 +1322,7 @@ mod test { "fn f(f: pub Field, y : Field, z : comptime Field) -> u8 { x + a }", "fn func_name(f: Field, y : pub Field, z : pub [u8;5],) {}", "fn func_name(x: [Field], y : [Field;2],y : pub [Field;2], z : pub [u8;5]) {}", - "fn main(x: pub u8, y: pub u8) -> distinct pub [u8; 2] { [x, y] }" + "fn main(x: pub u8, y: pub u8) -> distinct pub [u8; 2] { [x, y] }", ], ); From 62dcc5c287ab386caba6e74314f49aedbefc318c Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 26 Apr 2023 18:33:16 -0400 Subject: [PATCH 14/66] chore(ssa refactor): Implement mutable and immutable variables (#1234) * Implement ssa-gen for if * Satisfy the clippy gods * Fix printing bug * Print constants directly * Impl for loops * Implement immutable and mutable variables * chore(ssa refactor): Implement for loops (#1233) Impl for loops --- .../src/ssa_refactor/ir/dfg.rs | 16 +++- .../ssa_builder/function_builder.rs | 28 +++++- .../src/ssa_refactor/ssa_gen/context.rs | 46 ++++++++++ .../src/ssa_refactor/ssa_gen/mod.rs | 92 +++++++++++++------ .../src/ssa_refactor/ssa_gen/value.rs | 54 +++++++++-- 5 files changed, 201 insertions(+), 35 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 9b713eee06e..ab2018b1df8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -126,6 +126,17 @@ impl DataFlowGraph { id } + /// Replace an instruction id with another. 
+ /// + /// This function should generally be avoided if possible in favor of inserting new + /// instructions since it does not check whether the instruction results of the removed + /// instruction are still in use. Users of this function thus need to ensure the old + /// instruction's results are no longer in use or are otherwise compatible with the + /// new instruction's result count and types. + pub(crate) fn replace_instruction(&mut self, id: Id, instruction: Instruction) { + self.instructions[id] = instruction; + } + /// Insert a value into the dfg's storage and return an id to reference it. /// Until the value is used in an instruction it is unreachable. pub(crate) fn make_value(&mut self, value: Value) -> ValueId { @@ -141,8 +152,11 @@ impl DataFlowGraph { /// Attaches results to the instruction, clearing any previous results. /// + /// This does not normally need to be called manually as it is called within + /// make_instruction automatically. + /// /// Returns the results of the instruction - fn make_instruction_results( + pub(crate) fn make_instruction_results( &mut self, instruction_id: InstructionId, ctrl_typevars: Option>, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs index c0a94be6f80..d11e9a763cd 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs @@ -5,7 +5,7 @@ use crate::ssa_refactor::ir::{ function::{Function, FunctionId}, instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, types::Type, - value::ValueId, + value::{Value, ValueId}, }; use super::SharedBuilderContext; @@ -204,4 +204,30 @@ impl<'ssa> FunctionBuilder<'ssa> { pub(crate) fn terminate_with_return(&mut self, return_values: Vec) { self.terminate_block_with(TerminatorInstruction::Return { return_values }); } + + /// Mutates a load instruction into a 
store instruction. + /// + /// This function is used while generating ssa-form for assignments currently. + /// To re-use most of the expression infrastructure, the lvalue of an assignment + /// is compiled as an expression and to assign to it we replace the final load + /// (which should always be present to load a mutable value) with a store of the + /// assigned value. + pub(crate) fn mutate_load_into_store(&mut self, load_result: ValueId, value_to_store: ValueId) { + let (instruction, address) = match &self.current_function.dfg[load_result] { + Value::Instruction { instruction, .. } => { + match &self.current_function.dfg[*instruction] { + Instruction::Load { address } => (*instruction, *address), + other => { + panic!("mutate_load_into_store: Expected Load instruction, found {other:?}") + } + } + } + other => panic!("mutate_load_into_store: Expected Load instruction, found {other:?}"), + }; + + let store = Instruction::Store { address, value: value_to_store }; + self.current_function.dfg.replace_instruction(instruction, store); + // Clear the results of the previous load for safety + self.current_function.dfg.make_instruction_results(instruction, None); + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 48175ebb52b..10206e28c2d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -21,6 +21,7 @@ type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; pub(super) struct FunctionContext<'a> { definitions: HashMap, + pub(super) builder: FunctionBuilder<'a>, shared_context: &'a SharedContext, } @@ -165,10 +166,55 @@ impl<'a> FunctionContext<'a> { address } + /// Define a local variable to be some Values that can later be retrieved + /// by calling self.lookup(id) pub(super) fn define(&mut self, id: LocalId, value: Values) { let existing = self.definitions.insert(id, value); 
assert!(existing.is_none(), "Variable {id:?} was defined twice in ssa-gen pass"); } + + /// Looks up the value of a given local variable. Expects the variable to have + /// been previously defined or panics otherwise. + pub(super) fn lookup(&self, id: LocalId) -> Values { + self.definitions.get(&id).expect("lookup: variable not defined").clone() + } + + /// Extract the given field of the tuple. Panics if the given Values is not + /// a Tree::Branch or does not have enough fields. + pub(super) fn get_field(tuple: Values, field_index: usize) -> Values { + match tuple { + Tree::Branch(mut trees) => trees.remove(field_index), + Tree::Leaf(value) => { + unreachable!("Tried to extract tuple index {field_index} from non-tuple {value:?}") + } + } + } + + /// Mutate lhs to equal rhs + pub(crate) fn assign(&mut self, lhs: Values, rhs: Values) { + match (lhs, rhs) { + (Tree::Branch(lhs_branches), Tree::Branch(rhs_branches)) => { + assert_eq!(lhs_branches.len(), rhs_branches.len()); + + for (lhs, rhs) in lhs_branches.into_iter().zip(rhs_branches) { + self.assign(lhs, rhs); + } + } + (Tree::Leaf(lhs), Tree::Leaf(rhs)) => { + // Re-evaluating these should have no effect + let (lhs, rhs) = (lhs.eval(self), rhs.eval(self)); + + // Expect lhs to be previously evaluated. If it is a load we need to undo + // the load to get the address to store to. 
+ self.builder.mutate_load_into_store(lhs, rhs); + } + (lhs, rhs) => { + unreachable!( + "assign: Expected lhs and rhs values to match but found {lhs:?} and {rhs:?}" + ) + } + } + } } /// True if the given operator cannot be encoded directly and needs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index f8faf8eeeb4..a7880032d42 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -63,16 +63,16 @@ impl<'a> FunctionContext<'a> { /// Codegen any non-tuple expression so that we can unwrap the Values /// tree to return a single value for use with most SSA instructions. fn codegen_non_tuple_expression(&mut self, expr: &Expression) -> ValueId { - match self.codegen_expression(expr) { - Tree::Branch(branches) => { - panic!("codegen_non_tuple_expression called on tuple {branches:?}") - } - Tree::Leaf(value) => value.eval(), - } + self.codegen_expression(expr).into_leaf().eval(self) } - fn codegen_ident(&mut self, _ident: &ast::Ident) -> Values { - todo!() + fn codegen_ident(&mut self, ident: &ast::Ident) -> Values { + match &ident.definition { + ast::Definition::Local(id) => self.lookup(*id).map(|value| value.eval(self).into()), + ast::Definition::Function(_) => todo!(), + ast::Definition::Builtin(_) => todo!(), + ast::Definition::LowLevel(_) => todo!(), + } } fn codegen_literal(&mut self, literal: &ast::Literal) -> Values { @@ -107,9 +107,10 @@ impl<'a> FunctionContext<'a> { // Now we must manually store all the elements into the array let mut i = 0u128; for element in elements { - element.for_each(|value| { + element.for_each(|element| { let address = self.make_offset(array, i); - self.builder.insert_store(address, value.eval()); + let element = element.eval(self); + self.builder.insert_store(address, element); i += 1; }); } @@ -145,15 +146,26 @@ impl<'a> FunctionContext<'a> { fn codegen_index(&mut self, index: 
&ast::Index) -> Values { let array = self.codegen_non_tuple_expression(&index.collection); - let base_offset = self.codegen_non_tuple_expression(&index.index); + self.codegen_array_index(array, &index.index, &index.element_type) + } + + /// This is broken off from codegen_index so that it can also be + /// used to codegen a LValue::Index + fn codegen_array_index( + &mut self, + array: super::ir::value::ValueId, + index: &ast::Expression, + element_type: &ast::Type, + ) -> Values { + let base_offset = self.codegen_non_tuple_expression(index); // base_index = base_offset * type_size - let type_size = Self::convert_type(&index.element_type).size_of_type(); + let type_size = Self::convert_type(element_type).size_of_type(); let type_size = self.builder.field_constant(type_size as u128); let base_index = self.builder.insert_binary(base_offset, BinaryOp::Mul, type_size); let mut field_index = 0u128; - self.map_type(&index.element_type, |ctx, typ| { + self.map_type(element_type, |ctx, typ| { let offset = ctx.make_offset(base_index, field_index); field_index += 1; ctx.builder.insert_load(array, offset, typ).into() @@ -221,11 +233,13 @@ impl<'a> FunctionContext<'a> { ctx.builder.add_block_parameter(end_block, typ).into() }); - self.builder.terminate_with_jmp(end_block, else_value.into_value_list()); + let else_values = else_value.into_value_list(self); + self.builder.terminate_with_jmp(end_block, else_values); // Must also set the then block to jmp to the end now self.builder.switch_to_block(then_block); - self.builder.terminate_with_jmp(end_block, then_value.into_value_list()); + let then_values = then_value.into_value_list(self); + self.builder.terminate_with_jmp(end_block, then_values); self.builder.switch_to_block(end_block); } else { // In the case we have no 'else', the 'else' block is actually the end block. 
@@ -240,21 +254,30 @@ impl<'a> FunctionContext<'a> { Tree::Branch(vecmap(tuple, |expr| self.codegen_expression(expr))) } - fn codegen_extract_tuple_field(&mut self, tuple: &Expression, index: usize) -> Values { - match self.codegen_expression(tuple) { - Tree::Branch(mut trees) => trees.remove(index), - Tree::Leaf(value) => { - unreachable!("Tried to extract tuple index {index} from non-tuple {value:?}") - } - } + fn codegen_extract_tuple_field(&mut self, tuple: &Expression, field_index: usize) -> Values { + let tuple = self.codegen_expression(tuple); + Self::get_field(tuple, field_index) } fn codegen_call(&mut self, _call: &ast::Call) -> Values { todo!() } - fn codegen_let(&mut self, _let_expr: &ast::Let) -> Values { - todo!() + fn codegen_let(&mut self, let_expr: &ast::Let) -> Values { + let mut values = self.codegen_expression(&let_expr.expression); + + if let_expr.mutable { + values.map_mut(|value| { + let value = value.eval(self); + // Size is always 1 here since we're recursively unpacking tuples + let alloc = self.builder.insert_allocate(1); + self.builder.insert_store(alloc, value); + alloc.into() + }); + } + + self.define(let_expr.id, values); + self.unit_value() } fn codegen_constrain(&mut self, expr: &Expression, _location: Location) -> Values { @@ -263,8 +286,25 @@ impl<'a> FunctionContext<'a> { self.unit_value() } - fn codegen_assign(&mut self, _assign: &ast::Assign) -> Values { - todo!() + fn codegen_assign(&mut self, assign: &ast::Assign) -> Values { + let lhs = self.codegen_lvalue(&assign.lvalue); + let rhs = self.codegen_expression(&assign.expression); + self.assign(lhs, rhs); + self.unit_value() + } + + fn codegen_lvalue(&mut self, lvalue: &ast::LValue) -> Values { + match lvalue { + ast::LValue::Ident(ident) => self.codegen_ident(ident), + ast::LValue::Index { array, index, element_type, location: _ } => { + let array = self.codegen_lvalue(array).into_leaf().eval(self); + self.codegen_array_index(array, index, element_type) + } + 
ast::LValue::MemberAccess { object, field_index } => { + let object = self.codegen_lvalue(object); + Self::get_field(object, *field_index) + } + } } fn codegen_semi(&mut self, expr: &Expression) -> Values { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 31a93374940..52ff52d75f2 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -4,25 +4,34 @@ use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; -#[derive(Debug)] +use super::context::FunctionContext; + +#[derive(Debug, Clone)] pub(super) enum Tree { Branch(Vec>), Leaf(T), } -#[derive(Debug, Clone)] +#[derive(Debug, Copy, Clone)] pub(super) enum Value { Normal(IrValueId), Function(IrFunctionId), + + /// A mutable variable that must be loaded as the given type before being used + Mutable(IrValueId, Type), } impl Value { /// Evaluate a value, returning an IrValue from it. - /// This has no effect on Value::Normal, but any variables will be updated with their latest - /// use. 
- pub(super) fn eval(self) -> IrValueId { + /// This has no effect on Value::Normal, but any variables will + /// need to be loaded from memory + pub(super) fn eval(self, ctx: &mut FunctionContext) -> IrValueId { match self { Value::Normal(value) => value, + Value::Mutable(address, typ) => { + let offset = ctx.builder.field_constant(0u128); + ctx.builder.insert_load(address, offset, typ) + } Value::Function(_) => panic!("Tried to evaluate a function value"), } } @@ -56,6 +65,37 @@ impl Tree { Tree::Leaf(value) => f(value), } } + + pub(super) fn map_mut(&mut self, mut f: impl FnMut(&T) -> Tree) { + self.map_mut_helper(&mut f); + } + + fn map_mut_helper(&mut self, f: &mut impl FnMut(&T) -> Tree) { + match self { + Tree::Branch(trees) => trees.iter_mut().for_each(|tree| tree.map_mut_helper(f)), + Tree::Leaf(value) => *self = f(value), + } + } + + pub(super) fn map(self, mut f: impl FnMut(T) -> Tree) -> Tree { + self.map_helper(&mut f) + } + + fn map_helper(self, f: &mut impl FnMut(T) -> Tree) -> Tree { + match self { + Tree::Branch(trees) => Tree::Branch(vecmap(trees, |tree| tree.map_helper(f))), + Tree::Leaf(value) => f(value), + } + } + + /// Unwraps this Tree into the value of the leaf node. Panics if + /// this Tree is a Branch + pub(super) fn into_leaf(self) -> T { + match self { + Tree::Branch(_) => panic!("into_leaf called on a Tree::Branch"), + Tree::Leaf(value) => value, + } + } } impl From for Values { @@ -82,7 +122,7 @@ impl Tree { impl Tree { /// Flattens and evaluates this Tree into a list of ir values /// for return statements, branching instructions, or function parameters. 
- pub(super) fn into_value_list(self) -> Vec { - vecmap(self.flatten(), Value::eval) + pub(super) fn into_value_list(self, ctx: &mut FunctionContext) -> Vec { + vecmap(self.flatten(), |value| value.eval(ctx)) } } From 64cf49d22b8c5859396f0a72ff84452b4ab0b1b7 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 27 Apr 2023 04:22:05 -0400 Subject: [PATCH 15/66] chore(ssa refactor): Implement function calls (#1235) * Implement ssa-gen for if * Satisfy the clippy gods * Fix printing bug * Print constants directly * Impl for loops * Implement immutable and mutable variables * Implement function calls --- .../src/ssa_refactor/ir/cfg.rs | 8 +- .../src/ssa_refactor/ir/function.rs | 18 +- .../src/ssa_refactor/ir/printer.rs | 4 +- .../ssa_builder/function_builder.rs | 233 ------------------ .../src/ssa_refactor/ssa_builder/mod.rs | 231 ++++++++++++++++- .../src/ssa_refactor/ssa_gen/context.rs | 93 +++++-- .../src/ssa_refactor/ssa_gen/mod.rs | 50 ++-- 7 files changed, 350 insertions(+), 287 deletions(-) delete mode 100644 crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs index 3e469361c37..42a2cd573a1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs @@ -115,13 +115,14 @@ impl ControlFlowGraph { #[cfg(test)] mod tests { - use crate::ssa_refactor::ir::{instruction::TerminatorInstruction, types::Type}; + use crate::ssa_refactor::ir::{instruction::TerminatorInstruction, map::Id, types::Type}; use super::{super::function::Function, ControlFlowGraph}; #[test] fn empty() { - let mut func = Function::new("func".into()); + let func_id = Id::test_new(0); + let mut func = Function::new("func".into(), func_id); let block_id = func.entry_block(); func.dfg[block_id].set_terminator(TerminatorInstruction::Return { return_values: vec![] }); @@ -139,7 +140,8 @@ mod tests { // block2(): // 
return () // } - let mut func = Function::new("func".into()); + let func_id = Id::test_new(0); + let mut func = Function::new("func".into(), func_id); let block0_id = func.entry_block(); let cond = func.dfg.add_block_parameter(block0_id, Type::unsigned(1)); let block1_id = func.dfg.make_block(); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 1a735726029..ca486d0258a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -18,10 +18,12 @@ pub(crate) struct Function { source_locations: SecondaryMap, /// The first basic block in the function - pub(super) entry_block: BasicBlockId, + entry_block: BasicBlockId, /// Name of the function for debugging only - pub(super) name: String, + name: String, + + id: FunctionId, pub(crate) dfg: DataFlowGraph, } @@ -30,10 +32,18 @@ impl Function { /// Creates a new function with an automatically inserted entry block. /// /// Note that any parameters to the function must be manually added later. 
- pub(crate) fn new(name: String) -> Self { + pub(crate) fn new(name: String, id: FunctionId) -> Self { let mut dfg = DataFlowGraph::default(); let entry_block = dfg.make_block(); - Self { name, source_locations: SecondaryMap::new(), entry_block, dfg } + Self { name, source_locations: SecondaryMap::new(), id, entry_block, dfg } + } + + pub(crate) fn name(&self) -> &str { + &self.name + } + + pub(crate) fn id(&self) -> FunctionId { + self.id } pub(crate) fn entry_block(&self) -> BasicBlockId { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index 57c573c7bd4..ff46b49b9b4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -14,8 +14,8 @@ use super::{ }; pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { - writeln!(f, "fn {} {{", function.name)?; - display_block_with_successors(function, function.entry_block, &mut HashSet::new(), f)?; + writeln!(f, "fn {} {} {{", function.name(), function.id())?; + display_block_with_successors(function, function.entry_block(), &mut HashSet::new(), f)?; write!(f, "}}") } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs deleted file mode 100644 index d11e9a763cd..00000000000 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/function_builder.rs +++ /dev/null @@ -1,233 +0,0 @@ -use acvm::FieldElement; - -use crate::ssa_refactor::ir::{ - basic_block::BasicBlockId, - function::{Function, FunctionId}, - instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, - types::Type, - value::{Value, ValueId}, -}; - -use super::SharedBuilderContext; - -/// The per-function context for each ssa function being generated. -/// -/// This is split from the global SsaBuilder context to allow each function -/// to be potentially built concurrently. 
-/// -/// Contrary to the name, this struct has the capacity to build as many -/// functions as needed, although it is limited to one function at a time. -pub(crate) struct FunctionBuilder<'ssa> { - global_context: &'ssa SharedBuilderContext, - - current_function: Function, - current_function_id: FunctionId, - - current_block: BasicBlockId, - - finished_functions: Vec<(FunctionId, Function)>, -} - -impl<'ssa> FunctionBuilder<'ssa> { - pub(crate) fn new(function_name: String, context: &'ssa SharedBuilderContext) -> Self { - let new_function = Function::new(function_name); - let current_block = new_function.entry_block(); - - Self { - global_context: context, - current_function: new_function, - current_function_id: context.next_function(), - current_block, - finished_functions: Vec::new(), - } - } - - /// Finish the current function and create a new function - pub(crate) fn new_function(&mut self, name: String) { - let new_function = Function::new(name); - let old_function = std::mem::replace(&mut self.current_function, new_function); - - self.finished_functions.push((self.current_function_id, old_function)); - self.current_function_id = self.global_context.next_function(); - } - - pub(crate) fn finish(mut self) -> Vec<(FunctionId, Function)> { - self.finished_functions.push((self.current_function_id, self.current_function)); - self.finished_functions - } - - pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { - let entry = self.current_function.entry_block(); - self.current_function.dfg.add_block_parameter(entry, typ) - } - - /// Insert a numeric constant into the current function - pub(crate) fn numeric_constant( - &mut self, - value: impl Into, - typ: Type, - ) -> ValueId { - self.current_function.dfg.make_constant(value.into(), typ) - } - - /// Insert a numeric constant into the current function of type Field - pub(crate) fn field_constant(&mut self, value: impl Into) -> ValueId { - self.numeric_constant(value.into(), Type::field()) - } - - pub(crate) 
fn type_of_value(&self, value: ValueId) -> Type { - self.current_function.dfg.type_of_value(value) - } - - pub(crate) fn insert_block(&mut self) -> BasicBlockId { - self.current_function.dfg.make_block() - } - - pub(crate) fn add_block_parameter(&mut self, block: BasicBlockId, typ: Type) -> ValueId { - self.current_function.dfg.add_block_parameter(block, typ) - } - - /// Inserts a new instruction at the end of the current block and returns its results - fn insert_instruction( - &mut self, - instruction: Instruction, - ctrl_typevars: Option>, - ) -> &[ValueId] { - let id = self.current_function.dfg.make_instruction(instruction, ctrl_typevars); - self.current_function.dfg.insert_instruction_in_block(self.current_block, id); - self.current_function.dfg.instruction_results(id) - } - - /// Switch to inserting instructions in the given block. - /// Expects the given block to be within the same function. If you want to insert - /// instructions into a new function, call new_function instead. - pub(crate) fn switch_to_block(&mut self, block: BasicBlockId) { - self.current_block = block; - } - - /// Insert an allocate instruction at the end of the current block, allocating the - /// given amount of field elements. Returns the result of the allocate instruction, - /// which is always a Reference to the allocated data. - pub(crate) fn insert_allocate(&mut self, size_to_allocate: u32) -> ValueId { - self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None)[0] - } - - /// Insert a Load instruction at the end of the current block, loading from the given offset - /// of the given address which should point to a previous Allocate instruction. Note that - /// this is limited to loading a single value. Loading multiple values (such as a tuple) - /// will require multiple loads. - /// 'offset' is in units of FieldElements here. So loading the fourth FieldElement stored in - /// an array will have an offset of 3. - /// Returns the element that was loaded. 
- pub(crate) fn insert_load( - &mut self, - mut address: ValueId, - offset: ValueId, - type_to_load: Type, - ) -> ValueId { - if let Some(offset) = self.current_function.dfg.get_numeric_constant(offset) { - if !offset.is_zero() { - let offset = self.field_constant(offset); - address = self.insert_binary(address, BinaryOp::Add, offset); - } - }; - self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] - } - - /// Insert a Store instruction at the end of the current block, storing the given element - /// at the given address. Expects that the address points somewhere - /// within a previous Allocate instruction. - pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { - self.insert_instruction(Instruction::Store { address, value }, None); - } - - /// Insert a binary instruction at the end of the current block. - /// Returns the result of the binary instruction. - pub(crate) fn insert_binary( - &mut self, - lhs: ValueId, - operator: BinaryOp, - rhs: ValueId, - ) -> ValueId { - let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); - self.insert_instruction(instruction, None)[0] - } - - /// Insert a not instruction at the end of the current block. - /// Returns the result of the instruction. - pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { - self.insert_instruction(Instruction::Not(rhs), None)[0] - } - - /// Insert a cast instruction at the end of the current block. - /// Returns the result of the cast instruction. - pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { - self.insert_instruction(Instruction::Cast(value, typ), None)[0] - } - - /// Insert a constrain instruction at the end of the current block. 
- pub(crate) fn insert_constrain(&mut self, boolean: ValueId) { - self.insert_instruction(Instruction::Constrain(boolean), None); - } - - /// Terminates the current block with the given terminator instruction - fn terminate_block_with(&mut self, terminator: TerminatorInstruction) { - self.current_function.dfg.set_block_terminator(self.current_block, terminator); - } - - /// Terminate the current block with a jmp instruction to jmp to the given - /// block with the given arguments. - pub(crate) fn terminate_with_jmp( - &mut self, - destination: BasicBlockId, - arguments: Vec, - ) { - self.terminate_block_with(TerminatorInstruction::Jmp { destination, arguments }); - } - - /// Terminate the current block with a jmpif instruction to jmp with the given arguments - /// block with the given arguments. - pub(crate) fn terminate_with_jmpif( - &mut self, - condition: ValueId, - then_destination: BasicBlockId, - else_destination: BasicBlockId, - ) { - self.terminate_block_with(TerminatorInstruction::JmpIf { - condition, - then_destination, - else_destination, - }); - } - - /// Terminate the current block with a return instruction - pub(crate) fn terminate_with_return(&mut self, return_values: Vec) { - self.terminate_block_with(TerminatorInstruction::Return { return_values }); - } - - /// Mutates a load instruction into a store instruction. - /// - /// This function is used while generating ssa-form for assignments currently. - /// To re-use most of the expression infrastructure, the lvalue of an assignment - /// is compiled as an expression and to assign to it we replace the final load - /// (which should always be present to load a mutable value) with a store of the - /// assigned value. - pub(crate) fn mutate_load_into_store(&mut self, load_result: ValueId, value_to_store: ValueId) { - let (instruction, address) = match &self.current_function.dfg[load_result] { - Value::Instruction { instruction, .. 
} => { - match &self.current_function.dfg[*instruction] { - Instruction::Load { address } => (*instruction, *address), - other => { - panic!("mutate_load_into_store: Expected Load instruction, found {other:?}") - } - } - } - other => panic!("mutate_load_into_store: Expected Load instruction, found {other:?}"), - }; - - let store = Instruction::Store { address, value: value_to_store }; - self.current_function.dfg.replace_instruction(instruction, store); - // Clear the results of the previous load for safety - self.current_function.dfg.make_instruction_results(instruction, None); - } -} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index 8f9ceed800e..fdbaa36308b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -1,19 +1,230 @@ -pub(crate) mod function_builder; +use acvm::FieldElement; use crate::ssa_refactor::ir::{ + basic_block::BasicBlockId, function::{Function, FunctionId}, - map::AtomicCounter, + instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, + types::Type, + value::{Value, ValueId}, }; -/// The global context while building the ssa representation. -/// Because this may be shared across threads, it is synchronized internally as necessary. -#[derive(Default)] -pub(crate) struct SharedBuilderContext { - function_count: AtomicCounter, +/// The per-function context for each ssa function being generated. +/// +/// This is split from the global SsaBuilder context to allow each function +/// to be potentially built concurrently. +/// +/// Contrary to the name, this struct has the capacity to build as many +/// functions as needed, although it is limited to one function at a time. 
+pub(crate) struct FunctionBuilder { + current_function: Function, + current_block: BasicBlockId, + finished_functions: Vec<(FunctionId, Function)>, } -impl SharedBuilderContext { - pub(super) fn next_function(&self) -> FunctionId { - self.function_count.next() +impl FunctionBuilder { + pub(crate) fn new(function_name: String, function_id: FunctionId) -> Self { + let new_function = Function::new(function_name, function_id); + let current_block = new_function.entry_block(); + + Self { current_function: new_function, current_block, finished_functions: Vec::new() } + } + + /// Finish the current function and create a new function + pub(crate) fn new_function(&mut self, name: String, function_id: FunctionId) { + let new_function = Function::new(name, function_id); + let old_function = std::mem::replace(&mut self.current_function, new_function); + + self.finished_functions.push((self.current_function.id(), old_function)); + } + + pub(crate) fn finish(mut self) -> Vec<(FunctionId, Function)> { + self.finished_functions.push((self.current_function.id(), self.current_function)); + self.finished_functions + } + + pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { + let entry = self.current_function.entry_block(); + self.current_function.dfg.add_block_parameter(entry, typ) + } + + /// Insert a numeric constant into the current function + pub(crate) fn numeric_constant( + &mut self, + value: impl Into, + typ: Type, + ) -> ValueId { + self.current_function.dfg.make_constant(value.into(), typ) + } + + /// Insert a numeric constant into the current function of type Field + pub(crate) fn field_constant(&mut self, value: impl Into) -> ValueId { + self.numeric_constant(value.into(), Type::field()) + } + + pub(crate) fn type_of_value(&self, value: ValueId) -> Type { + self.current_function.dfg.type_of_value(value) + } + + pub(crate) fn insert_block(&mut self) -> BasicBlockId { + self.current_function.dfg.make_block() + } + + pub(crate) fn add_block_parameter(&mut self, 
block: BasicBlockId, typ: Type) -> ValueId { + self.current_function.dfg.add_block_parameter(block, typ) + } + + /// Inserts a new instruction at the end of the current block and returns its results + fn insert_instruction( + &mut self, + instruction: Instruction, + ctrl_typevars: Option>, + ) -> &[ValueId] { + let id = self.current_function.dfg.make_instruction(instruction, ctrl_typevars); + self.current_function.dfg.insert_instruction_in_block(self.current_block, id); + self.current_function.dfg.instruction_results(id) + } + + /// Switch to inserting instructions in the given block. + /// Expects the given block to be within the same function. If you want to insert + /// instructions into a new function, call new_function instead. + pub(crate) fn switch_to_block(&mut self, block: BasicBlockId) { + self.current_block = block; + } + + /// Insert an allocate instruction at the end of the current block, allocating the + /// given amount of field elements. Returns the result of the allocate instruction, + /// which is always a Reference to the allocated data. + pub(crate) fn insert_allocate(&mut self, size_to_allocate: u32) -> ValueId { + self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None)[0] + } + + /// Insert a Load instruction at the end of the current block, loading from the given offset + /// of the given address which should point to a previous Allocate instruction. Note that + /// this is limited to loading a single value. Loading multiple values (such as a tuple) + /// will require multiple loads. + /// 'offset' is in units of FieldElements here. So loading the fourth FieldElement stored in + /// an array will have an offset of 3. + /// Returns the element that was loaded. 
+ pub(crate) fn insert_load( + &mut self, + mut address: ValueId, + offset: ValueId, + type_to_load: Type, + ) -> ValueId { + if let Some(offset) = self.current_function.dfg.get_numeric_constant(offset) { + if !offset.is_zero() { + let offset = self.field_constant(offset); + address = self.insert_binary(address, BinaryOp::Add, offset); + } + }; + self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] + } + + /// Insert a Store instruction at the end of the current block, storing the given element + /// at the given address. Expects that the address points somewhere + /// within a previous Allocate instruction. + pub(crate) fn insert_store(&mut self, address: ValueId, value: ValueId) { + self.insert_instruction(Instruction::Store { address, value }, None); + } + + /// Insert a binary instruction at the end of the current block. + /// Returns the result of the binary instruction. + pub(crate) fn insert_binary( + &mut self, + lhs: ValueId, + operator: BinaryOp, + rhs: ValueId, + ) -> ValueId { + let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); + self.insert_instruction(instruction, None)[0] + } + + /// Insert a not instruction at the end of the current block. + /// Returns the result of the instruction. + pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { + self.insert_instruction(Instruction::Not(rhs), None)[0] + } + + /// Insert a cast instruction at the end of the current block. + /// Returns the result of the cast instruction. + pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { + self.insert_instruction(Instruction::Cast(value, typ), None)[0] + } + + /// Insert a constrain instruction at the end of the current block. + pub(crate) fn insert_constrain(&mut self, boolean: ValueId) { + self.insert_instruction(Instruction::Constrain(boolean), None); + } + + /// Insert a call instruction a the end of the current block and return + /// the results of the call. 
+ pub(crate) fn insert_call( + &mut self, + func: FunctionId, + arguments: Vec, + result_types: Vec, + ) -> &[ValueId] { + self.insert_instruction(Instruction::Call { func, arguments }, Some(result_types)) + } + + /// Terminates the current block with the given terminator instruction + fn terminate_block_with(&mut self, terminator: TerminatorInstruction) { + self.current_function.dfg.set_block_terminator(self.current_block, terminator); + } + + /// Terminate the current block with a jmp instruction to jmp to the given + /// block with the given arguments. + pub(crate) fn terminate_with_jmp( + &mut self, + destination: BasicBlockId, + arguments: Vec, + ) { + self.terminate_block_with(TerminatorInstruction::Jmp { destination, arguments }); + } + + /// Terminate the current block with a jmpif instruction to jmp with the given arguments + /// block with the given arguments. + pub(crate) fn terminate_with_jmpif( + &mut self, + condition: ValueId, + then_destination: BasicBlockId, + else_destination: BasicBlockId, + ) { + self.terminate_block_with(TerminatorInstruction::JmpIf { + condition, + then_destination, + else_destination, + }); + } + + /// Terminate the current block with a return instruction + pub(crate) fn terminate_with_return(&mut self, return_values: Vec) { + self.terminate_block_with(TerminatorInstruction::Return { return_values }); + } + + /// Mutates a load instruction into a store instruction. + /// + /// This function is used while generating ssa-form for assignments currently. + /// To re-use most of the expression infrastructure, the lvalue of an assignment + /// is compiled as an expression and to assign to it we replace the final load + /// (which should always be present to load a mutable value) with a store of the + /// assigned value. 
+ pub(crate) fn mutate_load_into_store(&mut self, load_result: ValueId, value_to_store: ValueId) { + let (instruction, address) = match &self.current_function.dfg[load_result] { + Value::Instruction { instruction, .. } => { + match &self.current_function.dfg[*instruction] { + Instruction::Load { address } => (*instruction, *address), + other => { + panic!("mutate_load_into_store: Expected Load instruction, found {other:?}") + } + } + } + other => panic!("mutate_load_into_store: Expected Load instruction, found {other:?}"), + }; + + let store = Instruction::Store { address, value: value_to_store }; + self.current_function.dfg.replace_instruction(instruction, store); + // Clear the results of the previous load for safety + self.current_function.dfg.make_instruction_results(instruction, None); } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 10206e28c2d..df5329fed92 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -6,13 +6,13 @@ use noirc_frontend::monomorphization::ast::{self, LocalId, Parameters}; use noirc_frontend::monomorphization::ast::{FuncId, Program}; use noirc_frontend::Signedness; +use crate::ssa_refactor::ir::function::Function; +use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::instruction::BinaryOp; +use crate::ssa_refactor::ir::map::AtomicCounter; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId; -use crate::ssa_refactor::ssa_builder::SharedBuilderContext; -use crate::ssa_refactor::{ - ir::function::FunctionId as IrFunctionId, ssa_builder::function_builder::FunctionBuilder, -}; +use crate::ssa_refactor::ssa_builder::FunctionBuilder; use super::value::{Tree, Values}; @@ -22,7 +22,7 @@ type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; pub(super) struct FunctionContext<'a> { definitions: 
HashMap, - pub(super) builder: FunctionBuilder<'a>, + pub(super) builder: FunctionBuilder, shared_context: &'a SharedContext, } @@ -30,28 +30,32 @@ pub(super) struct FunctionContext<'a> { pub(super) struct SharedContext { functions: RwLock>, function_queue: Mutex, + function_counter: AtomicCounter, + pub(super) program: Program, } impl<'a> FunctionContext<'a> { pub(super) fn new( + function_id: FuncId, function_name: String, parameters: &Parameters, shared_context: &'a SharedContext, - shared_builder_context: &'a SharedBuilderContext, ) -> Self { + let new_id = shared_context.get_or_queue_function(function_id); + let mut this = Self { definitions: HashMap::new(), - builder: FunctionBuilder::new(function_name, shared_builder_context), + builder: FunctionBuilder::new(function_name, new_id), shared_context, }; this.add_parameters_to_scope(parameters); this } - pub(super) fn new_function(&mut self, name: String, parameters: &Parameters) { + pub(super) fn new_function(&mut self, id: IrFunctionId, name: String, parameters: &Parameters) { self.definitions.clear(); - self.builder.new_function(name); + self.builder.new_function(name, id); self.add_parameters_to_scope(parameters); } @@ -72,7 +76,7 @@ impl<'a> FunctionContext<'a> { fn add_parameter_to_scope(&mut self, parameter_id: LocalId, parameter_type: &ast::Type) { // Add a separate parameter for each field type in 'parameter_type' let parameter_value = - self.map_type(parameter_type, |this, typ| this.builder.add_parameter(typ).into()); + Self::map_type(parameter_type, |typ| self.builder.add_parameter(typ).into()); self.definitions.insert(parameter_id, parameter_value); } @@ -81,12 +85,8 @@ impl<'a> FunctionContext<'a> { /// /// This can be used to (for example) flatten a tuple type, creating /// and returning a new parameter for each field type. 
- pub(super) fn map_type( - &mut self, - typ: &ast::Type, - mut f: impl FnMut(&mut Self, Type) -> T, - ) -> Tree { - Self::map_type_helper(typ, &mut |typ| f(self, typ)) + pub(super) fn map_type(typ: &ast::Type, mut f: impl FnMut(Type) -> T) -> Tree { + Self::map_type_helper(typ, &mut f) } // This helper is needed because we need to take f by mutable reference, @@ -157,6 +157,30 @@ impl<'a> FunctionContext<'a> { result.into() } + /// Inserts a call instruction at the end of the current block and returns the results + /// of the call. + /// + /// Compared to self.builder.insert_call, this version will reshape the returned Vec + /// back into a Values tree of the proper shape. + pub(super) fn insert_call( + &mut self, + function: IrFunctionId, + arguments: Vec, + result_type: &ast::Type, + ) -> Values { + let result_types = Self::convert_type(result_type).flatten(); + let results = self.builder.insert_call(function, arguments, result_types); + + let mut i = 0; + let reshaped_return_values = Self::map_type(result_type, |_| { + let result = results[i].into(); + i += 1; + result + }); + assert_eq!(i, results.len()); + reshaped_return_values + } + /// Create a const offset of an address for an array load or store pub(super) fn make_offset(&mut self, mut address: ValueId, offset: u128) -> ValueId { if offset != 0 { @@ -215,6 +239,13 @@ impl<'a> FunctionContext<'a> { } } } + + /// Retrieves the given function, adding it to the function queue + /// if it is not yet compiled. 
+ pub(super) fn get_or_queue_function(&self, id: FuncId) -> Values { + let function = self.shared_context.get_or_queue_function(id); + Values::Leaf(super::value::Value::Function(function)) + } } /// True if the given operator cannot be encoded directly and needs @@ -260,10 +291,38 @@ fn convert_operator(op: noirc_frontend::BinaryOpKind) -> BinaryOp { impl SharedContext { pub(super) fn new(program: Program) -> Self { - Self { functions: Default::default(), function_queue: Default::default(), program } + Self { + functions: Default::default(), + function_queue: Default::default(), + function_counter: Default::default(), + program, + } } pub(super) fn pop_next_function_in_queue(&self) -> Option<(ast::FuncId, IrFunctionId)> { self.function_queue.lock().expect("Failed to lock function_queue").pop() } + + /// Return the matching id for the given function if known. If it is not known this + /// will add the function to the queue of functions to compile, assign it a new id, + /// and return this new id. 
+ pub(super) fn get_or_queue_function(&self, id: ast::FuncId) -> IrFunctionId { + // Start a new block to guarantee the destructor for the map lock is released + // before map needs to be aquired again in self.functions.write() below + { + let map = self.functions.read().expect("Failed to read self.functions"); + if let Some(existing_id) = map.get(&id) { + return *existing_id; + } + } + + let next_id = self.function_counter.next(); + + let mut queue = self.function_queue.lock().expect("Failed to lock function queue"); + queue.push((id, next_id)); + + self.functions.write().expect("Failed to write to self.functions").insert(id, next_id); + + next_id + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index a7880032d42..4aad2aafec1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -11,25 +11,21 @@ use self::{ value::{Tree, Values}, }; -use super::{ - ir::{instruction::BinaryOp, types::Type, value::ValueId}, - ssa_builder::SharedBuilderContext, -}; +use super::ir::{function::FunctionId, instruction::BinaryOp, types::Type, value::ValueId}; pub(crate) fn generate_ssa(program: Program) { let context = SharedContext::new(program); - let builder_context = SharedBuilderContext::default(); let main = context.program.main(); - let mut function_context = - FunctionContext::new(main.name.clone(), &main.parameters, &context, &builder_context); + let main_id = Program::main_id(); + let main_name = main.name.clone(); + let mut function_context = FunctionContext::new(main_id, main_name, &main.parameters, &context); function_context.codegen_expression(&main.body); - while let Some((src_function_id, _new_id)) = context.pop_next_function_in_queue() { + while let Some((src_function_id, dest_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; - // TODO: Need to ensure/assert the new 
function's id == new_id - function_context.new_function(function.name.clone(), &function.parameters); + function_context.new_function(dest_id, function.name.clone(), &function.parameters); function_context.codegen_expression(&function.body); } } @@ -69,7 +65,7 @@ impl<'a> FunctionContext<'a> { fn codegen_ident(&mut self, ident: &ast::Ident) -> Values { match &ident.definition { ast::Definition::Local(id) => self.lookup(*id).map(|value| value.eval(self).into()), - ast::Definition::Function(_) => todo!(), + ast::Definition::Function(id) => self.get_or_queue_function(*id), ast::Definition::Builtin(_) => todo!(), ast::Definition::LowLevel(_) => todo!(), } @@ -165,10 +161,10 @@ impl<'a> FunctionContext<'a> { let base_index = self.builder.insert_binary(base_offset, BinaryOp::Mul, type_size); let mut field_index = 0u128; - self.map_type(element_type, |ctx, typ| { - let offset = ctx.make_offset(base_index, field_index); + Self::map_type(element_type, |typ| { + let offset = self.make_offset(base_index, field_index); field_index += 1; - ctx.builder.insert_load(array, offset, typ).into() + self.builder.insert_load(array, offset, typ).into() }) } @@ -229,8 +225,8 @@ impl<'a> FunctionContext<'a> { // Create block arguments for the end block as needed to branch to // with our then and else value. 
- result = self.map_type(&if_expr.typ, |ctx, typ| { - ctx.builder.add_block_parameter(end_block, typ).into() + result = Self::map_type(&if_expr.typ, |typ| { + self.builder.add_block_parameter(end_block, typ).into() }); let else_values = else_value.into_value_list(self); @@ -259,8 +255,26 @@ impl<'a> FunctionContext<'a> { Self::get_field(tuple, field_index) } - fn codegen_call(&mut self, _call: &ast::Call) -> Values { - todo!() + fn codegen_function(&mut self, function: &Expression) -> FunctionId { + use crate::ssa_refactor::ssa_gen::value::Value; + match self.codegen_expression(function) { + Tree::Leaf(Value::Function(id)) => id, + other => { + panic!("codegen_function: expected function value, found {other:?}") + } + } + } + + fn codegen_call(&mut self, call: &ast::Call) -> Values { + let function = self.codegen_function(&call.func); + + let arguments = call + .arguments + .iter() + .flat_map(|argument| self.codegen_expression(argument).into_value_list(self)) + .collect(); + + self.insert_call(function, arguments, &call.return_type) } fn codegen_let(&mut self, let_expr: &ast::Let) -> Values { From 4e198c0b8190d5bfb2167b929a5d5a8a9106a4b0 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 27 Apr 2023 17:29:48 +0100 Subject: [PATCH 16/66] chore: address clippy warnings (#1239) --- .../src/ssa_refactor/ir/cfg.rs | 66 ++++++++++--------- 1 file changed, 34 insertions(+), 32 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs index 42a2cd573a1..d443d574ca8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs @@ -161,32 +161,33 @@ mod tests { let mut cfg = ControlFlowGraph::with_function(&func); + #[allow(clippy::needless_collect)] { - let block0_predecessors = cfg.pred_iter(block0_id).collect::>(); - let block1_predecessors = cfg.pred_iter(block1_id).collect::>(); - let 
block2_predecessors = cfg.pred_iter(block2_id).collect::>(); + let block0_predecessors: Vec<_> = cfg.pred_iter(block0_id).collect(); + let block1_predecessors: Vec<_> = cfg.pred_iter(block1_id).collect(); + let block2_predecessors: Vec<_> = cfg.pred_iter(block2_id).collect(); - let block0_successors = cfg.succ_iter(block0_id).collect::>(); - let block1_successors = cfg.succ_iter(block1_id).collect::>(); - let block2_successors = cfg.succ_iter(block2_id).collect::>(); + let block0_successors: Vec<_> = cfg.succ_iter(block0_id).collect(); + let block1_successors: Vec<_> = cfg.succ_iter(block1_id).collect(); + let block2_successors: Vec<_> = cfg.succ_iter(block2_id).collect(); assert_eq!(block0_predecessors.len(), 0); assert_eq!(block1_predecessors.len(), 2); assert_eq!(block2_predecessors.len(), 2); - assert_eq!(block1_predecessors.contains(&block0_id), true); - assert_eq!(block1_predecessors.contains(&block1_id), true); - assert_eq!(block2_predecessors.contains(&block0_id), true); - assert_eq!(block2_predecessors.contains(&block1_id), true); + assert!(block1_predecessors.contains(&block0_id)); + assert!(block1_predecessors.contains(&block1_id)); + assert!(block2_predecessors.contains(&block0_id)); + assert!(block2_predecessors.contains(&block1_id)); assert_eq!(block0_successors.len(), 2); assert_eq!(block1_successors.len(), 2); assert_eq!(block2_successors.len(), 0); - assert_eq!(block0_successors.contains(&block1_id), true); - assert_eq!(block0_successors.contains(&block2_id), true); - assert_eq!(block1_successors.contains(&block1_id), true); - assert_eq!(block1_successors.contains(&block2_id), true); + assert!(block0_successors.contains(&block1_id)); + assert!(block0_successors.contains(&block2_id)); + assert!(block1_successors.contains(&block1_id)); + assert!(block1_successors.contains(&block2_id)); } // Modify function to form: @@ -214,37 +215,38 @@ mod tests { }); // Recompute new and changed blocks - cfg.recompute_block(&mut func, block0_id); - 
cfg.recompute_block(&mut func, block2_id); - cfg.recompute_block(&mut func, ret_block_id); + cfg.recompute_block(&func, block0_id); + cfg.recompute_block(&func, block2_id); + cfg.recompute_block(&func, ret_block_id); + #[allow(clippy::needless_collect)] { - let block0_predecessors = cfg.pred_iter(block0_id).collect::>(); - let block1_predecessors = cfg.pred_iter(block1_id).collect::>(); - let block2_predecessors = cfg.pred_iter(block2_id).collect::>(); + let block0_predecessors: Vec<_> = cfg.pred_iter(block0_id).collect(); + let block1_predecessors: Vec<_> = cfg.pred_iter(block1_id).collect(); + let block2_predecessors: Vec<_> = cfg.pred_iter(block2_id).collect(); - let block0_successors = cfg.succ_iter(block0_id).collect::>(); - let block1_successors = cfg.succ_iter(block1_id).collect::>(); - let block2_successors = cfg.succ_iter(block2_id).collect::>(); + let block0_successors: Vec<_> = cfg.succ_iter(block0_id).collect(); + let block1_successors: Vec<_> = cfg.succ_iter(block1_id).collect(); + let block2_successors: Vec<_> = cfg.succ_iter(block2_id).collect(); assert_eq!(block0_predecessors.len(), 0); assert_eq!(block1_predecessors.len(), 2); assert_eq!(block2_predecessors.len(), 1); - assert_eq!(block1_predecessors.contains(&block0_id), true); - assert_eq!(block1_predecessors.contains(&block1_id), true); - assert_eq!(block2_predecessors.contains(&block0_id), false); - assert_eq!(block2_predecessors.contains(&block1_id), true); + assert!(block1_predecessors.contains(&block0_id)); + assert!(block1_predecessors.contains(&block1_id)); + assert!(!block2_predecessors.contains(&block0_id)); + assert!(block2_predecessors.contains(&block1_id)); assert_eq!(block0_successors.len(), 2); assert_eq!(block1_successors.len(), 2); assert_eq!(block2_successors.len(), 1); - assert_eq!(block0_successors.contains(&block1_id), true); - assert_eq!(block0_successors.contains(&ret_block_id), true); - assert_eq!(block1_successors.contains(&block1_id), true); - 
assert_eq!(block1_successors.contains(&block2_id), true); - assert_eq!(block2_successors.contains(&ret_block_id), true); + assert!(block0_successors.contains(&block1_id)); + assert!(block0_successors.contains(&ret_block_id)); + assert!(block1_successors.contains(&block1_id)); + assert!(block1_successors.contains(&block2_id)); + assert!(block2_successors.contains(&ret_block_id)); } } } From 606b7d0db77fccde1bb14c4c3f91f424436e4ea6 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 27 Apr 2023 14:36:24 -0400 Subject: [PATCH 17/66] chore(ssa refactor): Implement first-class functions (#1238) * Implement first-class functions * Update crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs Co-authored-by: kevaundray --------- Co-authored-by: kevaundray --- .../src/ssa_refactor/ir/dfg.rs | 21 ++++++++++++++++--- .../src/ssa_refactor/ir/function.rs | 10 +++++---- .../src/ssa_refactor/ir/instruction.rs | 6 ++---- .../src/ssa_refactor/ir/map.rs | 14 ------------- .../src/ssa_refactor/ir/printer.rs | 17 +++++++++------ .../src/ssa_refactor/ir/value.rs | 13 +++++++++++- .../src/ssa_refactor/ssa_builder/mod.rs | 2 +- .../src/ssa_refactor/ssa_gen/context.rs | 2 +- .../src/ssa_refactor/ssa_gen/mod.rs | 14 ++----------- 9 files changed, 53 insertions(+), 46 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index ab2018b1df8..60591da311c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -1,9 +1,11 @@ +use std::collections::HashMap; + use super::{ basic_block::{BasicBlock, BasicBlockId}, constant::{NumericConstant, NumericConstantId}, - function::Signature, + function::{FunctionId, Signature}, instruction::{Instruction, InstructionId, InstructionResultType, TerminatorInstruction}, - map::{DenseMap, Id, SecondaryMap, TwoWayMap}, + map::{DenseMap, Id, TwoWayMap}, types::Type, value::{Value, ValueId}, }; @@ -53,7 +55,7 @@ pub(crate) struct 
DataFlowGraph { /// Currently, we need to define them in a better way /// Call instructions require the func signature, but /// other instructions may need some more reading on my part - results: SecondaryMap, + results: HashMap, /// Storage for all of the values defined in this /// function. @@ -64,6 +66,11 @@ pub(crate) struct DataFlowGraph { /// twice will return the same ConstantId. constants: TwoWayMap, + /// Contains each function that has been imported into the current function. + /// Each function's Value::Function is uniqued here so any given FunctionId + /// will always have the same ValueId within this function. + functions: HashMap, + /// Function signatures of external methods signatures: DenseMap, @@ -150,6 +157,14 @@ impl DataFlowGraph { self.values.insert(Value::NumericConstant { constant, typ }) } + /// Gets or creates a ValueId for the given FunctionId. + pub(crate) fn import_function(&mut self, function: FunctionId) -> ValueId { + if let Some(existing) = self.functions.get(&function) { + return *existing; + } + self.values.insert(Value::Function { id: function }) + } + /// Attaches results to the instruction, clearing any previous results. 
/// /// This does not normally need to be called manually as it is called within diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index ca486d0258a..e40c086c0e6 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -1,7 +1,9 @@ +use std::collections::HashMap; + use super::basic_block::BasicBlockId; use super::dfg::DataFlowGraph; -use super::instruction::Instruction; -use super::map::{Id, SecondaryMap}; +use super::instruction::InstructionId; +use super::map::Id; use super::types::Type; use noirc_errors::Location; @@ -15,7 +17,7 @@ use noirc_errors::Location; #[derive(Debug)] pub(crate) struct Function { /// Maps instructions to source locations - source_locations: SecondaryMap, + source_locations: HashMap, /// The first basic block in the function entry_block: BasicBlockId, @@ -35,7 +37,7 @@ impl Function { pub(crate) fn new(name: String, id: FunctionId) -> Self { let mut dfg = DataFlowGraph::default(); let entry_block = dfg.make_block(); - Self { name, source_locations: SecondaryMap::new(), id, entry_block, dfg } + Self { name, source_locations: HashMap::new(), id, entry_block, dfg } } pub(crate) fn name(&self) -> &str { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 11c6b8dc05f..5e9e7229e3a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,6 +1,4 @@ -use super::{ - basic_block::BasicBlockId, function::FunctionId, map::Id, types::Type, value::ValueId, -}; +use super::{basic_block::BasicBlockId, map::Id, types::Type, value::ValueId}; /// Reference to an instruction pub(crate) type InstructionId = Id; @@ -41,7 +39,7 @@ pub(crate) enum Instruction { Constrain(ValueId), /// Performs a function call with a list of its arguments. 
- Call { func: FunctionId, arguments: Vec }, + Call { func: ValueId, arguments: Vec }, /// Performs a call to an intrinsic function and stores the /// results in `return_arguments`. diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index 24b30241293..a99ff06c5fb 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -260,20 +260,6 @@ impl std::ops::Index> for TwoWayMap { } } -/// A SecondaryMap is for storing secondary data for a given key. Since this -/// map is for secondary data, it will not return fresh Ids for data, instead -/// it expects users to provide these ids in order to associate existing ids with -/// additional data. -/// -/// Unlike SecondaryMap in cranelift, this version is sparse and thus -/// does not require inserting default elements for each key in between -/// the desired key and the previous length of the map. -/// -/// There is no expectation that there is always secondary data for all relevant -/// Ids of a given type, so unlike the other Map types, it is possible for -/// a call to .get(id) to return None. -pub(crate) type SecondaryMap = HashMap, V>; - /// A simple counter to create fresh Ids without any storage. /// Useful for assigning ids before the storage is created or assigning ids /// for types that have no single owner. 
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index ff46b49b9b4..4873f436dca 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -55,12 +55,17 @@ pub(crate) fn display_block( display_terminator(function, block.terminator(), f) } -/// Specialize displaying value ids so that if they refer to constants we -/// print the constant directly +/// Specialize displaying value ids so that if they refer to a numeric +/// constant or a function we print those directly. fn value(function: &Function, id: ValueId) -> String { - match function.dfg.get_numeric_constant_with_type(id) { - Some((value, typ)) => format!("{} {}", value, typ), - None => id.to_string(), + use super::value::Value; + match &function.dfg[id] { + Value::NumericConstant { constant, typ } => { + let value = function.dfg[*constant].value(); + format!("{} {}", typ, value) + } + Value::Function { id } => id.to_string(), + _ => id.to_string(), } } @@ -120,7 +125,7 @@ pub(crate) fn display_instruction( writeln!(f, "constrain {}", show(*value)) } Instruction::Call { func, arguments } => { - writeln!(f, "call {func}({})", value_list(function, arguments)) + writeln!(f, "call {}({})", show(*func), value_list(function, arguments)) } Instruction::Intrinsic { func, arguments } => { writeln!(f, "intrinsic {func}({})", value_list(function, arguments)) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index a559522fadd..39228ae655b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -1,6 +1,9 @@ use crate::ssa_refactor::ir::basic_block::BasicBlockId; -use super::{constant::NumericConstantId, instruction::InstructionId, map::Id, types::Type}; +use super::{ + constant::NumericConstantId, function::FunctionId, 
instruction::InstructionId, map::Id, + types::Type, +}; pub(crate) type ValueId = Id; @@ -27,6 +30,13 @@ pub(crate) enum Value { /// This Value originates from a numeric constant NumericConstant { constant: NumericConstantId, typ: Type }, + + /// This Value refers to a function in the IR. + /// Functions always have the type Type::Function. + /// If the argument or return types are needed, users should retrieve + /// their types via the Call instruction's arguments or the Call instruction's + /// result types respectively. + Function { id: FunctionId }, } impl Value { @@ -35,6 +45,7 @@ impl Value { Value::Instruction { typ, .. } => *typ, Value::Param { typ, .. } => *typ, Value::NumericConstant { typ, .. } => *typ, + Value::Function { .. } => Type::Function, } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index fdbaa36308b..7da88e47157 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -160,7 +160,7 @@ impl FunctionBuilder { /// the results of the call. pub(crate) fn insert_call( &mut self, - func: FunctionId, + func: ValueId, arguments: Vec, result_types: Vec, ) -> &[ValueId] { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index df5329fed92..bd04f90d063 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -164,7 +164,7 @@ impl<'a> FunctionContext<'a> { /// back into a Values tree of the proper shape. 
pub(super) fn insert_call( &mut self, - function: IrFunctionId, + function: ValueId, arguments: Vec, result_type: &ast::Type, ) -> Values { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 4aad2aafec1..8475b3c84c7 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -11,7 +11,7 @@ use self::{ value::{Tree, Values}, }; -use super::ir::{function::FunctionId, instruction::BinaryOp, types::Type, value::ValueId}; +use super::ir::{instruction::BinaryOp, types::Type, value::ValueId}; pub(crate) fn generate_ssa(program: Program) { let context = SharedContext::new(program); @@ -255,18 +255,8 @@ impl<'a> FunctionContext<'a> { Self::get_field(tuple, field_index) } - fn codegen_function(&mut self, function: &Expression) -> FunctionId { - use crate::ssa_refactor::ssa_gen::value::Value; - match self.codegen_expression(function) { - Tree::Leaf(Value::Function(id)) => id, - other => { - panic!("codegen_function: expected function value, found {other:?}") - } - } - } - fn codegen_call(&mut self, call: &ast::Call) -> Values { - let function = self.codegen_function(&call.func); + let function = self.codegen_non_tuple_expression(&call.func); let arguments = call .arguments From 06427e5bccaa40b0eb0e3f30388a98dc25cf558f Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 27 Apr 2023 15:52:04 -0400 Subject: [PATCH 18/66] chore(ssa refactor): Implement intrinsics (#1241) * Implement first-class functions * Update crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs Co-authored-by: kevaundray * Implement intrinsics --------- Co-authored-by: kevaundray --- .../src/ssa_refactor/ir/dfg.rs | 19 +++++- .../src/ssa_refactor/ir/instruction.rs | 63 +++++++++++++------ .../src/ssa_refactor/ir/printer.rs | 6 +- .../src/ssa_refactor/ir/value.rs | 12 +++- .../src/ssa_refactor/ssa_builder/mod.rs | 15 +++++ .../src/ssa_refactor/ssa_gen/context.rs | 4 
+- .../src/ssa_refactor/ssa_gen/mod.rs | 8 ++- .../src/ssa_refactor/ssa_gen/value.rs | 3 - 8 files changed, 96 insertions(+), 34 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 60591da311c..4d2ebe31efb 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -4,7 +4,9 @@ use super::{ basic_block::{BasicBlock, BasicBlockId}, constant::{NumericConstant, NumericConstantId}, function::{FunctionId, Signature}, - instruction::{Instruction, InstructionId, InstructionResultType, TerminatorInstruction}, + instruction::{ + Instruction, InstructionId, InstructionResultType, Intrinsic, TerminatorInstruction, + }, map::{DenseMap, Id, TwoWayMap}, types::Type, value::{Value, ValueId}, @@ -71,6 +73,11 @@ pub(crate) struct DataFlowGraph { /// will always have the same ValueId within this function. functions: HashMap, + /// Contains each intrinsic that has been imported into the current function. + /// This map is used to ensure that the ValueId for any given intrinsic is always + /// represented by only 1 ValueId within this function. + intrinsics: HashMap, + /// Function signatures of external methods signatures: DenseMap, @@ -162,7 +169,15 @@ impl DataFlowGraph { if let Some(existing) = self.functions.get(&function) { return *existing; } - self.values.insert(Value::Function { id: function }) + self.values.insert(Value::Function(function)) + } + + /// Gets or creates a ValueId for the given Intrinsic. + pub(crate) fn import_intrinsic(&mut self, intrinsic: Intrinsic) -> ValueId { + if let Some(existing) = self.intrinsics.get(&intrinsic) { + return *existing; + } + self.values.insert(Value::Intrinsic(intrinsic)) } /// Attaches results to the instruction, clearing any previous results. 
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 5e9e7229e3a..756c7ae5a13 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,9 +1,10 @@ +use acvm::acir::BlackBoxFunc; + use super::{basic_block::BasicBlockId, map::Id, types::Type, value::ValueId}; /// Reference to an instruction pub(crate) type InstructionId = Id; -#[derive(Debug, PartialEq, Eq, Hash, Clone)] /// These are similar to built-ins in other languages. /// These can be classified under two categories: /// - Opcodes which the IR knows the target machine has @@ -11,14 +12,50 @@ pub(crate) type InstructionId = Id; /// - Opcodes which have no function definition in the /// source code and must be processed by the IR. An example /// of this is println. -pub(crate) struct IntrinsicOpcodes; +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub(crate) enum Intrinsic { + Sort, + Println, + ToBits(Endian), + ToRadix(Endian), + BlackBox(BlackBoxFunc), +} + +impl std::fmt::Display for Intrinsic { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Intrinsic::Println => write!(f, "println"), + Intrinsic::Sort => write!(f, "sort"), + Intrinsic::ToBits(Endian::Big) => write!(f, "to_be_bits"), + Intrinsic::ToBits(Endian::Little) => write!(f, "to_le_bits"), + Intrinsic::ToRadix(Endian::Big) => write!(f, "to_be_radix"), + Intrinsic::ToRadix(Endian::Little) => write!(f, "to_le_radix"), + Intrinsic::BlackBox(function) => write!(f, "{function}"), + } + } +} -impl std::fmt::Display for IntrinsicOpcodes { - fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - todo!("intrinsics have no opcodes yet") +impl Intrinsic { + pub(crate) fn lookup(name: &str) -> Option { + match name { + "println" => Some(Intrinsic::Println), + "array_sort" => Some(Intrinsic::Sort), + "to_le_radix" => 
Some(Intrinsic::ToRadix(Endian::Little)), + "to_be_radix" => Some(Intrinsic::ToRadix(Endian::Big)), + "to_le_bits" => Some(Intrinsic::ToBits(Endian::Little)), + "to_be_bits" => Some(Intrinsic::ToBits(Endian::Big)), + other => BlackBoxFunc::lookup(other).map(Intrinsic::BlackBox), + } } } +/// The endian-ness of bits when encoding values as bits in e.g. ToBits or ToRadix +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] +pub(crate) enum Endian { + Big, + Little, +} + #[derive(Debug, PartialEq, Eq, Hash, Clone)] /// Instructions are used to perform tasks. /// The instructions that the IR is able to specify are listed below. @@ -41,10 +78,6 @@ pub(crate) enum Instruction { /// Performs a function call with a list of its arguments. Call { func: ValueId, arguments: Vec }, - /// Performs a call to an intrinsic function and stores the - /// results in `return_arguments`. - Intrinsic { func: IntrinsicOpcodes, arguments: Vec }, - /// Allocates a region of memory. Note that this is not concerned with /// the type of memory, the type of element is determined when loading this memory. /// @@ -72,9 +105,6 @@ impl Instruction { Instruction::Constrain(_) => 0, // This returns 0 as the result depends on the function being called Instruction::Call { .. } => 0, - // This also returns 0, but we could get it a compile time, - // since we know the signatures for the intrinsics - Instruction::Intrinsic { .. } => 0, Instruction::Allocate { .. } => 1, Instruction::Load { .. } => 1, Instruction::Store { .. } => 0, @@ -94,9 +124,6 @@ impl Instruction { Instruction::Constrain(_) => 1, // This returns 0 as the arguments depend on the function being called Instruction::Call { .. } => 0, - // This also returns 0, but we could get it a compile time, - // since we know the function definition for the intrinsics - Instruction::Intrinsic { .. 
} => 0, Instruction::Allocate { size: _ } => 1, Instruction::Load { address: _ } => 1, Instruction::Store { address: _, value: _ } => 2, @@ -113,9 +140,7 @@ impl Instruction { InstructionResultType::Operand(*value) } Instruction::Constrain(_) | Instruction::Store { .. } => InstructionResultType::None, - Instruction::Load { .. } | Instruction::Call { .. } | Instruction::Intrinsic { .. } => { - InstructionResultType::Unknown - } + Instruction::Load { .. } | Instruction::Call { .. } => InstructionResultType::Unknown, } } } @@ -129,7 +154,7 @@ pub(crate) enum InstructionResultType { Known(Type), /// The result type of this function is unknown and separate from its operand types. - /// This occurs for function and intrinsic calls. + /// This occurs for function calls and load operations. Unknown, /// This instruction does not return any results. diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index 4873f436dca..1471bd46e35 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -64,7 +64,8 @@ fn value(function: &Function, id: ValueId) -> String { let value = function.dfg[*constant].value(); format!("{} {}", typ, value) } - Value::Function { id } => id.to_string(), + Value::Function(id) => id.to_string(), + Value::Intrinsic(intrinsic) => intrinsic.to_string(), _ => id.to_string(), } } @@ -127,9 +128,6 @@ pub(crate) fn display_instruction( Instruction::Call { func, arguments } => { writeln!(f, "call {}({})", show(*func), value_list(function, arguments)) } - Instruction::Intrinsic { func, arguments } => { - writeln!(f, "intrinsic {func}({})", value_list(function, arguments)) - } Instruction::Allocate { size } => writeln!(f, "alloc {size} fields"), Instruction::Load { address } => writeln!(f, "load {}", show(*address)), Instruction::Store { address, value } => { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs 
b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index 39228ae655b..d7d8d8a41ab 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -1,7 +1,10 @@ use crate::ssa_refactor::ir::basic_block::BasicBlockId; use super::{ - constant::NumericConstantId, function::FunctionId, instruction::InstructionId, map::Id, + constant::NumericConstantId, + function::FunctionId, + instruction::{InstructionId, Intrinsic}, + map::Id, types::Type, }; @@ -36,7 +39,11 @@ pub(crate) enum Value { /// If the argument or return types are needed, users should retrieve /// their types via the Call instruction's arguments or the Call instruction's /// result types respectively. - Function { id: FunctionId }, + Function(FunctionId), + + /// An Intrinsic is a special kind of builtin function that may be handled internally + /// or optimized into a special form. + Intrinsic(Intrinsic), } impl Value { @@ -46,6 +53,7 @@ impl Value { Value::Param { typ, .. } => *typ, Value::NumericConstant { typ, .. } => *typ, Value::Function { .. } => Type::Function, + Value::Intrinsic { .. } => Type::Function, } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index 7da88e47157..6c407dfcd42 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -8,6 +8,8 @@ use crate::ssa_refactor::ir::{ value::{Value, ValueId}, }; +use super::ir::instruction::Intrinsic; + /// The per-function context for each ssa function being generated. 
/// /// This is split from the global SsaBuilder context to allow each function @@ -227,4 +229,17 @@ impl FunctionBuilder { // Clear the results of the previous load for safety self.current_function.dfg.make_instruction_results(instruction, None); } + + /// Returns a ValueId pointing to the given function or imports the function + /// into the current function if it was not already, and returns that ID. + pub(crate) fn import_function(&mut self, function: FunctionId) -> ValueId { + self.current_function.dfg.import_function(function) + } + + /// Retrieve a value reference to the given intrinsic operation. + /// Returns None if there is no intrinsic matching the given name. + pub(crate) fn import_intrinsic(&mut self, name: &str) -> Option { + Intrinsic::lookup(name) + .map(|intrinsic| self.current_function.dfg.import_intrinsic(intrinsic)) + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index bd04f90d063..909ed4ff84d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -242,9 +242,9 @@ impl<'a> FunctionContext<'a> { /// Retrieves the given function, adding it to the function queue /// if it is not yet compiled. 
- pub(super) fn get_or_queue_function(&self, id: FuncId) -> Values { + pub(super) fn get_or_queue_function(&mut self, id: FuncId) -> Values { let function = self.shared_context.get_or_queue_function(id); - Values::Leaf(super::value::Value::Function(function)) + self.builder.import_function(function).into() } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 8475b3c84c7..715f835ab7f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -66,8 +66,12 @@ impl<'a> FunctionContext<'a> { match &ident.definition { ast::Definition::Local(id) => self.lookup(*id).map(|value| value.eval(self).into()), ast::Definition::Function(id) => self.get_or_queue_function(*id), - ast::Definition::Builtin(_) => todo!(), - ast::Definition::LowLevel(_) => todo!(), + ast::Definition::Builtin(name) | ast::Definition::LowLevel(name) => { + match self.builder.import_intrinsic(name) { + Some(builtin) => builtin.into(), + None => panic!("No builtin function named '{name}' found"), + } + } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 52ff52d75f2..410e375fcd6 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -1,6 +1,5 @@ use iter_extended::vecmap; -use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId as IrValueId; @@ -15,7 +14,6 @@ pub(super) enum Tree { #[derive(Debug, Copy, Clone)] pub(super) enum Value { Normal(IrValueId), - Function(IrFunctionId), /// A mutable variable that must be loaded as the given type before being used Mutable(IrValueId, Type), @@ -32,7 +30,6 @@ impl Value { let offset = ctx.builder.field_constant(0u128); ctx.builder.insert_load(address, 
offset, typ) } - Value::Function(_) => panic!("Tried to evaluate a function value"), } } } From ed4691bcdf9cefbeaa57f0b3b7bce0acadb11424 Mon Sep 17 00:00:00 2001 From: jfecher Date: Fri, 28 Apr 2023 11:35:42 -0400 Subject: [PATCH 19/66] chore(ssa refactor): Fix no returns & duplicate main (#1243) * Implement first-class functions * Update crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs Co-authored-by: kevaundray * Implement intrinsics * Fix no return & duplicate main * bad git. remove duplicated functions * Remove Option in builder * Undo debug printing in driver --------- Co-authored-by: kevaundray --- crates/noirc_evaluator/src/ssa_refactor.rs | 2 +- .../src/ssa_refactor/ir/function.rs | 2 +- .../src/ssa_refactor/ir/printer.rs | 16 +++++++++-- .../src/ssa_refactor/ssa_builder/mod.rs | 15 ++++++----- .../src/ssa_refactor/ssa_gen/context.rs | 13 +++++---- .../src/ssa_refactor/ssa_gen/mod.rs | 27 ++++++++++++++----- .../src/ssa_refactor/ssa_gen/program.rs | 23 ++++++++++++++++ 7 files changed, 74 insertions(+), 24 deletions(-) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor.rs b/crates/noirc_evaluator/src/ssa_refactor.rs index 37f1ead2b07..fc45071e579 100644 --- a/crates/noirc_evaluator/src/ssa_refactor.rs +++ b/crates/noirc_evaluator/src/ssa_refactor.rs @@ -9,4 +9,4 @@ mod ir; mod ssa_builder; -mod ssa_gen; +pub mod ssa_gen; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index e40c086c0e6..6789e5364fe 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -15,7 +15,7 @@ use noirc_errors::Location; /// To reference external functions, one must first import the function signature /// into the current function's context. 
#[derive(Debug)] -pub(crate) struct Function { +pub struct Function { /// Maps instructions to source locations source_locations: HashMap, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index 1471bd46e35..b0e6d787a6a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -46,7 +46,7 @@ pub(crate) fn display_block( ) -> Result { let block = &function.dfg[block_id]; - writeln!(f, " {}({}):", block_id, value_list(function, block.parameters()))?; + writeln!(f, " {}({}):", block_id, value_list_with_types(function, block.parameters()))?; for instruction in block.instructions() { display_instruction(function, *instruction, f)?; @@ -70,6 +70,16 @@ fn value(function: &Function, id: ValueId) -> String { } } +/// Display each value along with its type. E.g. `v0: Field, v1: u64, v2: u1` +fn value_list_with_types(function: &Function, values: &[ValueId]) -> String { + vecmap(values, |id| { + let value = value(function, *id); + let typ = function.dfg.type_of_value(*id); + format!("{value}: {typ}") + }) + .join(", ") +} + fn value_list(function: &Function, values: &[ValueId]) -> String { vecmap(values, |id| value(function, *id)).join(", ") } @@ -87,7 +97,9 @@ pub(crate) fn display_terminator( writeln!( f, " jmpif {} then: {}, else: {}", - condition, then_destination, else_destination + value(function, *condition), + then_destination, + else_destination ) } Some(TerminatorInstruction::Return { return_values }) => { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index 6c407dfcd42..35c918d645d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -8,7 +8,7 @@ use crate::ssa_refactor::ir::{ value::{Value, ValueId}, }; -use super::ir::instruction::Intrinsic; +use 
super::{ir::instruction::Intrinsic, ssa_gen::Ssa}; /// The per-function context for each ssa function being generated. /// @@ -20,7 +20,7 @@ use super::ir::instruction::Intrinsic; pub(crate) struct FunctionBuilder { current_function: Function, current_block: BasicBlockId, - finished_functions: Vec<(FunctionId, Function)>, + finished_functions: Vec, } impl FunctionBuilder { @@ -34,14 +34,15 @@ impl FunctionBuilder { /// Finish the current function and create a new function pub(crate) fn new_function(&mut self, name: String, function_id: FunctionId) { let new_function = Function::new(name, function_id); - let old_function = std::mem::replace(&mut self.current_function, new_function); + self.current_block = new_function.entry_block(); - self.finished_functions.push((self.current_function.id(), old_function)); + let old_function = std::mem::replace(&mut self.current_function, new_function); + self.finished_functions.push(old_function); } - pub(crate) fn finish(mut self) -> Vec<(FunctionId, Function)> { - self.finished_functions.push((self.current_function.id(), self.current_function)); - self.finished_functions + pub(crate) fn finish(mut self) -> Ssa { + self.finished_functions.push(self.current_function); + Ssa::new(self.finished_functions) } pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 909ed4ff84d..3a730cca827 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -37,18 +37,17 @@ pub(super) struct SharedContext { impl<'a> FunctionContext<'a> { pub(super) fn new( - function_id: FuncId, function_name: String, parameters: &Parameters, shared_context: &'a SharedContext, ) -> Self { - let new_id = shared_context.get_or_queue_function(function_id); + let function_id = shared_context + .pop_next_function_in_queue() + .expect("No 
function in queue for the FunctionContext to compile") + .1; - let mut this = Self { - definitions: HashMap::new(), - builder: FunctionBuilder::new(function_name, new_id), - shared_context, - }; + let builder = FunctionBuilder::new(function_name, function_id); + let mut this = Self { definitions: HashMap::new(), builder, shared_context }; this.add_parameters_to_scope(parameters); this } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 715f835ab7f..8b168b08836 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -1,6 +1,9 @@ mod context; +mod program; mod value; +pub use program::Ssa; + use context::SharedContext; use iter_extended::vecmap; use noirc_errors::Location; @@ -13,24 +16,36 @@ use self::{ use super::ir::{instruction::BinaryOp, types::Type, value::ValueId}; -pub(crate) fn generate_ssa(program: Program) { +pub fn generate_ssa(program: Program) -> Ssa { let context = SharedContext::new(program); - let main = context.program.main(); let main_id = Program::main_id(); - let main_name = main.name.clone(); + let main = context.program.main(); + + // Queue the main function for compilation + context.get_or_queue_function(main_id); - let mut function_context = FunctionContext::new(main_id, main_name, &main.parameters, &context); - function_context.codegen_expression(&main.body); + let mut function_context = FunctionContext::new(main.name.clone(), &main.parameters, &context); + function_context.codegen_function_body(&main.body); while let Some((src_function_id, dest_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; function_context.new_function(dest_id, function.name.clone(), &function.parameters); - function_context.codegen_expression(&function.body); + function_context.codegen_function_body(&function.body); } + + function_context.builder.finish() } impl<'a> 
FunctionContext<'a> { + /// Codegen a function's body and set its return value to that of its last parameter. + /// For functions returning nothing, this will be an empty list. + fn codegen_function_body(&mut self, body: &Expression) { + let return_value = self.codegen_expression(body); + let results = return_value.into_value_list(self); + self.builder.terminate_with_return(results); + } + fn codegen_expression(&mut self, expr: &Expression) -> Values { match expr { Expression::Ident(ident) => self.codegen_ident(ident), diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs new file mode 100644 index 00000000000..03eb76dec50 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs @@ -0,0 +1,23 @@ +use std::fmt::Display; + +use crate::ssa_refactor::ir::function::Function; + +/// Contains the entire Ssa representation of the program +pub struct Ssa { + functions: Vec, +} + +impl Ssa { + pub fn new(functions: Vec) -> Self { + Self { functions } + } +} + +impl Display for Ssa { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for function in &self.functions { + writeln!(f, "{function}")?; + } + Ok(()) + } +} From 55ef8a2d3246a5edbf11a605c092b09151b120e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Fri, 28 Apr 2023 17:42:04 +0200 Subject: [PATCH 20/66] fix(wasm): add std after dependencies (#1245) --- crates/wasm/src/compile.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/wasm/src/compile.rs b/crates/wasm/src/compile.rs index cd34e685fa2..18bd30029bd 100644 --- a/crates/wasm/src/compile.rs +++ b/crates/wasm/src/compile.rs @@ -81,13 +81,13 @@ pub fn compile(args: JsValue) -> JsValue { let path = PathBuf::from(&options.entry_point); driver.create_local_crate(path, CrateType::Binary); - // We are always adding std lib implicitly. It comes bundled with binary. 
- add_noir_lib(&mut driver, "std"); - for dependency in options.optional_dependencies_set { add_noir_lib(&mut driver, dependency.as_str()); } + // We are always adding std lib implicitly. It comes bundled with binary. + add_noir_lib(&mut driver, "std"); + driver.check_crate(&options.compile_options).unwrap_or_else(|_| panic!("Crate check failed")); if options.contracts { From a0c6bfe824d25b0ad9a7452ee1c0da2723b94669 Mon Sep 17 00:00:00 2001 From: jfecher Date: Fri, 28 Apr 2023 15:19:25 -0400 Subject: [PATCH 21/66] chore(ssa refactor): Fix loading from mutable parameters (#1248) * Implement first-class functions * Update crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs Co-authored-by: kevaundray * Implement intrinsics * Fix no return & duplicate main * bad git. remove duplicated functions * Remove Option in builder * Undo debug printing in driver * Fix loading from mutable parameters * Grammar * Fix storing to mutable arrays * Fix unused variable * Fix array loading * Change terminology --------- Co-authored-by: kevaundray --- .../src/ssa_refactor/ir/basic_block.rs | 9 +++ .../src/ssa_refactor/ir/instruction.rs | 4 +- .../src/ssa_refactor/ir/printer.rs | 2 +- .../src/ssa_refactor/ir/value.rs | 2 +- .../src/ssa_refactor/ssa_builder/mod.rs | 61 ++++++++----------- .../src/ssa_refactor/ssa_gen/context.rs | 40 ++++++++---- .../src/ssa_refactor/ssa_gen/mod.rs | 37 ++++++++--- .../src/ssa_refactor/ssa_gen/value.rs | 9 +++ 8 files changed, 105 insertions(+), 59 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index 13d1b3ca6f8..f6ca293f0fd 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -77,4 +77,13 @@ impl BasicBlock { None => vec![].into_iter(), } } + + /// Removes the given instruction from this block if present or panics otherwise. 
+ pub(crate) fn remove_instruction(&mut self, instruction: InstructionId) { + let index = + self.instructions.iter().position(|id| *id == instruction).unwrap_or_else(|| { + panic!("remove_instruction: No such instruction {instruction:?} in block") + }); + self.instructions.remove(index); + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 756c7ae5a13..545519e316f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -25,7 +25,7 @@ impl std::fmt::Display for Intrinsic { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Intrinsic::Println => write!(f, "println"), - Intrinsic::Sort => write!(f, "sort"), + Intrinsic::Sort => write!(f, "arraysort"), Intrinsic::ToBits(Endian::Big) => write!(f, "to_be_bits"), Intrinsic::ToBits(Endian::Little) => write!(f, "to_le_bits"), Intrinsic::ToRadix(Endian::Big) => write!(f, "to_be_radix"), @@ -39,7 +39,7 @@ impl Intrinsic { pub(crate) fn lookup(name: &str) -> Option { match name { "println" => Some(Intrinsic::Println), - "array_sort" => Some(Intrinsic::Sort), + "arraysort" => Some(Intrinsic::Sort), "to_le_radix" => Some(Intrinsic::ToRadix(Endian::Little)), "to_be_radix" => Some(Intrinsic::ToRadix(Endian::Big)), "to_le_bits" => Some(Intrinsic::ToBits(Endian::Little)), diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index b0e6d787a6a..a0ab65bf639 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -143,7 +143,7 @@ pub(crate) fn display_instruction( Instruction::Allocate { size } => writeln!(f, "alloc {size} fields"), Instruction::Load { address } => writeln!(f, "load {}", show(*address)), Instruction::Store { address, value } => { - writeln!(f, "store {} at {}", show(*address), 
show(*value)) + writeln!(f, "store {} at {}", show(*value), show(*address)) } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index d7d8d8a41ab..868aee2199e 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -12,7 +12,7 @@ pub(crate) type ValueId = Id; /// Value is the most basic type allowed in the IR. /// Transition Note: A Id is similar to `NodeId` in our previous IR. -#[derive(Debug, PartialEq, Eq, Hash, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] pub(crate) enum Value { /// This value was created due to an instruction /// diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index 35c918d645d..df80799c28a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -8,7 +8,10 @@ use crate::ssa_refactor::ir::{ value::{Value, ValueId}, }; -use super::{ir::instruction::Intrinsic, ssa_gen::Ssa}; +use super::{ + ir::instruction::{InstructionId, Intrinsic}, + ssa_gen::Ssa, +}; /// The per-function context for each ssa function being generated. /// @@ -18,7 +21,7 @@ use super::{ir::instruction::Intrinsic, ssa_gen::Ssa}; /// Contrary to the name, this struct has the capacity to build as many /// functions as needed, although it is limited to one function at a time. 
pub(crate) struct FunctionBuilder { - current_function: Function, + pub(super) current_function: Function, current_block: BasicBlockId, finished_functions: Vec, } @@ -114,12 +117,7 @@ impl FunctionBuilder { offset: ValueId, type_to_load: Type, ) -> ValueId { - if let Some(offset) = self.current_function.dfg.get_numeric_constant(offset) { - if !offset.is_zero() { - let offset = self.field_constant(offset); - address = self.insert_binary(address, BinaryOp::Add, offset); - } - }; + address = self.insert_binary(address, BinaryOp::Add, offset); self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] } @@ -205,32 +203,6 @@ impl FunctionBuilder { self.terminate_block_with(TerminatorInstruction::Return { return_values }); } - /// Mutates a load instruction into a store instruction. - /// - /// This function is used while generating ssa-form for assignments currently. - /// To re-use most of the expression infrastructure, the lvalue of an assignment - /// is compiled as an expression and to assign to it we replace the final load - /// (which should always be present to load a mutable value) with a store of the - /// assigned value. - pub(crate) fn mutate_load_into_store(&mut self, load_result: ValueId, value_to_store: ValueId) { - let (instruction, address) = match &self.current_function.dfg[load_result] { - Value::Instruction { instruction, .. 
} => { - match &self.current_function.dfg[*instruction] { - Instruction::Load { address } => (*instruction, *address), - other => { - panic!("mutate_load_into_store: Expected Load instruction, found {other:?}") - } - } - } - other => panic!("mutate_load_into_store: Expected Load instruction, found {other:?}"), - }; - - let store = Instruction::Store { address, value: value_to_store }; - self.current_function.dfg.replace_instruction(instruction, store); - // Clear the results of the previous load for safety - self.current_function.dfg.make_instruction_results(instruction, None); - } - /// Returns a ValueId pointing to the given function or imports the function /// into the current function if it was not already, and returns that ID. pub(crate) fn import_function(&mut self, function: FunctionId) -> ValueId { @@ -243,4 +215,25 @@ impl FunctionBuilder { Intrinsic::lookup(name) .map(|intrinsic| self.current_function.dfg.import_intrinsic(intrinsic)) } + + /// Removes the given instruction from the current block or panics otherwise. 
+ pub(crate) fn remove_instruction_from_current_block(&mut self, instruction: InstructionId) { + self.current_function.dfg[self.current_block].remove_instruction(instruction); + } +} + +impl std::ops::Index for FunctionBuilder { + type Output = Value; + + fn index(&self, id: ValueId) -> &Self::Output { + &self.current_function.dfg[id] + } +} + +impl std::ops::Index for FunctionBuilder { + type Output = Instruction; + + fn index(&self, id: InstructionId) -> &Self::Output { + &self.current_function.dfg[id] + } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 3a730cca827..df54d5bd079 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -14,7 +14,7 @@ use crate::ssa_refactor::ir::types::Type; use crate::ssa_refactor::ir::value::ValueId; use crate::ssa_refactor::ssa_builder::FunctionBuilder; -use super::value::{Tree, Values}; +use super::value::{Tree, Value, Values}; // TODO: Make this a threadsafe queue so we can compile functions in parallel type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; @@ -63,8 +63,8 @@ impl<'a> FunctionContext<'a> { /// The returned parameter type list will be flattened, so any struct parameters will /// be returned as one entry for each field (recursively). fn add_parameters_to_scope(&mut self, parameters: &Parameters) { - for (id, _, _, typ) in parameters { - self.add_parameter_to_scope(*id, typ); + for (id, mutable, _, typ) in parameters { + self.add_parameter_to_scope(*id, typ, *mutable); } } @@ -72,14 +72,34 @@ impl<'a> FunctionContext<'a> { /// /// Single is in quotes here because in the case of tuple parameters, the tuple is flattened /// into a new parameter for each field recursively. 
- fn add_parameter_to_scope(&mut self, parameter_id: LocalId, parameter_type: &ast::Type) { + fn add_parameter_to_scope( + &mut self, + parameter_id: LocalId, + parameter_type: &ast::Type, + mutable: bool, + ) { // Add a separate parameter for each field type in 'parameter_type' - let parameter_value = - Self::map_type(parameter_type, |typ| self.builder.add_parameter(typ).into()); + let parameter_value = Self::map_type(parameter_type, |typ| { + let value = self.builder.add_parameter(typ); + if mutable { + self.new_mutable_variable(value) + } else { + value.into() + } + }); self.definitions.insert(parameter_id, parameter_value); } + /// Allocate a single slot of memory and store into it the given initial value of the variable. + /// Always returns a Value::Mutable wrapping the allocate instruction. + pub(super) fn new_mutable_variable(&mut self, value_to_store: ValueId) -> Value { + let alloc = self.builder.insert_allocate(1); + self.builder.insert_store(alloc, value_to_store); + let typ = self.builder.type_of_value(value_to_store); + Value::Mutable(alloc, typ) + } + /// Maps the given type to a Tree of the result type. /// /// This can be used to (for example) flatten a tuple type, creating @@ -224,12 +244,8 @@ impl<'a> FunctionContext<'a> { } } (Tree::Leaf(lhs), Tree::Leaf(rhs)) => { - // Re-evaluating these should have no effect - let (lhs, rhs) = (lhs.eval(self), rhs.eval(self)); - - // Expect lhs to be previously evaluated. If it is a load we need to undo - // the load to get the address to store to. 
- self.builder.mutate_load_into_store(lhs, rhs); + let (lhs, rhs) = (lhs.eval_reference(), rhs.eval(self)); + self.builder.insert_store(lhs, rhs); } (lhs, rhs) => { unreachable!( diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 8b168b08836..4b93a7e1185 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -161,16 +161,21 @@ impl<'a> FunctionContext<'a> { fn codegen_index(&mut self, index: &ast::Index) -> Values { let array = self.codegen_non_tuple_expression(&index.collection); - self.codegen_array_index(array, &index.index, &index.element_type) + self.codegen_array_index(array, &index.index, &index.element_type, true) } /// This is broken off from codegen_index so that it can also be - /// used to codegen a LValue::Index + /// used to codegen a LValue::Index. + /// + /// Set load_result to true to load from each relevant index of the array + /// (it may be multiple in the case of tuples). Set it to false to instead + /// return a reference to each element, for use with the store instruction. 
fn codegen_array_index( &mut self, array: super::ir::value::ValueId, index: &ast::Expression, element_type: &ast::Type, + load_result: bool, ) -> Values { let base_offset = self.codegen_non_tuple_expression(index); @@ -183,7 +188,12 @@ impl<'a> FunctionContext<'a> { Self::map_type(element_type, |typ| { let offset = self.make_offset(base_index, field_index); field_index += 1; - self.builder.insert_load(array, offset, typ).into() + if load_result { + self.builder.insert_load(array, offset, typ) + } else { + self.builder.insert_binary(array, BinaryOp::Add, offset) + } + .into() }) } @@ -292,10 +302,7 @@ impl<'a> FunctionContext<'a> { if let_expr.mutable { values.map_mut(|value| { let value = value.eval(self); - // Size is always 1 here since we're recursively unpacking tuples - let alloc = self.builder.insert_allocate(1); - self.builder.insert_store(alloc, value); - alloc.into() + Tree::Leaf(self.new_mutable_variable(value)) }); } @@ -312,16 +319,28 @@ impl<'a> FunctionContext<'a> { fn codegen_assign(&mut self, assign: &ast::Assign) -> Values { let lhs = self.codegen_lvalue(&assign.lvalue); let rhs = self.codegen_expression(&assign.expression); + self.assign(lhs, rhs); self.unit_value() } fn codegen_lvalue(&mut self, lvalue: &ast::LValue) -> Values { match lvalue { - ast::LValue::Ident(ident) => self.codegen_ident(ident), + ast::LValue::Ident(ident) => { + // Do not .eval the Values here! We do not want to load from any references within + // since we want to return the references instead + match &ident.definition { + ast::Definition::Local(id) => self.lookup(*id), + other => panic!("Unexpected definition found for mutable value: {other}"), + } + } ast::LValue::Index { array, index, element_type, location: _ } => { + // Note that unlike the Ident case, we're .eval'ing the array here. 
+ // This is because arrays are already references and thus a mutable reference + // to an array would be a Value::Mutable( Value::Mutable ( address ) ), and we + // only need the inner mutable value. let array = self.codegen_lvalue(array).into_leaf().eval(self); - self.codegen_array_index(array, index, element_type) + self.codegen_array_index(array, index, element_type, false) } ast::LValue::MemberAccess { object, field_index } => { let object = self.codegen_lvalue(object); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index 410e375fcd6..fa27e70ad9b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -32,6 +32,15 @@ impl Value { } } } + + /// Evaluates the value, returning a reference to the mutable variable found within + /// if possible. Compared to .eval, this method will not load from self if it is Value::Mutable. + pub(super) fn eval_reference(self) -> IrValueId { + match self { + Value::Normal(value) => value, + Value::Mutable(address, _) => address, + } + } } pub(super) type Values = Tree; From 66b7105d0c66cc679580f5b751076cf3da2cd20a Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Fri, 28 Apr 2023 21:19:09 +0100 Subject: [PATCH 22/66] chore(ci): Utilize new workflow to build binaries (#1250) * chore(ci): Utilize new workflow to build binaries * Update release.yml --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- .github/workflows/release.yml | 38 ++++------------------------------- 1 file changed, 4 insertions(+), 34 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c7eb6df168e..f242f10d971 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -58,8 +58,8 @@ jobs: git commit -m 'chore: Update lockfile' git push - build-linux-binaries: - name: Build linux binaries + build-binaries: 
+ name: Build binaries needs: [release-please] if: ${{ needs.release-please.outputs.tag-name }} runs-on: ubuntu-latest @@ -67,41 +67,11 @@ jobs: - name: Dispatch to build-nargo uses: benc-uk/workflow-dispatch@v1 with: - workflow: publish-linux.yml + workflow: publish.yml repo: noir-lang/build-nargo ref: master token: ${{ secrets.NOIR_REPO_TOKEN }} - inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' - - build-windows-binaries: - name: Build windows binaries - needs: [release-please] - if: ${{ needs.release-please.outputs.tag-name }} - runs-on: ubuntu-latest - steps: - - name: Dispatch to build-nargo - uses: benc-uk/workflow-dispatch@v1 - with: - workflow: publish-x86_64-pc-windows-wasm.yml - repo: noir-lang/build-nargo - ref: master - token: ${{ secrets.NOIR_REPO_TOKEN }} - inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' - - build-mac-binaries: - name: Build mac binaries - needs: [release-please] - if: ${{ needs.release-please.outputs.tag-name }} - runs-on: ubuntu-latest - steps: - - name: Dispatch to build-nargo - uses: benc-uk/workflow-dispatch@v1 - with: - workflow: publish-apple-darwin-wasm.yml - repo: noir-lang/build-nargo - ref: master - token: ${{ secrets.NOIR_REPO_TOKEN }} - inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' + inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}", "publish": true }' publish-wasm: name: Publish noir_wasm package From 049773bd4d08afaf70c3cb1e4c658df0e6f50ac6 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Fri, 28 Apr 2023 21:56:02 +0100 Subject: [PATCH 23/66] chore(noir): Release 0.5.0 (#1202) * chore(noir): Release 0.5.0 * chore: Update lockfile --- CHANGELOG.md | 18 ++++++++++++++++++ Cargo.lock | 22 +++++++++++----------- Cargo.toml | 2 +- flake.nix | 2 +- 4 files changed, 31 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9e03152c03a..292c08fb8c2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # 
Changelog +## [0.5.0](https://github.com/noir-lang/noir/compare/v0.4.1...v0.5.0) (2023-04-28) + + +### ⚠ BREAKING CHANGES + +* Switch to aztec_backend that uses upstream BB & UltraPlonk ([#1114](https://github.com/noir-lang/noir/issues/1114)) + +### Features + +* **noir:** added `distinct` keyword ([#1219](https://github.com/noir-lang/noir/issues/1219)) ([3a65f30](https://github.com/noir-lang/noir/commit/3a65f304c25e8239f9735ce1e6dee29d7eecc244)) +* **noir:** added assert keyword ([#1227](https://github.com/noir-lang/noir/issues/1227)) ([0dc2cac](https://github.com/noir-lang/noir/commit/0dc2cac5bc26d277a0e6377fd774e0ec9c8d3531)) +* Switch to aztec_backend that uses upstream BB & UltraPlonk ([#1114](https://github.com/noir-lang/noir/issues/1114)) ([f14fe0b](https://github.com/noir-lang/noir/commit/f14fe0b97e75eb5be39a48675149cf08d718abf6)) + + +### Bug Fixes + +* **wasm:** add std after dependencies ([#1245](https://github.com/noir-lang/noir/issues/1245)) ([55ef8a2](https://github.com/noir-lang/noir/commit/55ef8a2d3246a5edbf11a605c092b09151b120e6)) + ## [0.4.1](https://github.com/noir-lang/noir/compare/v0.4.0...v0.4.1) (2023-04-20) diff --git a/Cargo.lock b/Cargo.lock index 85b5c12aa46..a62d34f67a7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -118,7 +118,7 @@ checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" [[package]] name = "arena" -version = "0.4.1" +version = "0.5.0" dependencies = [ "generational-arena", ] @@ -1178,7 +1178,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.4.1" +version = "0.5.0" dependencies = [ "cfg-if 1.0.0", "codespan-reporting 0.9.5", @@ -1674,7 +1674,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.4.1" +version = "0.5.0" [[package]] name = "itertools" @@ -1884,7 +1884,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "iter-extended", @@ -1898,7 +1898,7 @@ 
dependencies = [ [[package]] name = "nargo_cli" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "assert_cmd", @@ -1930,7 +1930,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "build-data", @@ -1946,7 +1946,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "iter-extended", @@ -1958,7 +1958,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "clap", @@ -1973,7 +1973,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.4.1" +version = "0.5.0" dependencies = [ "chumsky", "codespan", @@ -1984,7 +1984,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "arena", @@ -2000,7 +2000,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.4.1" +version = "0.5.0" dependencies = [ "acvm", "arena", diff --git a/Cargo.toml b/Cargo.toml index 26eec846ef9..2ae36eee6f1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ default-members = ["crates/nargo_cli"] [workspace.package] # x-release-please-start-version -version = "0.4.1" +version = "0.5.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" diff --git a/flake.nix b/flake.nix index 28859ebb2b9..03d8f6be43f 100644 --- a/flake.nix +++ b/flake.nix @@ -106,7 +106,7 @@ commonArgs = environment // { pname = "noir"; # x-release-please-start-version - version = "0.4.1"; + version = "0.5.0"; # x-release-please-end # Use our custom stdenv to build and test our Rust project From 41d96ae9bbb9ce7010451cae5dc1f66d5e57d45b Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 1 May 2023 15:49:19 +0100 Subject: [PATCH 24/66] chore(nargo): replace `aztec_backend` with `acvm-backend-barretenberg` (#1226) * chore: replace `aztec_backend` with 
`acvm-backend-barretenberg` * feat: update to ACVM 0.10.0 * chore: move `ComputeMerkleRoot` to same match arm as `HashToField128Security` * chore: bump backend commit * feat: update stdlib to use new merkle black box function * fix: bump commit of barretenberg to match acvm-backend-barretenberg * feat: update `merkle_insert` to use new `compute_merkle_root` function * chore: update to use ACVM 0.10.3 * chore: bump backend commit --- Cargo.lock | 144 ++++-------------- Cargo.toml | 2 +- crates/nargo/src/ops/execute.rs | 7 +- crates/nargo_cli/Cargo.toml | 7 +- crates/nargo_cli/src/backends.rs | 14 +- crates/nargo_cli/src/cli/check_cmd.rs | 2 +- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 2 +- crates/nargo_cli/src/cli/compile_cmd.rs | 2 +- crates/nargo_cli/src/cli/execute_cmd.rs | 2 +- crates/nargo_cli/src/cli/gates_cmd.rs | 2 +- crates/nargo_cli/src/cli/print_acir_cmd.rs | 2 +- crates/nargo_cli/src/cli/prove_cmd.rs | 2 +- crates/nargo_cli/src/cli/test_cmd.rs | 4 +- crates/nargo_cli/src/cli/verify_cmd.rs | 2 +- .../tests/test_data/merkle_insert/src/main.nr | 4 +- crates/noirc_evaluator/src/lib.rs | 2 +- .../src/ssa/acir_gen/constraints.rs | 20 +-- .../src/ssa/acir_gen/internal_var.rs | 2 +- .../src/ssa/acir_gen/operations/binary.rs | 4 +- .../src/ssa/acir_gen/operations/bitwise.rs | 4 +- .../src/ssa/acir_gen/operations/cmp.rs | 4 +- .../src/ssa/acir_gen/operations/intrinsics.rs | 2 +- .../src/ssa/acir_gen/operations/not.rs | 2 +- .../src/ssa/acir_gen/operations/sort.rs | 5 +- crates/noirc_evaluator/src/ssa/builtin.rs | 18 ++- flake.lock | 6 +- noir_stdlib/src/merkle.nr | 13 +- 27 files changed, 93 insertions(+), 187 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a62d34f67a7..b281ed5a9d0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "acir" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"018148d69bf14422b1c1d62909a241af2a7f51fec064feb2b01de88fb02b11b8" +checksum = "510b65efd4d20bf266185ce0a5dc7d29bcdd196a6a1835c20908fd88040de76c" dependencies = [ "acir_field", "flate2", @@ -16,9 +16,9 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d40dac25cf6be6335dd86286caeac859afd0dc74a4a75c64eed041b0f00a278" +checksum = "f4f032e710c67fd146caedc8fe1dea6e95f01ab59453e42d59b604a51fef3dfe" dependencies = [ "ark-bn254", "ark-ff", @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "acvm" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17b7bc8f2b2215075b8e080ba3a0b8b7d759f04bc44b27e5bb8d845f4c77f20" +checksum = "2611266039740ffd1978f23258bd6ce3166c22cf15b8227685c2f3bb20ae2ee0" dependencies = [ "acir", "acvm_stdlib", @@ -46,11 +46,30 @@ dependencies = [ "thiserror", ] +[[package]] +name = "acvm-backend-barretenberg" +version = "0.0.0" +source = "git+https://github.com/noir-lang/aztec_backend?rev=c9fb9e806f1400a2ff7594a0669bec56025220bb#c9fb9e806f1400a2ff7594a0669bec56025220bb" +dependencies = [ + "acvm", + "barretenberg-sys", + "blake2", + "dirs 3.0.2", + "futures-util", + "getrandom", + "indicatif", + "pkg-config", + "reqwest", + "rust-embed", + "tokio", + "wasmer", +] + [[package]] name = "acvm_stdlib" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ce2d19a9d1e7ff1bf415ed909b43031e33ef6df21be70e470bb1817b3e6989" +checksum = "a5ec51160c66eba75dc15a028a2391675386fd395b3897478d89a386c64a48dd" dependencies = [ "acir", ] @@ -305,27 +324,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "barretenberg_static_lib" -version = "0.1.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=e3d4504f15e1295e637c4da80b1d08c87c267c45#e3d4504f15e1295e637c4da80b1d08c87c267c45" -dependencies = [ - 
"barretenberg-sys", - "common", -] - -[[package]] -name = "barretenberg_wasm" -version = "0.1.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=e3d4504f15e1295e637c4da80b1d08c87c267c45#e3d4504f15e1295e637c4da80b1d08c87c267c45" -dependencies = [ - "common", - "getrandom", - "pkg-config", - "rust-embed", - "wasmer", -] - [[package]] name = "base64" version = "0.21.0" @@ -618,21 +616,6 @@ dependencies = [ "tracing-error", ] -[[package]] -name = "common" -version = "0.1.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=e3d4504f15e1295e637c4da80b1d08c87c267c45#e3d4504f15e1295e637c4da80b1d08c87c267c45" -dependencies = [ - "acvm", - "blake2", - "dirs 3.0.2", - "futures-util", - "indicatif", - "reqwest", - "sled", - "tokio", -] - [[package]] name = "console" version = "0.15.5" @@ -1202,16 +1185,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "fs2" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "fuchsia-cprng" version = "0.1.1" @@ -1285,15 +1258,6 @@ dependencies = [ "slab", ] -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - [[package]] name = "generational-arena" version = "0.2.8" @@ -1761,16 +1725,6 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" -[[package]] -name = "lock_api" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" -dependencies = [ - "autocfg", - "scopeguard", -] - [[package]] name = "log" version = "0.4.17" @@ -1901,10 +1855,9 @@ 
name = "nargo_cli" version = "0.5.0" dependencies = [ "acvm", + "acvm-backend-barretenberg", "assert_cmd", "assert_fs", - "barretenberg_static_lib", - "barretenberg_wasm", "build-data", "cfg-if 1.0.0", "clap", @@ -2124,31 +2077,6 @@ version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if 1.0.0", - "instant", - "libc", - "redox_syscall", - "smallvec", - "winapi", -] - [[package]] name = "paste" version = "1.0.12" @@ -2941,22 +2869,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "sled" -version = "0.34.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f96b4737c2ce5987354855aed3797279def4ebf734436c6aa4552cf8e169935" -dependencies = [ - "crc32fast", - "crossbeam-epoch", - "crossbeam-utils", - "fs2", - "fxhash", - "libc", - "log", - "parking_lot", -] - [[package]] name = "smallvec" version = "1.10.0" diff --git a/Cargo.toml b/Cargo.toml index 2ae36eee6f1..34dfe88e2ba 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,7 +24,7 @@ edition = "2021" rust-version = "1.66" [workspace.dependencies] -acvm = "0.9.0" +acvm = "0.10.3" arena = { path = "crates/arena" } fm = { path = "crates/fm" } iter-extended = { path = "crates/iter-extended" } diff --git a/crates/nargo/src/ops/execute.rs b/crates/nargo/src/ops/execute.rs index eb82df60d41..e4c8a5afbb5 100644 --- a/crates/nargo/src/ops/execute.rs +++ b/crates/nargo/src/ops/execute.rs @@ -1,5 +1,5 
@@ -use acvm::PartialWitnessGenerator; use acvm::{acir::circuit::Circuit, pwg::block::Blocks}; +use acvm::{PartialWitnessGenerator, PartialWitnessGeneratorStatus}; use noirc_abi::WitnessMap; use crate::NargoError; @@ -10,9 +10,8 @@ pub fn execute_circuit( mut initial_witness: WitnessMap, ) -> Result { let mut blocks = Blocks::default(); - let (unresolved_opcodes, oracles) = - backend.solve(&mut initial_witness, &mut blocks, circuit.opcodes)?; - if !unresolved_opcodes.is_empty() || !oracles.is_empty() { + let solver_status = backend.solve(&mut initial_witness, &mut blocks, circuit.opcodes)?; + if matches!(solver_status, PartialWitnessGeneratorStatus::RequiresOracleData { .. }) { todo!("Add oracle support to nargo execute") } diff --git a/crates/nargo_cli/Cargo.toml b/crates/nargo_cli/Cargo.toml index 40ab4b92459..2568f2a86f5 100644 --- a/crates/nargo_cli/Cargo.toml +++ b/crates/nargo_cli/Cargo.toml @@ -37,8 +37,7 @@ termcolor = "1.1.2" color-eyre = "0.6.2" # Backends -aztec_backend = { optional = true, package = "barretenberg_static_lib", git = "https://github.com/noir-lang/aztec_backend", rev = "e3d4504f15e1295e637c4da80b1d08c87c267c45" } -aztec_wasm_backend = { optional = true, package = "barretenberg_wasm", git = "https://github.com/noir-lang/aztec_backend", rev = "e3d4504f15e1295e637c4da80b1d08c87c267c45" } +acvm-backend-barretenberg = { git = "https://github.com/noir-lang/aztec_backend", rev = "c9fb9e806f1400a2ff7594a0669bec56025220bb", default-features=false } [dev-dependencies] tempdir = "0.3.7" @@ -49,6 +48,6 @@ predicates = "2.1.5" [features] default = ["plonk_bn254"] # The plonk backend can only use bn254, so we do not specify the field -plonk_bn254 = ["aztec_backend"] -plonk_bn254_wasm = ["aztec_wasm_backend"] +plonk_bn254 = ["acvm-backend-barretenberg/native"] +plonk_bn254_wasm = ["acvm-backend-barretenberg/wasm"] diff --git a/crates/nargo_cli/src/backends.rs b/crates/nargo_cli/src/backends.rs index e1113279f80..bbec5c99006 100644 --- 
a/crates/nargo_cli/src/backends.rs +++ b/crates/nargo_cli/src/backends.rs @@ -1,14 +1,8 @@ -cfg_if::cfg_if! { - if #[cfg(feature = "plonk_bn254")] { - pub(crate) use aztec_backend::Plonk as ConcreteBackend; - } else if #[cfg(feature = "plonk_bn254_wasm")] { - pub(crate) use aztec_wasm_backend::Plonk as ConcreteBackend; - } else { - compile_error!("please specify a backend to compile with"); - } -} +pub(crate) use acvm_backend_barretenberg::Barretenberg as ConcreteBackend; + +#[cfg(not(any(feature = "plonk_bn254", feature = "plonk_bn254_wasm")))] +compile_error!("please specify a backend to compile with"); -// As we have 3 feature flags we must test all 3 potential pairings to ensure they're mutually exclusive. #[cfg(all(feature = "plonk_bn254", feature = "plonk_bn254_wasm"))] compile_error!( "feature \"plonk_bn254\" and feature \"plonk_bn254_wasm\" cannot be enabled at the same time" diff --git a/crates/nargo_cli/src/cli/check_cmd.rs b/crates/nargo_cli/src/cli/check_cmd.rs index 557093444a1..3049c830def 100644 --- a/crates/nargo_cli/src/cli/check_cmd.rs +++ b/crates/nargo_cli/src/cli/check_cmd.rs @@ -24,7 +24,7 @@ pub(crate) fn run(args: CheckCommand, config: NargoConfig) -> Result<(), CliErro } fn check_from_path>(p: P, compile_options: &CompileOptions) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let mut driver = Resolver::resolve_root_manifest(p.as_ref(), backend.np_language())?; diff --git a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs index 319a5722708..f23502a15b5 100644 --- a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -19,7 +19,7 @@ pub(crate) struct CodegenVerifierCommand { } pub(crate) fn run(args: CodegenVerifierCommand, config: NargoConfig) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = 
crate::backends::ConcreteBackend::default(); // TODO(#1201): Should this be a utility function? let circuit_build_path = args diff --git a/crates/nargo_cli/src/cli/compile_cmd.rs b/crates/nargo_cli/src/cli/compile_cmd.rs index 50c21486385..78b52003166 100644 --- a/crates/nargo_cli/src/cli/compile_cmd.rs +++ b/crates/nargo_cli/src/cli/compile_cmd.rs @@ -30,7 +30,7 @@ pub(crate) struct CompileCommand { pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliError> { let circuit_dir = config.program_dir.join(TARGET_DIR); - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); // If contracts is set we're compiling every function in a 'contract' rather than just 'main'. if args.contracts { diff --git a/crates/nargo_cli/src/cli/execute_cmd.rs b/crates/nargo_cli/src/cli/execute_cmd.rs index 9d1429bbda7..adeefc860a5 100644 --- a/crates/nargo_cli/src/cli/execute_cmd.rs +++ b/crates/nargo_cli/src/cli/execute_cmd.rs @@ -46,7 +46,7 @@ fn execute_with_path( program_dir: &Path, compile_options: &CompileOptions, ) -> Result<(Option, WitnessMap), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let compiled_program = compile_circuit(&backend, program_dir, compile_options)?; diff --git a/crates/nargo_cli/src/cli/gates_cmd.rs b/crates/nargo_cli/src/cli/gates_cmd.rs index a5093b4d775..9fe9f5c7a53 100644 --- a/crates/nargo_cli/src/cli/gates_cmd.rs +++ b/crates/nargo_cli/src/cli/gates_cmd.rs @@ -23,7 +23,7 @@ fn count_gates_with_path>( program_dir: P, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let compiled_program = compile_circuit(&backend, program_dir.as_ref(), compile_options)?; let num_opcodes = compiled_program.circuit.opcodes.len(); diff --git a/crates/nargo_cli/src/cli/print_acir_cmd.rs 
b/crates/nargo_cli/src/cli/print_acir_cmd.rs index 589cc490f40..38b841121bc 100644 --- a/crates/nargo_cli/src/cli/print_acir_cmd.rs +++ b/crates/nargo_cli/src/cli/print_acir_cmd.rs @@ -22,7 +22,7 @@ fn print_acir_with_path>( program_dir: P, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let compiled_program = compile_circuit(&backend, program_dir.as_ref(), compile_options)?; println!("{}", compiled_program.circuit); diff --git a/crates/nargo_cli/src/cli/prove_cmd.rs b/crates/nargo_cli/src/cli/prove_cmd.rs index fd60f004e2b..cecdee23fee 100644 --- a/crates/nargo_cli/src/cli/prove_cmd.rs +++ b/crates/nargo_cli/src/cli/prove_cmd.rs @@ -65,7 +65,7 @@ pub(crate) fn prove_with_path>( check_proof: bool, compile_options: &CompileOptions, ) -> Result, CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let preprocessed_program = match circuit_build_path { Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, diff --git a/crates/nargo_cli/src/cli/test_cmd.rs b/crates/nargo_cli/src/cli/test_cmd.rs index d168e6c39ca..65f8265a862 100644 --- a/crates/nargo_cli/src/cli/test_cmd.rs +++ b/crates/nargo_cli/src/cli/test_cmd.rs @@ -32,7 +32,7 @@ fn run_tests( test_name: &str, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let mut driver = Resolver::resolve_root_manifest(program_dir, backend.np_language())?; @@ -79,7 +79,7 @@ fn run_test( driver: &Driver, config: &CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let program = driver .compile_no_check(config, main) diff --git a/crates/nargo_cli/src/cli/verify_cmd.rs 
b/crates/nargo_cli/src/cli/verify_cmd.rs index cf2e4859091..07b7e351ee9 100644 --- a/crates/nargo_cli/src/cli/verify_cmd.rs +++ b/crates/nargo_cli/src/cli/verify_cmd.rs @@ -43,7 +43,7 @@ fn verify_with_path>( circuit_build_path: Option

, compile_options: CompileOptions, ) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend; + let backend = crate::backends::ConcreteBackend::default(); let preprocessed_program = match circuit_build_path { Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, diff --git a/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr b/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr index 9d612977fa8..3ab4efb64c0 100644 --- a/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr +++ b/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr @@ -12,8 +12,8 @@ fn main( let old_leaf_exists = std::merkle::check_membership(old_root, old_leaf, index, old_hash_path); constrain old_leaf_exists == 1; constrain old_root == std::merkle::compute_root_from_leaf(old_leaf, index, old_hash_path); - let new_leaf_exists = std::merkle::check_membership(new_root, leaf, index, old_hash_path); - constrain new_leaf_exists == 1; + let calculated_root = std::merkle::compute_merkle_root(leaf, index, old_hash_path); + constrain new_root == calculated_root; let h = std::hash::mimc_bn254(mimc_input); // Regression test for PR #891 diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 64a02061b0f..438ada0167c 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -174,7 +174,7 @@ impl Evaluator { let inter_var_witness = self.add_witness_to_cs(); // Link that witness to the arithmetic gate - let constraint = &arithmetic_gate - &inter_var_witness; + let constraint = &arithmetic_gate - inter_var_witness; self.opcodes.push(AcirOpcode::Arithmetic(constraint)); inter_var_witness } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs b/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs index 8257e0c9f9a..11371dc54a6 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs @@ -31,7 +31,7 @@ 
pub(crate) fn mul_with_witness( let a_arith; let a_arith = if !a.mul_terms.is_empty() && !b.is_const() { let a_witness = evaluator.create_intermediate_variable(a.clone()); - a_arith = Expression::from(&a_witness); + a_arith = Expression::from(a_witness); &a_arith } else { a @@ -42,7 +42,7 @@ pub(crate) fn mul_with_witness( a_arith } else { let b_witness = evaluator.create_intermediate_variable(b.clone()); - b_arith = Expression::from(&b_witness); + b_arith = Expression::from(b_witness); &b_arith } } else { @@ -54,9 +54,9 @@ pub(crate) fn mul_with_witness( //a*b pub(crate) fn mul(a: &Expression, b: &Expression) -> Expression { if a.is_const() { - return b * &a.q_c; + return b * a.q_c; } else if b.is_const() { - return a * &b.q_c; + return a * b.q_c; } else if !(a.is_linear() && b.is_linear()) { unreachable!("Can only multiply linear terms"); } @@ -125,9 +125,9 @@ pub(crate) fn subtract(a: &Expression, k: FieldElement, b: &Expression) -> Expre // TODO in either case, we can put this in ACIR, if its useful pub(crate) fn add(a: &Expression, k: FieldElement, b: &Expression) -> Expression { if a.is_const() { - return (b * &k) + &a.q_c; + return (b * k) + a.q_c; } else if b.is_const() { - return a.clone() + &(k * b.q_c); + return a.clone() + (k * b.q_c); } let mut output = Expression::from_field(a.q_c + k * b.q_c); @@ -497,7 +497,7 @@ pub(crate) fn evaluate_truncate( if let Some(a_c) = lhs.to_const() { let mut a_big = BigUint::from_bytes_be(&a_c.to_be_bytes()); a_big %= exp_big; - return Expression::from(&FieldElement::from_be_bytes_reduce(&a_big.to_bytes_be())); + return Expression::from(FieldElement::from_be_bytes_reduce(&a_big.to_bytes_be())); } let exp = FieldElement::from_be_bytes_reduce(&exp_big.to_bytes_be()); @@ -524,7 +524,7 @@ pub(crate) fn evaluate_truncate( let my_constraint = add(&res, -FieldElement::one(), lhs); evaluator.push_opcode(AcirOpcode::Arithmetic(my_constraint)); - Expression::from(&b_witness) + Expression::from(b_witness) } pub(crate) fn 
evaluate_udiv( @@ -552,8 +552,8 @@ pub(crate) fn evaluate_udiv( //range check q<=a try_range_constraint(q_witness, bit_size, evaluator); // a-b*q-r = 0 - let mut d = mul_with_witness(evaluator, rhs, &Expression::from(&q_witness)); - d = add(&d, FieldElement::one(), &Expression::from(&r_witness)); + let mut d = mul_with_witness(evaluator, rhs, &Expression::from(q_witness)); + d = add(&d, FieldElement::one(), &Expression::from(r_witness)); d = mul_with_witness(evaluator, &d, predicate); let div_euclidean = subtract(&pa, FieldElement::one(), &d); diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs index 8e6e16776a9..27d6b0ec25b 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var.rs @@ -98,7 +98,7 @@ impl InternalVar { /// Expression, this method is infallible. pub(crate) fn from_witness(witness: Witness) -> InternalVar { InternalVar { - expression: Expression::from(&witness), + expression: Expression::from(witness), cached_witness: Some(witness), id: None, } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs index bf1f59391f9..87280eb1fde 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs @@ -146,12 +146,12 @@ pub(crate) fn evaluate( if r_value.is_zero() { panic!("Panic - division by zero"); } else { - (l_c.expression() * &r_value.inverse()).into() + (l_c.expression() * r_value.inverse()).into() } } else { //TODO avoid creating witnesses here. 
let x_witness = acir_gen.var_cache.get_or_compute_witness(r_c, evaluator).expect("unexpected constant expression"); - let inverse = Expression::from(&constraints::evaluate_inverse( + let inverse = Expression::from(constraints::evaluate_inverse( x_witness, &predicate, evaluator, )); InternalVar::from(constraints::mul_with_witness( diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs index 947cf93edd9..f8ca271835e 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs @@ -155,9 +155,9 @@ pub(super) fn evaluate_bitwise( constraints::subtract( &Expression::from_field(max), FieldElement::one(), - &Expression::from(&result), + &Expression::from(result), ) } else { - Expression::from(&result) + Expression::from(result) } } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/cmp.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/cmp.rs index 4abd34f6e01..0f8091e2f6f 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/cmp.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/cmp.rs @@ -69,7 +69,7 @@ pub(super) fn evaluate_neq( .get_or_compute_witness(x, evaluator) .expect("unexpected constant expression"); - return Expression::from(&constraints::evaluate_zero_equality(x_witness, evaluator)); + return Expression::from(constraints::evaluate_zero_equality(x_witness, evaluator)); } // Arriving here means that `lhs` and `rhs` are not Arrays @@ -95,7 +95,7 @@ pub(super) fn evaluate_neq( .var_cache .get_or_compute_witness(x, evaluator) .expect("unexpected constant expression"); - Expression::from(&constraints::evaluate_zero_equality(x_witness, evaluator)) + Expression::from(constraints::evaluate_zero_equality(x_witness, evaluator)) } } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs 
b/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs index 7d6f7e2c32c..ea7d3d9c6c0 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/intrinsics.rs @@ -107,7 +107,7 @@ pub(crate) fn evaluate( } outputs = prepare_outputs(&mut acir_gen.memory, instruction_id, array.len, ctx, evaluator); - let out_expr: Vec = outputs.iter().map(|w| w.into()).collect(); + let out_expr: Vec = outputs.iter().map(|w| (*w).into()).collect(); for i in 0..(out_expr.len() - 1) { bound_constraint_with_offset( &out_expr[i], diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/not.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/not.rs index ff8bb26f788..76ad7c93a88 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/not.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/not.rs @@ -19,7 +19,7 @@ pub(crate) fn evaluate( let l_c = var_cache.get_or_compute_internal_var_unwrap(*value, evaluator, ctx); Some( constraints::subtract( - &Expression::from(&FieldElement::from(a)), + &Expression::from(FieldElement::from(a)), FieldElement::one(), l_c.expression(), ) diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs index 04524959fbe..ffcbf1ea7c0 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs @@ -119,6 +119,7 @@ mod test { acir::{circuit::opcodes::BlackBoxFuncCall, native_types::Witness}, pwg::block::Blocks, FieldElement, OpcodeResolution, OpcodeResolutionError, PartialWitnessGenerator, + PartialWitnessGeneratorStatus, }; use crate::{ @@ -180,10 +181,10 @@ mod test { // compute the network output by solving the constraints let backend = MockBackend {}; let mut blocks = Blocks::default(); - let (unresolved_opcodes, oracles) = backend + let solver_status = backend .solve(&mut solved_witness, &mut blocks, 
eval.opcodes.clone()) .expect("Could not solve permutation constraints"); - assert!(unresolved_opcodes.is_empty() && oracles.is_empty(), "Incomplete solution"); + assert_eq!(solver_status, PartialWitnessGeneratorStatus::Solved, "Incomplete solution"); let mut b_val = Vec::new(); for i in 0..output.len() { b_val.push(solved_witness[&b_wit[i]]); diff --git a/crates/noirc_evaluator/src/ssa/builtin.rs b/crates/noirc_evaluator/src/ssa/builtin.rs index 8248322c488..8e402343bb5 100644 --- a/crates/noirc_evaluator/src/ssa/builtin.rs +++ b/crates/noirc_evaluator/src/ssa/builtin.rs @@ -77,10 +77,10 @@ impl Opcode { | BlackBoxFunc::Pedersen | BlackBoxFunc::FixedBaseScalarMul => BigUint::zero(), // Verify returns zero or one - BlackBoxFunc::SchnorrVerify - | BlackBoxFunc::EcdsaSecp256k1 - | BlackBoxFunc::MerkleMembership => BigUint::one(), - BlackBoxFunc::HashToField128Security => ObjectType::native_field().max_size(), + BlackBoxFunc::SchnorrVerify | BlackBoxFunc::EcdsaSecp256k1 => BigUint::one(), + BlackBoxFunc::ComputeMerkleRoot | BlackBoxFunc::HashToField128Security => { + ObjectType::native_field().max_size() + } BlackBoxFunc::AES => { todo!("ICE: AES is unimplemented") } @@ -111,11 +111,13 @@ impl Opcode { BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s => { (32, ObjectType::unsigned_integer(8)) } - BlackBoxFunc::HashToField128Security => (1, ObjectType::native_field()), + BlackBoxFunc::ComputeMerkleRoot | BlackBoxFunc::HashToField128Security => { + (1, ObjectType::native_field()) + } // See issue #775 on changing this to return a boolean - BlackBoxFunc::MerkleMembership - | BlackBoxFunc::SchnorrVerify - | BlackBoxFunc::EcdsaSecp256k1 => (1, ObjectType::native_field()), + BlackBoxFunc::SchnorrVerify | BlackBoxFunc::EcdsaSecp256k1 => { + (1, ObjectType::native_field()) + } BlackBoxFunc::Pedersen => (2, ObjectType::native_field()), BlackBoxFunc::FixedBaseScalarMul => (2, ObjectType::native_field()), BlackBoxFunc::RANGE | BlackBoxFunc::AND | BlackBoxFunc::XOR => { diff --git 
a/flake.lock b/flake.lock index 7c01326f86d..6a6aff10f73 100644 --- a/flake.lock +++ b/flake.lock @@ -10,11 +10,11 @@ ] }, "locked": { - "lastModified": 1682345890, - "narHash": "sha256-ZsInK9Iy81MaCugouU3ifa5Vw2GKlJK9MxCU/LF8bIw=", + "lastModified": 1682626614, + "narHash": "sha256-TC535frlYLUTDZ2iHAtUtKpMJWngL1MFxzEXhOfeCo8=", "owner": "AztecProtocol", "repo": "barretenberg", - "rev": "87aeb375d7b434e0faf47abb79f97753ab760987", + "rev": "7b5598890c1fa4ee485a4a0015fcb23b5389392e", "type": "github" }, "original": { diff --git a/noir_stdlib/src/merkle.nr b/noir_stdlib/src/merkle.nr index 9f7c5d5b130..a47ce86c94b 100644 --- a/noir_stdlib/src/merkle.nr +++ b/noir_stdlib/src/merkle.nr @@ -5,16 +5,15 @@ // and the hashpath proves this // Currently we assume that it is a binary tree, so depth k implies a width of 2^k // XXX: In the future we can add an arity parameter -#[foreign(merkle_membership)] -fn check_membership(_root : Field, _leaf : Field, _index : Field, _hash_path: [Field]) -> Field {} - - -#[alternative(merkle_membership)] -fn check_membership_in_noir(root : Field, leaf : Field, index : Field, hash_path: [Field]) -> Field { - (compute_root_from_leaf(leaf, index, hash_path) == root) as Field +fn check_membership(_root : Field, _leaf : Field, _index : Field, _hash_path: [Field]) -> Field { + (compute_merkle_root(_leaf, _index, _hash_path) == _root) as Field } +#[foreign(compute_merkle_root)] +fn compute_merkle_root(_leaf : Field, _index : Field, _hash_path: [Field]) -> Field {} + // Returns the root of the tree from the provided leaf and its hashpath, using pedersen hash +#[alternative(compute_merkle_root)] fn compute_root_from_leaf(leaf : Field, index : Field, hash_path: [Field]) -> Field { let n = hash_path.len(); let index_bits = index.to_le_bits(n as u32); From 2a5aa52435294ddeda5b4506c3117cbd164ca2ff Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 1 May 2023 17:09:23 +0100 Subject: [PATCH 25/66] fix: Add Poseidon examples into integration tests 
(#1257) add poseidon examples into nargo_cli --- .../tests/test_data/poseidon_bn254_hash/Nargo.toml | 0 .../tests/test_data/poseidon_bn254_hash/Prover.toml | 0 .../tests/test_data/poseidon_bn254_hash/src/main.nr | 0 .../tests/test_data/poseidonsponge_x5_254/Nargo.toml | 0 .../tests/test_data/poseidonsponge_x5_254/Prover.toml | 0 .../tests/test_data/poseidonsponge_x5_254/src/main.nr | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename crates/{nargo => nargo_cli}/tests/test_data/poseidon_bn254_hash/Nargo.toml (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidon_bn254_hash/Prover.toml (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidon_bn254_hash/src/main.nr (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidonsponge_x5_254/Nargo.toml (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidonsponge_x5_254/Prover.toml (100%) rename crates/{nargo => nargo_cli}/tests/test_data/poseidonsponge_x5_254/src/main.nr (100%) diff --git a/crates/nargo/tests/test_data/poseidon_bn254_hash/Nargo.toml b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/Nargo.toml similarity index 100% rename from crates/nargo/tests/test_data/poseidon_bn254_hash/Nargo.toml rename to crates/nargo_cli/tests/test_data/poseidon_bn254_hash/Nargo.toml diff --git a/crates/nargo/tests/test_data/poseidon_bn254_hash/Prover.toml b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/Prover.toml similarity index 100% rename from crates/nargo/tests/test_data/poseidon_bn254_hash/Prover.toml rename to crates/nargo_cli/tests/test_data/poseidon_bn254_hash/Prover.toml diff --git a/crates/nargo/tests/test_data/poseidon_bn254_hash/src/main.nr b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr similarity index 100% rename from crates/nargo/tests/test_data/poseidon_bn254_hash/src/main.nr rename to crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr diff --git a/crates/nargo/tests/test_data/poseidonsponge_x5_254/Nargo.toml 
b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/Nargo.toml similarity index 100% rename from crates/nargo/tests/test_data/poseidonsponge_x5_254/Nargo.toml rename to crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/Nargo.toml diff --git a/crates/nargo/tests/test_data/poseidonsponge_x5_254/Prover.toml b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/Prover.toml similarity index 100% rename from crates/nargo/tests/test_data/poseidonsponge_x5_254/Prover.toml rename to crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/Prover.toml diff --git a/crates/nargo/tests/test_data/poseidonsponge_x5_254/src/main.nr b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr similarity index 100% rename from crates/nargo/tests/test_data/poseidonsponge_x5_254/src/main.nr rename to crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr From 7f6dede414c46790545b1994713d1976c5623711 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 1 May 2023 19:53:28 +0100 Subject: [PATCH 26/66] chore(noir): Release 0.5.1 (#1264) * chore(noir): Release 0.5.1 * chore: Update lockfile --- CHANGELOG.md | 8 ++++++++ Cargo.lock | 22 +++++++++++----------- Cargo.toml | 2 +- flake.nix | 2 +- 4 files changed, 21 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 292c08fb8c2..c554330a470 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.5.1](https://github.com/noir-lang/noir/compare/v0.5.0...v0.5.1) (2023-05-01) + + +### Bug Fixes + +* Add Poseidon examples into integration tests ([#1257](https://github.com/noir-lang/noir/issues/1257)) ([2a5aa52](https://github.com/noir-lang/noir/commit/2a5aa52435294ddeda5b4506c3117cbd164ca2ff)) +* fix `linear_eval is no 0` serialisation issue ([#1226](https://github.com/noir-lang/noir/issues/1226)) ([41d96ae](https://github.com/noir-lang/noir/commit/41d96ae9bbb9ce7010451cae5dc1f66d5e57d45b)) + ## [0.5.0](https://github.com/noir-lang/noir/compare/v0.4.1...v0.5.0) 
(2023-04-28) diff --git a/Cargo.lock b/Cargo.lock index b281ed5a9d0..b8647a9622d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -137,7 +137,7 @@ checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" [[package]] name = "arena" -version = "0.5.0" +version = "0.5.1" dependencies = [ "generational-arena", ] @@ -1161,7 +1161,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.5.0" +version = "0.5.1" dependencies = [ "cfg-if 1.0.0", "codespan-reporting 0.9.5", @@ -1638,7 +1638,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.5.0" +version = "0.5.1" [[package]] name = "itertools" @@ -1838,7 +1838,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "iter-extended", @@ -1852,7 +1852,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "acvm-backend-barretenberg", @@ -1883,7 +1883,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "build-data", @@ -1899,7 +1899,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "iter-extended", @@ -1911,7 +1911,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "clap", @@ -1926,7 +1926,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.5.0" +version = "0.5.1" dependencies = [ "chumsky", "codespan", @@ -1937,7 +1937,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "arena", @@ -1953,7 +1953,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.5.0" +version = "0.5.1" dependencies = [ "acvm", "arena", diff --git a/Cargo.toml b/Cargo.toml index 34dfe88e2ba..badaab032ce 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ 
-17,7 +17,7 @@ default-members = ["crates/nargo_cli"] [workspace.package] # x-release-please-start-version -version = "0.5.0" +version = "0.5.1" # x-release-please-end authors = ["The Noir Team "] edition = "2021" diff --git a/flake.nix b/flake.nix index 03d8f6be43f..7109e266e0f 100644 --- a/flake.nix +++ b/flake.nix @@ -106,7 +106,7 @@ commonArgs = environment // { pname = "noir"; # x-release-please-start-version - version = "0.5.0"; + version = "0.5.1"; # x-release-please-end # Use our custom stdenv to build and test our Rust project From 52ce1fd3234bb81ef203feeff3c3a240860df1df Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 1 May 2023 15:42:55 -0400 Subject: [PATCH 27/66] chore(ssa refactor): Add all remaining doc comments to ssa generation pass (#1256) * Add remaining doc comments * Update crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs Co-authored-by: kevaundray * Update crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs Co-authored-by: kevaundray * Address PR feedback --------- Co-authored-by: kevaundray --- crates/noirc_evaluator/src/ssa_refactor/ir.rs | 1 - .../src/ssa_refactor/ir/basic_block.rs | 23 ++++--- .../ssa_refactor/ir/basic_block_visitors.rs | 23 ------- .../src/ssa_refactor/ir/cfg.rs | 57 +++++++++-------- .../src/ssa_refactor/ir/constant.rs | 6 +- .../src/ssa_refactor/ir/dfg.rs | 40 +++--------- .../src/ssa_refactor/ir/function.rs | 30 +++++---- .../src/ssa_refactor/ir/instruction.rs | 44 +++---------- .../src/ssa_refactor/ir/printer.rs | 7 ++- .../src/ssa_refactor/ir/types.rs | 4 ++ .../src/ssa_refactor/ir/value.rs | 1 + .../src/ssa_refactor/ssa_builder/mod.rs | 18 +++++- .../src/ssa_refactor/ssa_gen/context.rs | 61 +++++++++++++++++-- .../src/ssa_refactor/ssa_gen/mod.rs | 3 + .../src/ssa_refactor/ssa_gen/program.rs | 3 +- .../src/ssa_refactor/ssa_gen/value.rs | 27 ++++++++ 16 files changed, 204 insertions(+), 144 deletions(-) delete mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs diff --git 
a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs index 1a1ca9eab89..1f6cca9157d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -1,5 +1,4 @@ pub(crate) mod basic_block; -pub(crate) mod basic_block_visitors; pub(crate) mod cfg; pub(crate) mod constant; pub(crate) mod dfg; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index f6ca293f0fd..8a3f74c4a64 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -18,12 +18,6 @@ pub(crate) struct BasicBlock { /// Instructions in the basic block. instructions: Vec, - /// A basic block is considered sealed - /// if no further predecessors will be added to it. - /// Since only filled blocks can have successors, - /// predecessors are always filled. - is_sealed: bool, - /// The terminating instruction for the basic block. /// /// This will be a control flow instruction. This is only @@ -35,14 +29,20 @@ pub(crate) struct BasicBlock { pub(crate) type BasicBlockId = Id; impl BasicBlock { + /// Create a new BasicBlock with the given parameters. + /// Parameters can also be added later via BasicBlock::add_parameter pub(crate) fn new(parameters: Vec) -> Self { - Self { parameters, instructions: Vec::new(), is_sealed: false, terminator: None } + Self { parameters, instructions: Vec::new(), terminator: None } } + /// Returns the parameters of this block pub(crate) fn parameters(&self) -> &[ValueId] { &self.parameters } + /// Adds a parameter to this BasicBlock. + /// Expects that the ValueId given should refer to a Value::Param + /// instance with its position equal to self.parameters.len(). 
pub(crate) fn add_parameter(&mut self, parameter: ValueId) { self.parameters.push(parameter); } @@ -52,14 +52,23 @@ impl BasicBlock { self.instructions.push(instruction); } + /// Retrieve a reference to all instructions in this block. pub(crate) fn instructions(&self) -> &[InstructionId] { &self.instructions } + /// Sets the terminator instruction of this block. + /// + /// A properly-constructed block will always terminate with a TerminatorInstruction - + /// which either jumps to another block or returns from the current function. A block + /// will only have no terminator if it is still under construction. pub(crate) fn set_terminator(&mut self, terminator: TerminatorInstruction) { self.terminator = Some(terminator); } + /// Returns the terminator of this block. + /// + /// Once this block has finished construction, this is expected to always be Some. pub(crate) fn terminator(&self) -> Option<&TerminatorInstruction> { self.terminator.as_ref() } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs deleted file mode 100644 index e0d5dc1b3df..00000000000 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block_visitors.rs +++ /dev/null @@ -1,23 +0,0 @@ -use super::{ - basic_block::{BasicBlock, BasicBlockId}, - instruction::TerminatorInstruction, -}; - -/// Visit all successors of a block with a given visitor closure. The closure -/// arguments are the branch instruction that is used to reach the successor, -/// and the id of the successor block itself. -pub(crate) fn visit_block_succs(basic_block: &BasicBlock, mut visit: F) { - match basic_block - .terminator() - .expect("ICE: No terminator indicates block is still under construction.") - { - TerminatorInstruction::Jmp { destination, .. } => visit(*destination), - TerminatorInstruction::JmpIf { then_destination, else_destination, .. 
} => { - visit(*then_destination); - visit(*else_destination); - } - TerminatorInstruction::Return { .. } => { - // The last block of the control flow - no successors - } - } -} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs index d443d574ca8..b2d16b29bfd 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs @@ -2,7 +2,6 @@ use std::collections::{HashMap, HashSet}; use super::{ basic_block::{BasicBlock, BasicBlockId}, - basic_block_visitors, function::Function, }; @@ -33,25 +32,30 @@ impl ControlFlowGraph { cfg } + /// Compute all of the edges between each block in the function fn compute(&mut self, func: &Function) { for (basic_block_id, basic_block) in func.dfg.basic_blocks_iter() { self.compute_block(basic_block_id, basic_block); } } + /// Compute all of the edges for the current block given fn compute_block(&mut self, basic_block_id: BasicBlockId, basic_block: &BasicBlock) { - basic_block_visitors::visit_block_succs(basic_block, |dest| { + for dest in basic_block.successors() { self.add_edge(basic_block_id, dest); - }); + } } + /// Clears out a given block's successors. This also removes the given block from + /// being a predecessor of any of its previous successors. fn invalidate_block_successors(&mut self, basic_block_id: BasicBlockId) { let node = self .data .get_mut(&basic_block_id) .expect("ICE: Attempted to invalidate cfg node successors for non-existent node."); - let old_successors = node.successors.clone(); - node.successors.clear(); + + let old_successors = std::mem::take(&mut node.successors); + for successor_id in old_successors { self.data .get_mut(&successor_id) @@ -71,6 +75,7 @@ impl ControlFlowGraph { self.compute_block(basic_block_id, basic_block); } + /// Add a directed edge making `from` a predecessor of `to`. 
fn add_edge(&mut self, from: BasicBlockId, to: BasicBlockId) { let predecessor_node = self.data.entry(from).or_default(); assert!( @@ -87,7 +92,7 @@ impl ControlFlowGraph { } /// Get an iterator over the CFG predecessors to `basic_block_id`. - pub(crate) fn pred_iter( + pub(crate) fn predecessors( &self, basic_block_id: BasicBlockId, ) -> impl ExactSizeIterator + '_ { @@ -100,7 +105,7 @@ impl ControlFlowGraph { } /// Get an iterator over the CFG successors to `basic_block_id`. - pub(crate) fn succ_iter( + pub(crate) fn successors( &self, basic_block_id: BasicBlockId, ) -> impl ExactSizeIterator + '_ { @@ -133,11 +138,11 @@ mod tests { fn jumps() { // Build function of form // fn func { - // block0(cond: u1): + // block0(cond: u1): // jmpif cond, then: block2, else: block1 - // block1(): + // block1(): // jmpif cond, then: block1, else: block2 - // block2(): + // block2(): // return () // } let func_id = Id::test_new(0); @@ -163,13 +168,13 @@ mod tests { #[allow(clippy::needless_collect)] { - let block0_predecessors: Vec<_> = cfg.pred_iter(block0_id).collect(); - let block1_predecessors: Vec<_> = cfg.pred_iter(block1_id).collect(); - let block2_predecessors: Vec<_> = cfg.pred_iter(block2_id).collect(); + let block0_predecessors: Vec<_> = cfg.predecessors(block0_id).collect(); + let block1_predecessors: Vec<_> = cfg.predecessors(block1_id).collect(); + let block2_predecessors: Vec<_> = cfg.predecessors(block2_id).collect(); - let block0_successors: Vec<_> = cfg.succ_iter(block0_id).collect(); - let block1_successors: Vec<_> = cfg.succ_iter(block1_id).collect(); - let block2_successors: Vec<_> = cfg.succ_iter(block2_id).collect(); + let block0_successors: Vec<_> = cfg.successors(block0_id).collect(); + let block1_successors: Vec<_> = cfg.successors(block1_id).collect(); + let block2_successors: Vec<_> = cfg.successors(block2_id).collect(); assert_eq!(block0_predecessors.len(), 0); assert_eq!(block1_predecessors.len(), 2); @@ -192,13 +197,13 @@ mod tests { // Modify 
function to form: // fn func { - // block0(cond: u1): + // block0(cond: u1): // jmpif cond, then: block1, else: ret_block - // block1(): + // block1(): // jmpif cond, then: block1, else: block2 - // block2(): + // block2(): // jmp ret_block() - // ret_block(): + // ret_block(): // return () // } let ret_block_id = func.dfg.make_block(); @@ -221,13 +226,13 @@ mod tests { #[allow(clippy::needless_collect)] { - let block0_predecessors: Vec<_> = cfg.pred_iter(block0_id).collect(); - let block1_predecessors: Vec<_> = cfg.pred_iter(block1_id).collect(); - let block2_predecessors: Vec<_> = cfg.pred_iter(block2_id).collect(); + let block0_predecessors: Vec<_> = cfg.predecessors(block0_id).collect(); + let block1_predecessors: Vec<_> = cfg.predecessors(block1_id).collect(); + let block2_predecessors: Vec<_> = cfg.predecessors(block2_id).collect(); - let block0_successors: Vec<_> = cfg.succ_iter(block0_id).collect(); - let block1_successors: Vec<_> = cfg.succ_iter(block1_id).collect(); - let block2_successors: Vec<_> = cfg.succ_iter(block2_id).collect(); + let block0_successors: Vec<_> = cfg.successors(block0_id).collect(); + let block1_successors: Vec<_> = cfg.successors(block1_id).collect(); + let block2_successors: Vec<_> = cfg.successors(block2_id).collect(); assert_eq!(block0_predecessors.len(), 0); assert_eq!(block1_predecessors.len(), 2); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs index 4c793a144da..63c1e528471 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/constant.rs @@ -2,7 +2,7 @@ use acvm::FieldElement; use super::map::Id; -/// Represents a numeric constant in Ssa. Constants themselves are +/// Represents a numeric constant in SSA. Constants themselves are /// uniqued in the DataFlowGraph and immutable. 
/// /// This is just a thin wrapper around FieldElement so that @@ -12,10 +12,12 @@ use super::map::Id; pub(crate) struct NumericConstant(FieldElement); impl NumericConstant { + /// Create a new NumericConstant with the given Field value pub(crate) fn new(value: FieldElement) -> Self { Self(value) } + /// Retrieves the Field value for this constant pub(crate) fn value(&self) -> FieldElement { self.0 } @@ -23,6 +25,8 @@ impl NumericConstant { pub(crate) type NumericConstantId = Id; +// Implement some common numeric operations for NumericConstants +// for convenience so developers do not always have to unwrap them to use them. impl std::ops::Add for NumericConstant { type Output = NumericConstant; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 4d2ebe31efb..67569c6a4c2 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -15,34 +15,10 @@ use super::{ use acvm::FieldElement; use iter_extended::vecmap; -#[derive(Debug, Default)] -/// A convenience wrapper to store `Value`s. -pub(crate) struct ValueList(Vec>); - -impl ValueList { - /// Inserts an element to the back of the list and - /// returns the `position` - pub(crate) fn push(&mut self, value: ValueId) -> usize { - self.0.push(value); - self.len() - 1 - } - - /// Returns the number of values in the list. - fn len(&self) -> usize { - self.0.len() - } - - /// Removes all items from the list. - fn clear(&mut self) { - self.0.clear(); - } - - /// Returns the ValueId's as a slice. - pub(crate) fn as_slice(&self) -> &[ValueId] { - &self.0 - } -} - +/// The DataFlowGraph contains most of the actual data in a function including +/// its blocks, instructions, and values. This struct is largely responsible for +/// owning most data in a function and handing out Ids to this data that can be +/// shared without worrying about ownership. 
#[derive(Debug, Default)] pub(crate) struct DataFlowGraph { /// All of the instructions in a function @@ -57,7 +33,7 @@ pub(crate) struct DataFlowGraph { /// Currently, we need to define them in a better way /// Call instructions require the func signature, but /// other instructions may need some more reading on my part - results: HashMap, + results: HashMap>, /// Storage for all of the values defined in this /// function. @@ -243,8 +219,7 @@ impl DataFlowGraph { }); // Add value to the list of results for this instruction - let actual_res_position = results.push(value_id); - assert_eq!(actual_res_position, expected_res_position); + results.push(value_id); value_id } @@ -259,6 +234,7 @@ impl DataFlowGraph { self.results.get(&instruction_id).expect("expected a list of Values").as_slice() } + /// Add a parameter to the given block pub(crate) fn add_block_parameter(&mut self, block_id: BasicBlockId, typ: Type) -> Id { let block = &mut self.blocks[block_id]; let position = block.parameters().len(); @@ -267,6 +243,8 @@ impl DataFlowGraph { parameter } + /// Insert an instruction at the end of a given block. + /// If the block already has a terminator, the instruction is inserted before the terminator. pub(crate) fn insert_instruction_in_block( &mut self, block: BasicBlockId, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 6789e5364fe..8d90a139118 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -1,24 +1,16 @@ -use std::collections::HashMap; - use super::basic_block::BasicBlockId; use super::dfg::DataFlowGraph; -use super::instruction::InstructionId; use super::map::Id; use super::types::Type; -use noirc_errors::Location; - /// A function holds a list of instructions. 
/// These instructions are further grouped into Basic blocks /// -/// Like Crane-lift all functions outside of the current function is seen as external. -/// To reference external functions, one must first import the function signature -/// into the current function's context. +/// All functions outside of the current function are seen as external. +/// To reference external functions its FunctionId can be used but this +/// cannot be checked for correctness until inlining is performed. #[derive(Debug)] pub struct Function { - /// Maps instructions to source locations - source_locations: HashMap, - /// The first basic block in the function entry_block: BasicBlockId, @@ -27,6 +19,8 @@ pub struct Function { id: FunctionId, + /// The DataFlowGraph holds the majority of data pertaining to the function + /// including its blocks, instructions, and values. pub(crate) dfg: DataFlowGraph, } @@ -37,23 +31,35 @@ impl Function { pub(crate) fn new(name: String, id: FunctionId) -> Self { let mut dfg = DataFlowGraph::default(); let entry_block = dfg.make_block(); - Self { name, source_locations: HashMap::new(), id, entry_block, dfg } + Self { name, id, entry_block, dfg } } + /// The name of the function. + /// Used exclusively for debugging purposes. pub(crate) fn name(&self) -> &str { &self.name } + /// The id of the function. pub(crate) fn id(&self) -> FunctionId { self.id } + /// Retrieves the entry block of a function. + /// + /// A function's entry block contains the instructions + /// to be executed first when the function is called. + /// The function's parameters are also stored as the + /// entry block's parameters. pub(crate) fn entry_block(&self) -> BasicBlockId { self.entry_block } } /// FunctionId is a reference for a function +/// +/// This Id is how each function refers to other functions +/// within Call instructions. 
pub(crate) type FunctionId = Id; #[derive(Debug, Default, Clone)] diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 545519e316f..66f8b1e3b17 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -3,6 +3,11 @@ use acvm::acir::BlackBoxFunc; use super::{basic_block::BasicBlockId, map::Id, types::Type, value::ValueId}; /// Reference to an instruction +/// +/// Note that InstructionIds are not unique. That is, two InstructionIds +/// may refer to the same Instruction data. This is because, although +/// identical, instructions may have different results based on their +/// placement within a block. pub(crate) type InstructionId = Id; /// These are similar to built-ins in other languages. @@ -36,6 +41,8 @@ impl std::fmt::Display for Intrinsic { } impl Intrinsic { + /// Lookup an Intrinsic by name and return it if found. + /// If there is no such intrinsic by that name, None is returned. pub(crate) fn lookup(name: &str) -> Option { match name { "println" => Some(Intrinsic::Println), @@ -94,42 +101,6 @@ pub(crate) enum Instruction { } impl Instruction { - /// Returns the number of results that this instruction - /// produces. - pub(crate) fn num_fixed_results(&self) -> usize { - match self { - Instruction::Binary(_) => 1, - Instruction::Cast(..) => 0, - Instruction::Not(_) => 1, - Instruction::Truncate { .. } => 1, - Instruction::Constrain(_) => 0, - // This returns 0 as the result depends on the function being called - Instruction::Call { .. } => 0, - Instruction::Allocate { .. } => 1, - Instruction::Load { .. } => 1, - Instruction::Store { .. } => 0, - } - } - - /// Returns the number of arguments required for a call - pub(crate) fn num_fixed_arguments(&self) -> usize { - // Match-all fields syntax (..) 
is avoided on most cases of this match to ensure that - // if an extra argument is ever added to any of these variants, an error - // is issued pointing to this spot to update it here as well. - match self { - Instruction::Binary(_) => 2, - Instruction::Cast(_, _) => 1, - Instruction::Not(_) => 1, - Instruction::Truncate { value: _, bit_size: _, max_bit_size: _ } => 1, - Instruction::Constrain(_) => 1, - // This returns 0 as the arguments depend on the function being called - Instruction::Call { .. } => 0, - Instruction::Allocate { size: _ } => 1, - Instruction::Load { address: _ } => 1, - Instruction::Store { address: _, value: _ } => 2, - } - } - /// Returns the type that this instruction will return. pub(crate) fn result_type(&self) -> InstructionResultType { match self { @@ -204,6 +175,7 @@ pub(crate) struct Binary { } impl Binary { + /// The type of this Binary instruction's result pub(crate) fn result_type(&self) -> InstructionResultType { match self.operator { BinaryOp::Eq | BinaryOp::Lt => InstructionResultType::Known(Type::bool()), diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index a0ab65bf639..2e467017885 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -13,6 +13,7 @@ use super::{ value::ValueId, }; +/// Helper function for Function's Display impl to pretty-print the function with the given formatter. pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { writeln!(f, "fn {} {} {{", function.name(), function.id())?; display_block_with_successors(function, function.entry_block(), &mut HashSet::new(), f)?; @@ -20,7 +21,7 @@ pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result } /// Displays a block followed by all of its successors recursively. -/// This uses a HashSet to keep track of the visited blocks. 
Otherwise, +/// This uses a HashSet to keep track of the visited blocks. Otherwise /// there would be infinite recursion for any loops in the IR. pub(crate) fn display_block_with_successors( function: &Function, @@ -39,6 +40,7 @@ pub(crate) fn display_block_with_successors( Ok(()) } +/// Display a single block. This will not display the block's successors. pub(crate) fn display_block( function: &Function, block_id: BasicBlockId, @@ -80,10 +82,12 @@ fn value_list_with_types(function: &Function, values: &[ValueId]) -> String { .join(", ") } +/// Display each value separated by a comma fn value_list(function: &Function, values: &[ValueId]) -> String { vecmap(values, |id| value(function, *id)).join(", ") } +/// Display a terminator instruction pub(crate) fn display_terminator( function: &Function, terminator: Option<&TerminatorInstruction>, @@ -109,6 +113,7 @@ pub(crate) fn display_terminator( } } +/// Display an arbitrary instruction pub(crate) fn display_instruction( function: &Function, instruction: InstructionId, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs index 8a0f825a117..e00c25a257c 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs @@ -30,18 +30,22 @@ pub(crate) enum Type { } impl Type { + /// Create a new signed integer type with the given amount of bits. pub(crate) fn signed(bit_size: u32) -> Type { Type::Numeric(NumericType::Signed { bit_size }) } + /// Create a new unsigned integer type with the given amount of bits. pub(crate) fn unsigned(bit_size: u32) -> Type { Type::Numeric(NumericType::Unsigned { bit_size }) } + /// Creates the boolean type, represented as u1. pub(crate) fn bool() -> Type { Type::unsigned(1) } + /// Creates the native field type. 
pub(crate) fn field() -> Type { Type::Numeric(NumericType::NativeField) } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs index 868aee2199e..f8197b06c8a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs @@ -47,6 +47,7 @@ pub(crate) enum Value { } impl Value { + /// Retrieves the type of this Value pub(crate) fn get_type(&self) -> Type { match self { Value::Instruction { typ, .. } => *typ, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index df80799c28a..aa67cbed583 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -27,6 +27,10 @@ pub(crate) struct FunctionBuilder { } impl FunctionBuilder { + /// Creates a new FunctionBuilder to build the function with the given FunctionId. + /// + /// This creates the new function internally so there is no need to call .new_function() + /// right after constructing a new FunctionBuilder. pub(crate) fn new(function_name: String, function_id: FunctionId) -> Self { let new_function = Function::new(function_name, function_id); let current_block = new_function.entry_block(); @@ -34,7 +38,11 @@ impl FunctionBuilder { Self { current_function: new_function, current_block, finished_functions: Vec::new() } } - /// Finish the current function and create a new function + /// Finish the current function and create a new function. + /// + /// A FunctionBuilder can always only work on one function at a time, so care + /// should be taken not to finish a function that is still in progress by calling + /// new_function before the current function is finished. 
pub(crate) fn new_function(&mut self, name: String, function_id: FunctionId) { let new_function = Function::new(name, function_id); self.current_block = new_function.entry_block(); @@ -43,11 +51,14 @@ impl FunctionBuilder { self.finished_functions.push(old_function); } + /// Consume the FunctionBuilder returning all the functions it has generated. pub(crate) fn finish(mut self) -> Ssa { self.finished_functions.push(self.current_function); Ssa::new(self.finished_functions) } + /// Add a parameter to the current function with the given parameter type. + /// Returns the newly-added parameter. pub(crate) fn add_parameter(&mut self, typ: Type) -> ValueId { let entry = self.current_function.entry_block(); self.current_function.dfg.add_block_parameter(entry, typ) @@ -67,14 +78,19 @@ impl FunctionBuilder { self.numeric_constant(value.into(), Type::field()) } + /// Returns the type of the given value. pub(crate) fn type_of_value(&self, value: ValueId) -> Type { self.current_function.dfg.type_of_value(value) } + /// Insert a new block into the current function and return it. + /// Note that this block is unreachable until another block is set to jump to it. pub(crate) fn insert_block(&mut self) -> BasicBlockId { self.current_function.dfg.make_block() } + /// Adds a parameter with the given type to the given block. + /// Returns the newly-added parameter. 
pub(crate) fn add_block_parameter(&mut self, block: BasicBlockId, typ: Type) -> ValueId { self.current_function.dfg.add_block_parameter(block, typ) } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index df54d5bd079..78c64f9fad8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -16,9 +16,17 @@ use crate::ssa_refactor::ssa_builder::FunctionBuilder; use super::value::{Tree, Value, Values}; -// TODO: Make this a threadsafe queue so we can compile functions in parallel -type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; - +/// The FunctionContext is the main context object for translating a +/// function into SSA form during the SSA-gen pass. +/// +/// This context can be used to build any amount of functions, +/// so long as it is cleared out in between each function via +/// calling self.new_function(). +/// +/// If compiling many functions across multiple threads, there should +/// be a separate FunctionContext for each thread. Each FunctionContext +/// can communicate via the SharedContext field which as its name suggests +/// is the only part of the context that needs to be shared between threads. pub(super) struct FunctionContext<'a> { definitions: HashMap, @@ -26,16 +34,50 @@ pub(super) struct FunctionContext<'a> { shared_context: &'a SharedContext, } -/// Shared context for all functions during ssa codegen +/// Shared context for all functions during ssa codegen. This is the only +/// object that is shared across all threads when generating ssa in multiple threads. +/// +/// The main job of the SharedContext is to remember which functions are already +/// compiled, what their IDs are, and keep a queue of which functions still need to +/// be compiled. 
+/// +/// SSA can be generated by continuously popping from this function_queue and using +/// FunctionContext to generate from the popped function id. Once the queue is empty, +/// no other functions are reachable and the SSA generation is finished. pub(super) struct SharedContext { + /// All currently known functions which have already been assigned function ids. + /// These functions are all either currently having their SSA generated or are + /// already finished. functions: RwLock>, + + /// Queue of which functions still need to be compiled. + /// + /// The queue is currently Last-in First-out (LIFO) but this is an + /// implementation detail that can be trivially changed and should + /// not impact the resulting SSA besides changing which IDs are assigned + /// to which functions. function_queue: Mutex, + + /// Shared counter used to assign the ID of the next function function_counter: AtomicCounter, + /// The entire monomorphized source program pub(super) program: Program, } +/// The queue of functions remaining to compile +type FunctionQueue = Vec<(ast::FuncId, IrFunctionId)>; + impl<'a> FunctionContext<'a> { + /// Create a new FunctionContext to compile the first function in the shared_context's + /// function queue. + /// + /// This will pop from the function queue, so it is expected the shared_context's function + /// queue is non-empty at the time of calling this function. This can be ensured by calling + /// `shared_context.get_or_queue_function(function_to_queue)` before calling this constructor. + /// + /// `function_name` and `parameters` are expected to be the name and parameters of the function + /// this constructor will pop from the function queue. pub(super) fn new( function_name: String, parameters: &Parameters, @@ -52,6 +94,11 @@ impl<'a> FunctionContext<'a> { this } + /// Finish building the current function and switch to building a new function with the + /// given name, id, and parameters. 
+ /// + /// Note that the previous function cannot be resumed after calling this. Developers should + /// avoid calling new_function until the previous function is completely finished with ssa-gen. pub(super) fn new_function(&mut self, id: IrFunctionId, name: String, parameters: &Parameters) { self.definitions.clear(); self.builder.new_function(name, id); @@ -127,6 +174,10 @@ impl<'a> FunctionContext<'a> { Self::map_type_helper(typ, &mut |x| x) } + /// Converts a non-tuple type into an SSA type. Panics if a tuple type is passed. + /// + /// This function is needed since this SSA IR has no concept of tuples and thus no type for + /// them. Use `convert_type` if tuple types need to be handled correctly. pub(super) fn convert_non_tuple_type(typ: &ast::Type) -> Type { match typ { ast::Type::Field => Type::field(), @@ -305,6 +356,7 @@ fn convert_operator(op: noirc_frontend::BinaryOpKind) -> BinaryOp { } impl SharedContext { + /// Create a new SharedContext for the given monomorphized program. pub(super) fn new(program: Program) -> Self { Self { functions: Default::default(), @@ -314,6 +366,7 @@ impl SharedContext { } } + /// Pops the next function from the shared function queue, returning None if the queue is empty. pub(super) fn pop_next_function_in_queue(&self) -> Option<(ast::FuncId, IrFunctionId)> { self.function_queue.lock().expect("Failed to lock function_queue").pop() } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 4b93a7e1185..d6c5731e147 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -16,6 +16,9 @@ use self::{ use super::ir::{instruction::BinaryOp, types::Type, value::ValueId}; +/// Generates SSA for the given monomorphized program. +/// +/// This function will generate the SSA but does not perform any optimizations on it. 
pub fn generate_ssa(program: Program) -> Ssa { let context = SharedContext::new(program); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs index 03eb76dec50..99d49456210 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs @@ -2,12 +2,13 @@ use std::fmt::Display; use crate::ssa_refactor::ir::function::Function; -/// Contains the entire Ssa representation of the program +/// Contains the entire SSA representation of the program. pub struct Ssa { functions: Vec, } impl Ssa { + /// Create a new Ssa object from the given SSA functions pub fn new(functions: Vec) -> Self { Self { functions } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs index fa27e70ad9b..02011adbaa8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs @@ -5,12 +5,27 @@ use crate::ssa_refactor::ir::value::ValueId as IrValueId; use super::context::FunctionContext; +/// A general Tree structure which is used in the SSA generation pass +/// to represent both values and types which may be tuples. +/// +/// Since the underlying SSA intermediate representation (IR) does not +/// support tuples directly, they're instead represented as Tree::Branch +/// nodes. For example, a single ssa value may be a Tree::Leaf(Value), +/// while a tuple would be a Tree::Branch(values). #[derive(Debug, Clone)] pub(super) enum Tree { Branch(Vec>), Leaf(T), } +/// A single value in ssa form. This wrapper enum is needed mostly to enable +/// us to automatically create a Instruction::Load whenever a mutable variable +/// is referenced. +/// +/// Note that these values wrap the ValueIds +/// used internally by functions in the ssa ir and should thus be isolated +/// to a given function. 
If used outside their function of origin, the IDs +/// would be invalid. #[derive(Debug, Copy, Clone)] pub(super) enum Value { Normal(IrValueId), @@ -43,9 +58,15 @@ impl Value { } } +/// A tree of values. +/// +/// Compared to Value alone, the addition of being able to represent structs/tuples as +/// a Tree::Branch means this type can hold any kind of value a frontend expression may return. +/// This is why it is used as the return type for every codegen_* function in ssa_gen/mod.rs. pub(super) type Values = Tree; impl Tree { + /// Flattens the tree into a vector of each leaf value pub(super) fn flatten(self) -> Vec { match self { Tree::Branch(values) => values.into_iter().flat_map(Tree::flatten).collect(), @@ -53,6 +74,7 @@ impl Tree { } } + /// Returns the total amount of leaves in this tree pub(super) fn count_leaves(&self) -> usize { match self { Tree::Branch(trees) => trees.iter().map(|tree| tree.count_leaves()).sum(), @@ -72,6 +94,7 @@ impl Tree { } } + /// Map mutably over this tree, mutating each leaf value within using the given function pub(super) fn map_mut(&mut self, mut f: impl FnMut(&T) -> Tree) { self.map_mut_helper(&mut f); } @@ -83,6 +106,10 @@ impl Tree { } } + /// Calls the given function on each leaf node, mapping this tree into a new one. + /// + /// Because the given function returns a Tree rather than a U, it is possible + /// to use this function to turn Leaf nodes into either other Leaf nodes or even Branch nodes.
pub(super) fn map(self, mut f: impl FnMut(T) -> Tree) -> Tree { self.map_helper(&mut f) } From 752d2f97a80ec40fb31a136df4e662dfab61e8be Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 2 May 2023 13:26:17 +0100 Subject: [PATCH 28/66] chore: fix clippy warning (#1270) --- crates/noirc_evaluator/src/ssa_refactor/ir/map.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index a99ff06c5fb..14ea521359d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -123,7 +123,7 @@ impl DenseMap { /// /// The id-element pairs are ordered by the numeric values of the ids. pub(crate) fn iter(&self) -> impl ExactSizeIterator, &T)> { - let ids_iter = (0..self.storage.len()).into_iter().map(|idx| Id::new(idx)); + let ids_iter = (0..self.storage.len()).map(|idx| Id::new(idx)); ids_iter.zip(self.storage.iter()) } } From 562c185af5a97b755f5a554a32da518562f88be1 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Tue, 2 May 2023 15:55:15 +0100 Subject: [PATCH 29/66] chore(noir): constrain expr; -> assert(expr); (#1276) * chore(noir): constrain expr; -> assert(expr); * chore(noir): replace remaining `constrain` with `assert(expr)` --------- Co-authored-by: Tom French --- crates/nargo_cli/src/cli/new_cmd.rs | 2 +- .../tests/compile_tests_data/pass/basic.nr | 2 +- .../compile_tests_data/pass/basic_import.nr | 2 +- .../target_tests_data/pass/basic/src/main.nr | 2 +- .../target_tests_data/pass/import/src/main.nr | 2 +- .../tests/test_data/1_mul/src/main.nr | 2 +- .../tests/test_data/2_div/src/main.nr | 4 +- .../tests/test_data/3_add/src/main.nr | 4 +- .../tests/test_data/4_sub/src/main.nr | 2 +- .../tests/test_data/5_over/src/main.nr | 4 +- .../nargo_cli/tests/test_data/6/src/main.nr | 4 +- 
.../tests/test_data/6_array/src/main.nr | 24 +-- .../nargo_cli/tests/test_data/7/src/main.nr | 2 +- .../tests/test_data/7_function/src/main.nr | 32 ++-- .../tests/test_data/8_integration/src/main.nr | 24 +-- .../tests/test_data/9_conditional/src/main.nr | 62 +++--- .../tests/test_data/array_len/src/main.nr | 14 +- .../tests/test_data/array_neq/src/main.nr | 2 +- .../tests/test_data/assign_ex/src/main.nr | 4 +- .../tests/test_data/bit_and/src/main.nr | 6 +- .../tests/test_data/bool_not/src/main.nr | 2 +- .../tests/test_data/bool_or/src/main.nr | 4 +- .../tests/test_data/cast_bool/src/main.nr | 2 +- .../comptime_array_access/src/main.nr | 6 +- .../tests/test_data/comptime_fail/src/main.nr | 6 +- .../comptime_recursion_regression/src/main.nr | 2 +- .../tests/test_data/contracts/src/main.nr | 2 +- .../test_data/ec_baby_jubjub/src/main.nr | 179 ++++++++++-------- .../tests/test_data/generics/src/main.nr | 12 +- .../tests/test_data/global_consts/src/baz.nr | 2 +- .../tests/test_data/global_consts/src/foo.nr | 2 +- .../tests/test_data/global_consts/src/main.nr | 30 +-- .../higher-order-functions/src/main.nr | 22 +-- .../tests/test_data/if_else_chain/src/main.nr | 10 +- .../tests/test_data/main_bool_arg/src/main.nr | 6 +- .../tests/test_data/merkle_insert/src/main.nr | 8 +- .../tests/test_data/modules/src/main.nr | 2 +- .../tests/test_data/modules_more/src/main.nr | 2 +- .../tests/test_data/modulus/src/main.nr | 10 +- .../test_data/numeric_generics/src/main.nr | 10 +- .../test_data/pedersen_check/src/main.nr | 6 +- .../test_data/poseidon_bn254_hash/src/main.nr | 4 +- .../poseidonsponge_x5_254/src/main.nr | 2 +- .../tests/test_data/pred_eq/src/main.nr | 2 +- .../tests/test_data/regression/src/main.nr | 14 +- .../tests/test_data/scalar_mul/src/main.nr | 4 +- .../tests/test_data/schnorr/src/main.nr | 2 +- .../tests/test_data/sha256/src/main.nr | 2 +- .../tests/test_data/sha2_blocks/src/main.nr | 8 +- .../tests/test_data/sha2_byte/src/main.nr | 4 +- 
.../tests/test_data/simple_shield/src/main.nr | 2 +- .../tests/test_data/strings/src/main.nr | 12 +- .../tests/test_data/struct/src/main.nr | 16 +- .../tests/test_data/struct_inputs/src/main.nr | 16 +- .../tests/test_data/submodules/src/main.nr | 2 +- .../tests/test_data/to_be_bytes/src/main.nr | 6 +- .../to_bytes_integration/src/main.nr | 8 +- .../tests/test_data/tuples/src/main.nr | 12 +- .../nargo_cli/tests/test_data/xor/src/main.nr | 2 +- .../src/hir/resolution/resolver.rs | 8 +- examples_failing/ecdsa_secp256k1/src/main.nr | 2 +- examples_failing/pow_const/src/main.nr | 2 +- noir_stdlib/src/ec/montcurve.nr | 18 +- noir_stdlib/src/ec/swcurve.nr | 10 +- noir_stdlib/src/ec/tecurve.nr | 8 +- noir_stdlib/src/hash/poseidon.nr | 10 +- noir_stdlib/src/hash/poseidon/bn254.nr | 8 +- 67 files changed, 362 insertions(+), 347 deletions(-) diff --git a/crates/nargo_cli/src/cli/new_cmd.rs b/crates/nargo_cli/src/cli/new_cmd.rs index 9d39f8d1d83..36146028454 100644 --- a/crates/nargo_cli/src/cli/new_cmd.rs +++ b/crates/nargo_cli/src/cli/new_cmd.rs @@ -27,7 +27,7 @@ compiler_version = "{CARGO_PKG_VERSION}" ); const EXAMPLE: &str = r#"fn main(x : Field, y : pub Field) { - constrain x != y; + assert(x != y); } #[test] diff --git a/crates/nargo_cli/tests/compile_tests_data/pass/basic.nr b/crates/nargo_cli/tests/compile_tests_data/pass/basic.nr index 6a678f93fe7..90c0d7ffd3e 100644 --- a/crates/nargo_cli/tests/compile_tests_data/pass/basic.nr +++ b/crates/nargo_cli/tests/compile_tests_data/pass/basic.nr @@ -1,4 +1,4 @@ fn main(x : Field, y : Field) { - constrain x != y; + assert(x != y); } \ No newline at end of file diff --git a/crates/nargo_cli/tests/compile_tests_data/pass/basic_import.nr b/crates/nargo_cli/tests/compile_tests_data/pass/basic_import.nr index 212d0f13590..bb61c0f1edc 100644 --- a/crates/nargo_cli/tests/compile_tests_data/pass/basic_import.nr +++ b/crates/nargo_cli/tests/compile_tests_data/pass/basic_import.nr @@ -7,5 +7,5 @@ fn main(x : Field, y : Field) { let _k 
= std::hash::pedersen([x]); let _l = hello(x); - constrain x != import::hello(y); + assert(x != import::hello(y)); } diff --git a/crates/nargo_cli/tests/target_tests_data/pass/basic/src/main.nr b/crates/nargo_cli/tests/target_tests_data/pass/basic/src/main.nr index 6a678f93fe7..90c0d7ffd3e 100644 --- a/crates/nargo_cli/tests/target_tests_data/pass/basic/src/main.nr +++ b/crates/nargo_cli/tests/target_tests_data/pass/basic/src/main.nr @@ -1,4 +1,4 @@ fn main(x : Field, y : Field) { - constrain x != y; + assert(x != y); } \ No newline at end of file diff --git a/crates/nargo_cli/tests/target_tests_data/pass/import/src/main.nr b/crates/nargo_cli/tests/target_tests_data/pass/import/src/main.nr index 58fb0c3f3f2..cb6476480d8 100644 --- a/crates/nargo_cli/tests/target_tests_data/pass/import/src/main.nr +++ b/crates/nargo_cli/tests/target_tests_data/pass/import/src/main.nr @@ -5,5 +5,5 @@ fn main(x : Field, y : Field) { let _k = dep::std::hash::pedersen([x]); let _l = hello(x); - constrain x != import::hello(y); + assert(x != import::hello(y)); } diff --git a/crates/nargo_cli/tests/test_data/1_mul/src/main.nr b/crates/nargo_cli/tests/test_data/1_mul/src/main.nr index e423eb65667..4587b4b5947 100644 --- a/crates/nargo_cli/tests/test_data/1_mul/src/main.nr +++ b/crates/nargo_cli/tests/test_data/1_mul/src/main.nr @@ -5,5 +5,5 @@ fn main(mut x: u32, y: u32, z: u32) { x *= x; //144 x *= x; //20736 x *= x; //429 981 696 - constrain x == z; + assert(x == z); } diff --git a/crates/nargo_cli/tests/test_data/2_div/src/main.nr b/crates/nargo_cli/tests/test_data/2_div/src/main.nr index 6df75492553..00608cb697d 100644 --- a/crates/nargo_cli/tests/test_data/2_div/src/main.nr +++ b/crates/nargo_cli/tests/test_data/2_div/src/main.nr @@ -1,6 +1,6 @@ // Testing integer division: 7/3 = 2 fn main(mut x: u32, y: u32, z: u32) { let a = x % y; - constrain x / y == z; - constrain a == x - z*y; + assert(x / y == z); + assert(a == x - z*y); } diff --git 
a/crates/nargo_cli/tests/test_data/3_add/src/main.nr b/crates/nargo_cli/tests/test_data/3_add/src/main.nr index 73ed46a8e5d..2884415b81a 100644 --- a/crates/nargo_cli/tests/test_data/3_add/src/main.nr +++ b/crates/nargo_cli/tests/test_data/3_add/src/main.nr @@ -1,8 +1,8 @@ // Test integer addition: 3 + 4 = 7 fn main(mut x: u32, y: u32, z: u32) { x += y; - constrain x == z; + assert(x == z); x *= 8; - constrain x>9; + assert(x>9); } diff --git a/crates/nargo_cli/tests/test_data/4_sub/src/main.nr b/crates/nargo_cli/tests/test_data/4_sub/src/main.nr index 242be90970f..80fc0177e41 100644 --- a/crates/nargo_cli/tests/test_data/4_sub/src/main.nr +++ b/crates/nargo_cli/tests/test_data/4_sub/src/main.nr @@ -1,5 +1,5 @@ // Test unsafe integer subtraction with underflow: 12 - 2418266113 = 1876701195 modulo 2^32 fn main(mut x: u32, y: u32, z: u32) { x -= y; - constrain x == z; + assert(x == z); } diff --git a/crates/nargo_cli/tests/test_data/5_over/src/main.nr b/crates/nargo_cli/tests/test_data/5_over/src/main.nr index 8701c1e6320..4fdff16c5c0 100644 --- a/crates/nargo_cli/tests/test_data/5_over/src/main.nr +++ b/crates/nargo_cli/tests/test_data/5_over/src/main.nr @@ -2,8 +2,8 @@ // Test odd bits integer fn main(mut x: u32, y: u32) { x = x * x; - constrain y == x; + assert(y == x); let c:u3 = 2; - constrain c > x as u3; + assert(c > x as u3); } diff --git a/crates/nargo_cli/tests/test_data/6/src/main.nr b/crates/nargo_cli/tests/test_data/6/src/main.nr index 61be34e6d88..8b350de16c1 100644 --- a/crates/nargo_cli/tests/test_data/6/src/main.nr +++ b/crates/nargo_cli/tests/test_data/6/src/main.nr @@ -12,9 +12,9 @@ fn main(x: [u8; 5], result: pub [u8; 32]) { let mut digest = std::hash::sha256(x); digest[0] = 5 as u8; digest = std::hash::sha256(x); - constrain digest == result; + assert(digest == result); let y = [12,45,78,41]; let h = std::hash::mimc_bn254(y); - constrain h == 18226366069841799622585958305961373004333097209608110160936134895615261821931; + assert(h == 
18226366069841799622585958305961373004333097209608110160936134895615261821931); } diff --git a/crates/nargo_cli/tests/test_data/6_array/src/main.nr b/crates/nargo_cli/tests/test_data/6_array/src/main.nr index 3537740f1e5..30d3ab5a22f 100644 --- a/crates/nargo_cli/tests/test_data/6_array/src/main.nr +++ b/crates/nargo_cli/tests/test_data/6_array/src/main.nr @@ -8,7 +8,7 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { c = z*z*y[i]; z -= c; } - constrain (z==0); //y[4]=0, so c and z are always 0 + assert(z==0); //y[4]=0, so c and z are always 0 //Test 2: c = 2301 as u32; @@ -17,7 +17,7 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { c = z*z*x[i]; z += x[i]*y[i] - c; } - constrain (z==3814912846); + assert(z==3814912846); //Test 3: c = 2300001 as u32; @@ -29,7 +29,7 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { z *= c; } } - constrain (z==41472); + assert(z==41472); //Test 4: z = y[4]; @@ -39,16 +39,16 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { z += x[i+j] - y[i+j]; } } - constrain (z ==11539); + assert(z ==11539); //Test 5: let cc = if z < 1 { x } else { y }; - constrain cc[0] == y[0]; + assert(cc[0] == y[0]); // Test 6: for-each loops for y_elem in y { for x_elem in x { - constrain x_elem != y_elem; + assert(x_elem != y_elem); } } @@ -57,15 +57,15 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { } // fn dyn_array(mut x: [u32; 5], y: Field, z: Field) { -// constrain x[y] == 111; -// constrain x[z] == 101; +// assert(x[y] == 111); +// assert(x[z] == 101); // x[z] = 0; -// constrain x[y] == 111; -// constrain x[1] == 0; +// assert(x[y] == 111); +// assert(x[1] == 0); // if y as u32 < 10 { // x[y] = x[y] - 2; // } else { // x[y] = 0; // } -// constrain x[4] == 109; -// } \ No newline at end of file +// assert(x[4] == 109); +// } diff --git a/crates/nargo_cli/tests/test_data/7/src/main.nr b/crates/nargo_cli/tests/test_data/7/src/main.nr index ec01ea7c4be..a6bba978644 100644 --- 
a/crates/nargo_cli/tests/test_data/7/src/main.nr +++ b/crates/nargo_cli/tests/test_data/7/src/main.nr @@ -6,5 +6,5 @@ use dep::std; fn main(x: [u8; 5], result: [u8; 32]) { let digest = std::hash::blake2s(x); - constrain digest == result; + assert(digest == result); } diff --git a/crates/nargo_cli/tests/test_data/7_function/src/main.nr b/crates/nargo_cli/tests/test_data/7_function/src/main.nr index 96ca9759a8f..5a23b493871 100644 --- a/crates/nargo_cli/tests/test_data/7_function/src/main.nr +++ b/crates/nargo_cli/tests/test_data/7_function/src/main.nr @@ -15,20 +15,20 @@ fn f2(mut x: Field) -> Field{ // Simple example fn test0(mut a: Field) { a = f2(a); - constrain a == 3; + assert(a == 3); } // Nested call fn test1(mut a: Field) { a = f1(a); - constrain a == 4; + assert(a == 4); } fn test2(z: Field, t: u32 ) { let a = z + t as Field; - constrain a == 64; + assert(a == 64); let e = pow(z, t as Field); - constrain e == 714924299; + assert(e == 714924299); } fn pow(base: Field, exponent: Field) -> Field { @@ -46,7 +46,7 @@ fn test3(x: [u8; 3]) -> [u8; 3] { for i in 0..3 { buffer[i] = x[i]; } - constrain buffer == x; + assert(buffer == x); buffer } @@ -59,7 +59,7 @@ fn test_multiple2() -> my_struct { } fn test_multiple3(x: u32, y: u32) { - constrain x == y; + assert(x == y); } struct my_struct { @@ -73,18 +73,18 @@ struct my2 { } fn test_multiple4(s: my_struct) { - constrain s.a == s.b+2; + assert(s.a == s.b+2); } fn test_multiple5(a: (u32, u32)) { - constrain a.0 == a.1+2; + assert(a.0 == a.1+2); } fn test_multiple6(a: my2, b: my_struct, c: (my2, my_struct)) { test_multiple4(a.aa); test_multiple5((b.a, b.b)); - constrain c.0.aa.a == c.1.a; + assert(c.0.aa.a == c.1.a); } @@ -110,28 +110,28 @@ fn main(x: u32 , y: u32 , a: Field, arr1: [u32; 9], arr2: [u32; 9]) { ab = ab + a; (x,ab) }; - constrain my_block.1 == 4; + assert(my_block.1 == 4); test0(a); test1(a); test2(x as Field, y); - constrain bar()[0] == 0; + assert(bar()[0] == 0); let mut b = [0 as u8, 5 as u8, 2 as 
u8]; let c = test3(b); - constrain b == c; + assert(b == c); b[0] = 1 as u8; let cc = test3(b); - constrain c != cc; + assert(c != cc); let e = test_multiple(x, y); - constrain e.1 == e.0 + 54 as u32; + assert(e.1 == e.0 + 54 as u32); let d = test_multiple2(); - constrain d.b == d.a + 2 as u32; + assert(d.b == d.a + 2 as u32); test_multiple3(y, y); //Regression test for issue #628: let result = first(arr_to_field(arr1), arr_to_field(arr2)); - constrain result[0] == arr1[0] as Field; + assert(result[0] == arr1[0] as Field); } diff --git a/crates/nargo_cli/tests/test_data/8_integration/src/main.nr b/crates/nargo_cli/tests/test_data/8_integration/src/main.nr index 57dca4e2ac0..56b02650c27 100644 --- a/crates/nargo_cli/tests/test_data/8_integration/src/main.nr +++ b/crates/nargo_cli/tests/test_data/8_integration/src/main.nr @@ -57,7 +57,7 @@ fn iterate1(mut a0: u32) -> u32{ } fn array_noteq(a: [u32; 4], b: [u32; 4]) { - constrain a != b; + assert(a != b); } fn test3(mut b: [Field; 4]) -> [Field; 4] { @@ -105,7 +105,7 @@ fn iterate3( mut hash: [u32; 8]) -> [u32; 8] { g = f; a = t1+t2; } - constrain a == 2470696267; + assert(a == 2470696267); hash[0] = hash[0] + a; hash[1] = hash[1] + b; hash[2] = hash[2] + c; @@ -126,7 +126,7 @@ fn test5() { sha_hash = iterate2(sha_hash); - constrain sha_hash[0] == 9; + assert(sha_hash[0] == 9); } @@ -244,31 +244,31 @@ fn sig1(x: u32) -> u32 { fn main(a: [u32; 100], b: [u32; 100], c: [u32; 4], mut d: [u32; 4], m: [u8; 32]) { let e = matrix_mul_10(a,b); - constrain e[6] == 1866842232; + assert(e[6] == 1866842232); let f = matrix_mul_2(c,d); - constrain f[3] == 2082554100; + assert(f[3] == 2082554100); let mut a = [1 as u32, 2, 3, 4]; a = test4(a); - constrain a[3] == 20; + assert(a[3] == 20); a = test4(c); - constrain a[3] == c[1] * 10; + assert(a[3] == c[1] * 10); d[0] += c[0]; d[0] += c[1]; - constrain d[0] == 2739986880; + assert(d[0] == 2739986880); let h = iterate1(1); - constrain h == 4; + assert(h == 4); let x = d; array_noteq(x, 
[d[0], d[1], d[2], 0]); let mut h5 = [d[0] as Field, d[1] as Field, d[2] as Field, d[3] as Field]; let t5 = test3(h5); - constrain t5[3] == 3; + assert(t5[3] == 3); h5 = test3(h5); - constrain h5[3] == 3; + assert(h5[3] == 3); test5(); @@ -279,5 +279,5 @@ fn main(a: [u32; 100], b: [u32; 100], c: [u32; 4], mut d: [u32; 4], m: [u8; 32]) sha_hash = iterate3(sha_hash); let h6 = test6(m); - constrain h6[0]== 523008072; //31.. 3800709683; + assert(h6[0]== 523008072); //31.. 3800709683 } diff --git a/crates/nargo_cli/tests/test_data/9_conditional/src/main.nr b/crates/nargo_cli/tests/test_data/9_conditional/src/main.nr index 0f37f3e92f4..48ac639ecf0 100644 --- a/crates/nargo_cli/tests/test_data/9_conditional/src/main.nr +++ b/crates/nargo_cli/tests/test_data/9_conditional/src/main.nr @@ -17,11 +17,11 @@ fn call_intrinsic(x: [u8; 5], result: [u8; 32]) { let mut digest = std::hash::sha256(x); digest[0] = 5 as u8; digest = std::hash::sha256(x); - constrain digest == result; + assert(digest == result); } fn must_be_zero(x: u8) { - constrain x == 0; + assert(x == 0); } fn test3 (x: u8) { @@ -41,19 +41,19 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ let arr: [u8; 2] = [1, 2]; if arr[0] != arr[1] { for i in 0..1 { - constrain i != 2; + assert(i != 2); } } //Issue reported in #421 if a == c[0] { - constrain c[0] == 0; + assert(c[0] == 0); } else { if a == c[1] { - constrain c[1] == 0; + assert(c[1] == 0); } else { if a == c[2] { - constrain c[2] == 0; + assert(c[2] == 0); } } } @@ -67,25 +67,25 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ c1 = c1 + as_bits[0] as Field; if i == 0 { - constrain arr[i] == 1;// 1 + assert(arr[i] == 1);// 1 for k in 0..2 { - constrain as_bits_hardcode_1[k] == as_bits[k]; + assert(as_bits_hardcode_1[k] == as_bits[k]); } } if i == 1 { - constrain arr[i] == 2;//2 + assert(arr[i] == 2);//2 for k in 0..2 { - constrain as_bits_hardcode_1[k] != as_bits[k]; + assert(as_bits_hardcode_1[k] != as_bits[k]); } } } 
- constrain c1==1; + assert(c1==1); //Regression for Issue #579 let result1_true = test(true); - constrain result1_true.array_param[0] == 1; + assert(result1_true.array_param[0] == 1); let result1_false = test(false); - constrain result1_false.array_param[0] == 0; + assert(result1_false.array_param[0] == 0); //Test case for short-circuit let mut data = [0 as u32; 32]; @@ -104,23 +104,23 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ } } } - constrain data[31] == 0; - constrain ba != 13; + assert(data[31] == 0); + assert(ba != 13); //regression for short-circuit2 if 35 == a { - constrain false; + assert(false); } bar(a as Field); if a == 3 { c = test4(); } - constrain c[1] != 2; + assert(c[1] != 2); call_intrinsic(x, result); //Test case for conditional with arrays from function parameters let b = sort([1,2,3,4]); - constrain b[0] == 1; + assert(b[0] == 1); if a == 0 { must_be_zero(0); @@ -130,9 +130,9 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ c[0] = 1; c[1] = c[2] / a + 11 % a; let f1 = a as Field; - constrain 10/f1 != 0; + assert(10/f1 != 0); } - constrain c[0] == 3; + assert(c[0] == 3); let mut y = 0; if a == 0 { @@ -141,9 +141,9 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ } else { y = 5; } - constrain y == result[0]; + assert(y == result[0]); c = sort(c); - constrain c[0]==0; + assert(c[0]==0); //test 1 let mut x: u32 = 0; @@ -153,16 +153,16 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ x = 6; } else { x = 2; - constrain x == 2; + assert(x == 2); } } else { x = 5; - constrain x == 5; + assert(x == 5); } if c[0] == 0 { x = 3; } - constrain x == 2; + assert(x == 2); //test2: loops! 
x = 0; @@ -172,19 +172,19 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ x = i as u32 +2; } } - constrain x == 0; + assert(x == 0); test3(1); if a == 0 { c = test4(); } else { - constrain c[1] != 2; + assert(c[1] != 2); } if false { c[1] = 5; } - constrain c[1] == 2; + assert(c[1] == 2); test5(4); @@ -195,7 +195,7 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ } else { c_661 = issue_661_foo(issue_661_bar(c), x); } - constrain c_661[0] < 20000; + assert(c_661[0] < 20000); // Test case for function synchronisation let mut c_sync = 0; @@ -204,7 +204,7 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ } else { c_sync = foo2() + foo2(); } - constrain c_sync == 6; + assert(c_sync == 6); // Regression for predicate simplification safe_inverse(0); @@ -213,7 +213,7 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]){ fn test5(a : u32) { if a > 1 { let q = a / 2; - constrain q == 2; + assert(q == 2); } } diff --git a/crates/nargo_cli/tests/test_data/array_len/src/main.nr b/crates/nargo_cli/tests/test_data/array_len/src/main.nr index 7ed9ebfd1c9..29ee44ce928 100644 --- a/crates/nargo_cli/tests/test_data/array_len/src/main.nr +++ b/crates/nargo_cli/tests/test_data/array_len/src/main.nr @@ -13,19 +13,19 @@ fn nested_call(b: [Field]) -> Field { } fn main(len3: [u8; 3], len4: [Field; 4]) { - constrain len_plus_1(len3) == 4; - constrain len_plus_1(len4) == 5; - constrain add_lens(len3, len4) == 7; - constrain nested_call(len4) == 5; + assert(len_plus_1(len3) == 4); + assert(len_plus_1(len4) == 5); + assert(add_lens(len3, len4) == 7); + assert(nested_call(len4) == 5); // std::array::len returns a comptime value - constrain len4[len3.len()] == 4; + assert(len4[len3.len()] == 4); // test for std::array::sort let mut unsorted = len3; unsorted[0] = len3[1]; unsorted[1] = len3[0]; - constrain unsorted[0] > unsorted[1]; + assert(unsorted[0] > unsorted[1]); let sorted = unsorted.sort(); - constrain 
sorted[0] < sorted[1]; + assert(sorted[0] < sorted[1]); } diff --git a/crates/nargo_cli/tests/test_data/array_neq/src/main.nr b/crates/nargo_cli/tests/test_data/array_neq/src/main.nr index 1fc5d9579c7..be734dea368 100644 --- a/crates/nargo_cli/tests/test_data/array_neq/src/main.nr +++ b/crates/nargo_cli/tests/test_data/array_neq/src/main.nr @@ -1,4 +1,4 @@ // Simple example of checking where two arrays are different fn main(a: [Field; 32], b: [Field; 32]) { - constrain a != b; + assert(a != b); } diff --git a/crates/nargo_cli/tests/test_data/assign_ex/src/main.nr b/crates/nargo_cli/tests/test_data/assign_ex/src/main.nr index 158da959352..b0626d63c8e 100644 --- a/crates/nargo_cli/tests/test_data/assign_ex/src/main.nr +++ b/crates/nargo_cli/tests/test_data/assign_ex/src/main.nr @@ -1,6 +1,6 @@ fn main(x: Field, y: Field) { let mut z = x + y; - constrain z == 3; + assert(z == 3); z = x * y; - constrain z == 2; + assert(z == 2); } diff --git a/crates/nargo_cli/tests/test_data/bit_and/src/main.nr b/crates/nargo_cli/tests/test_data/bit_and/src/main.nr index 14b865d1a38..f4805960a33 100644 --- a/crates/nargo_cli/tests/test_data/bit_and/src/main.nr +++ b/crates/nargo_cli/tests/test_data/bit_and/src/main.nr @@ -4,15 +4,15 @@ fn main(x : Field, y : Field) { let x_as_u8 = x as u8; let y_as_u8 = y as u8; - constrain (x_as_u8 & y_as_u8) == x_as_u8; + assert((x_as_u8 & y_as_u8) == x_as_u8); //bitwise and with 1 bit: let flag = (x == 0) & (y == 16); - constrain flag; + assert(flag); //bitwise and with odd bits: let x_as_u11 = x as u11; let y_as_u11 = y as u11; - constrain (x_as_u11 & y_as_u11) == x_as_u11; + assert((x_as_u11 & y_as_u11) == x_as_u11); } diff --git a/crates/nargo_cli/tests/test_data/bool_not/src/main.nr b/crates/nargo_cli/tests/test_data/bool_not/src/main.nr index 035c0630874..d6b4d7a9fad 100644 --- a/crates/nargo_cli/tests/test_data/bool_not/src/main.nr +++ b/crates/nargo_cli/tests/test_data/bool_not/src/main.nr @@ -1,5 +1,5 @@ use dep::std; fn main(x: u1) { - 
constrain !x == 0; + assert(!x == 0); } diff --git a/crates/nargo_cli/tests/test_data/bool_or/src/main.nr b/crates/nargo_cli/tests/test_data/bool_or/src/main.nr index 147cc23a922..4a74027e4aa 100644 --- a/crates/nargo_cli/tests/test_data/bool_or/src/main.nr +++ b/crates/nargo_cli/tests/test_data/bool_or/src/main.nr @@ -1,7 +1,7 @@ use dep::std; fn main(x: u1, y: u1) { - constrain x | y == 1; + assert(x | y == 1); - constrain x | y | x == 1; + assert(x | y | x == 1); } diff --git a/crates/nargo_cli/tests/test_data/cast_bool/src/main.nr b/crates/nargo_cli/tests/test_data/cast_bool/src/main.nr index e62f4b80ddd..57af8120b33 100644 --- a/crates/nargo_cli/tests/test_data/cast_bool/src/main.nr +++ b/crates/nargo_cli/tests/test_data/cast_bool/src/main.nr @@ -1,6 +1,6 @@ fn main(x: Field, y: Field) { let z = x == y; let t = z as u8; - constrain t == 1; + assert(t == 1); } diff --git a/crates/nargo_cli/tests/test_data/comptime_array_access/src/main.nr b/crates/nargo_cli/tests/test_data/comptime_array_access/src/main.nr index 0c8242bca4b..04f08bb70c5 100644 --- a/crates/nargo_cli/tests/test_data/comptime_array_access/src/main.nr +++ b/crates/nargo_cli/tests/test_data/comptime_array_access/src/main.nr @@ -7,11 +7,11 @@ fn main(a: [Field; 3]) { // Nor should using it in an expression with a non-comptime variable. 
let two = i + ii; - constrain i == ii; + assert(i == ii); let elem2 = a[i]; - constrain elem1 == elem2; - constrain two == 2; + assert(elem1 == elem2); + assert(two == 2); } fn foo(x: Field) -> Field { x } diff --git a/crates/nargo_cli/tests/test_data/comptime_fail/src/main.nr b/crates/nargo_cli/tests/test_data/comptime_fail/src/main.nr index 9e861b5dc57..ad9ecc2f689 100644 --- a/crates/nargo_cli/tests/test_data/comptime_fail/src/main.nr +++ b/crates/nargo_cli/tests/test_data/comptime_fail/src/main.nr @@ -4,12 +4,12 @@ fn main(x: Field) { // Error here: let foo = my_const + x; - constrain array[foo] == x; + assert(array[foo] == x); let my_const2 = 3; - constrain array[my_const2] == 3; + assert(array[my_const2] == 3); // Using a comptime variable where a non-comptime variable is expected should be fine main(my_const2); - constrain x != 0; + assert(x != 0); } diff --git a/crates/nargo_cli/tests/test_data/comptime_recursion_regression/src/main.nr b/crates/nargo_cli/tests/test_data/comptime_recursion_regression/src/main.nr index 31d7d10975c..0461fd9c4cb 100644 --- a/crates/nargo_cli/tests/test_data/comptime_recursion_regression/src/main.nr +++ b/crates/nargo_cli/tests/test_data/comptime_recursion_regression/src/main.nr @@ -1,4 +1,4 @@ fn main(x: Field, y: Field) { let flag = (x == 1) | (y == 2); - constrain flag | false == flag; + assert(flag | false == flag); } diff --git a/crates/nargo_cli/tests/test_data/contracts/src/main.nr b/crates/nargo_cli/tests/test_data/contracts/src/main.nr index f236186d426..53e094eb4cc 100644 --- a/crates/nargo_cli/tests/test_data/contracts/src/main.nr +++ b/crates/nargo_cli/tests/test_data/contracts/src/main.nr @@ -1,5 +1,5 @@ fn main(x : Field, y : pub Field) { - constrain x * 2 == y * 3; + assert(x * 2 == y * 3); } contract Foo { diff --git a/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr b/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr index ee9e2e2eeee..3372e969d4b 100644 --- 
a/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr +++ b/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr @@ -23,26 +23,36 @@ fn main() { let p2_affine = Gaffine::new(16540640123574156134436876038791482806971768689494387082833631921987005038935, 20819045374670962167435360035096875258406992893633759881276124905556507972311); let p3_affine = bjj_affine.add(p1_affine, p2_affine); - constrain p3_affine.eq(Gaffine::new(7916061937171219682591368294088513039687205273691143098332585753343424131937, - 14035240266687799601661095864649209771790948434046947201833777492504781204499)); + assert( + p3_affine.eq(Gaffine::new( + 7916061937171219682591368294088513039687205273691143098332585753343424131937, + 14035240266687799601661095864649209771790948434046947201833777492504781204499 + )) + ); // Test scalar multiplication let p4_affine = bjj_affine.mul(2, p1_affine); - constrain p4_affine.eq(Gaffine::new(6890855772600357754907169075114257697580319025794532037257385534741338397365, - 4338620300185947561074059802482547481416142213883829469920100239455078257889)); - constrain p4_affine.eq(bjj_affine.bit_mul([0,1], p1_affine)); + assert( + p4_affine.eq(Gaffine::new( + 6890855772600357754907169075114257697580319025794532037257385534741338397365, + 4338620300185947561074059802482547481416142213883829469920100239455078257889 + )) + ); + assert(p4_affine.eq(bjj_affine.bit_mul([0,1], p1_affine))); // Test subtraction let p5_affine = bjj_affine.subtract(p3_affine, p3_affine); - constrain p5_affine.eq(Gaffine::zero()); + assert(p5_affine.eq(Gaffine::zero())); // Check that these points are on the curve - constrain bjj_affine.contains(bjj_affine.gen) - & bjj_affine.contains(p1_affine) - & bjj_affine.contains(p2_affine) - & bjj_affine.contains(p3_affine) - & bjj_affine.contains(p4_affine) - & bjj_affine.contains(p5_affine); + assert( + bjj_affine.contains(bjj_affine.gen) & + bjj_affine.contains(p1_affine) & + bjj_affine.contains(p2_affine) & + 
bjj_affine.contains(p3_affine) & + bjj_affine.contains(p4_affine) & + bjj_affine.contains(p5_affine) + ); // Test CurveGroup equivalents let bjj = bjj_affine.into_group(); // Baby Jubjub @@ -54,23 +64,25 @@ fn main() { let p5 = p5_affine.into_group(); // Test addition - constrain p3.eq(bjj.add(p1, p2)); + assert(p3.eq(bjj.add(p1, p2))); // Test scalar multiplication - constrain p4.eq(bjj.mul(2, p1)); - constrain p4.eq(bjj.bit_mul([0,1], p1)); + assert(p4.eq(bjj.mul(2, p1))); + assert(p4.eq(bjj.bit_mul([0,1], p1))); // Test subtraction - constrain G::zero().eq(bjj.subtract(p3, p3)); - constrain p5.eq(G::zero()); + assert(G::zero().eq(bjj.subtract(p3, p3))); + assert(p5.eq(G::zero())); // Check that these points are on the curve - constrain bjj.contains(bjj.gen) - & bjj.contains(p1) - & bjj.contains(p2) - & bjj.contains(p3) - & bjj.contains(p4) - & bjj.contains(p5); + assert( + bjj.contains(bjj.gen) & + bjj.contains(p1) & + bjj.contains(p2) & + bjj.contains(p3) & + bjj.contains(p4) & + bjj.contains(p5) + ); // Test SWCurve equivalents of the above // First the affine representation @@ -83,26 +95,32 @@ fn main() { let p5_swcurve_affine = bjj_affine.map_into_swcurve(p5_affine); // Addition - constrain p3_swcurve_affine.eq( - bjj_swcurve_affine.add( - p1_swcurve_affine, - p2_swcurve_affine)); + assert( + p3_swcurve_affine.eq( + bjj_swcurve_affine.add( + p1_swcurve_affine, + p2_swcurve_affine + ) + ) + ); // Doubling - constrain p4_swcurve_affine.eq(bjj_swcurve_affine.mul(2, p1_swcurve_affine)); - constrain p4_swcurve_affine.eq(bjj_swcurve_affine.bit_mul([0,1], p1_swcurve_affine)); + assert(p4_swcurve_affine.eq(bjj_swcurve_affine.mul(2, p1_swcurve_affine))); + assert(p4_swcurve_affine.eq(bjj_swcurve_affine.bit_mul([0,1], p1_swcurve_affine))); // Subtraction - constrain SWGaffine::zero().eq(bjj_swcurve_affine.subtract(p3_swcurve_affine, p3_swcurve_affine)); - constrain p5_swcurve_affine.eq(SWGaffine::zero()); + 
assert(SWGaffine::zero().eq(bjj_swcurve_affine.subtract(p3_swcurve_affine, p3_swcurve_affine))); + assert(p5_swcurve_affine.eq(SWGaffine::zero())); // Check that these points are on the curve - constrain bjj_swcurve_affine.contains(bjj_swcurve_affine.gen) - & bjj_swcurve_affine.contains(p1_swcurve_affine) - & bjj_swcurve_affine.contains(p2_swcurve_affine) - & bjj_swcurve_affine.contains(p3_swcurve_affine) - & bjj_swcurve_affine.contains(p4_swcurve_affine) - & bjj_swcurve_affine.contains(p5_swcurve_affine); + assert( + bjj_swcurve_affine.contains(bjj_swcurve_affine.gen) & + bjj_swcurve_affine.contains(p1_swcurve_affine) & + bjj_swcurve_affine.contains(p2_swcurve_affine) & + bjj_swcurve_affine.contains(p3_swcurve_affine) & + bjj_swcurve_affine.contains(p4_swcurve_affine) & + bjj_swcurve_affine.contains(p5_swcurve_affine) + ); // Then the CurveGroup representation let bjj_swcurve = bjj.into_swcurve(); @@ -114,26 +132,25 @@ fn main() { let p5_swcurve = bjj.map_into_swcurve(p5); // Addition - constrain p3_swcurve.eq( - bjj_swcurve.add( - p1_swcurve, - p2_swcurve)); + assert(p3_swcurve.eq(bjj_swcurve.add(p1_swcurve,p2_swcurve))); // Doubling - constrain p4_swcurve.eq(bjj_swcurve.mul(2, p1_swcurve)); - constrain p4_swcurve.eq(bjj_swcurve.bit_mul([0,1], p1_swcurve)); + assert(p4_swcurve.eq(bjj_swcurve.mul(2, p1_swcurve))); + assert(p4_swcurve.eq(bjj_swcurve.bit_mul([0,1], p1_swcurve))); // Subtraction - constrain SWG::zero().eq(bjj_swcurve.subtract(p3_swcurve, p3_swcurve)); - constrain p5_swcurve.eq(SWG::zero()); + assert(SWG::zero().eq(bjj_swcurve.subtract(p3_swcurve, p3_swcurve))); + assert(p5_swcurve.eq(SWG::zero())); // Check that these points are on the curve - constrain bjj_swcurve.contains(bjj_swcurve.gen) - & bjj_swcurve.contains(p1_swcurve) - & bjj_swcurve.contains(p2_swcurve) - & bjj_swcurve.contains(p3_swcurve) - & bjj_swcurve.contains(p4_swcurve) - & bjj_swcurve.contains(p5_swcurve); + assert( + bjj_swcurve.contains(bjj_swcurve.gen) & + 
bjj_swcurve.contains(p1_swcurve) & + bjj_swcurve.contains(p2_swcurve) & + bjj_swcurve.contains(p3_swcurve) & + bjj_swcurve.contains(p4_swcurve) & + bjj_swcurve.contains(p5_swcurve) + ); // Test MontCurve conversions // First the affine representation @@ -146,26 +163,25 @@ fn main() { let p5_montcurve_affine = p5_affine.into_montcurve(); // Addition - constrain p3_montcurve_affine.eq( - bjj_montcurve_affine.add( - p1_montcurve_affine, - p2_montcurve_affine)); + assert(p3_montcurve_affine.eq(bjj_montcurve_affine.add(p1_montcurve_affine, p2_montcurve_affine))); // Doubling - constrain p4_montcurve_affine.eq(bjj_montcurve_affine.mul(2, p1_montcurve_affine)); - constrain p4_montcurve_affine.eq(bjj_montcurve_affine.bit_mul([0,1], p1_montcurve_affine)); + assert(p4_montcurve_affine.eq(bjj_montcurve_affine.mul(2, p1_montcurve_affine))); + assert(p4_montcurve_affine.eq(bjj_montcurve_affine.bit_mul([0,1], p1_montcurve_affine))); // Subtraction - constrain MGaffine::zero().eq(bjj_montcurve_affine.subtract(p3_montcurve_affine, p3_montcurve_affine)); - constrain p5_montcurve_affine.eq(MGaffine::zero()); + assert(MGaffine::zero().eq(bjj_montcurve_affine.subtract(p3_montcurve_affine, p3_montcurve_affine))); + assert(p5_montcurve_affine.eq(MGaffine::zero())); // Check that these points are on the curve - constrain bjj_montcurve_affine.contains(bjj_montcurve_affine.gen) - & bjj_montcurve_affine.contains(p1_montcurve_affine) - & bjj_montcurve_affine.contains(p2_montcurve_affine) - & bjj_montcurve_affine.contains(p3_montcurve_affine) - & bjj_montcurve_affine.contains(p4_montcurve_affine) - & bjj_montcurve_affine.contains(p5_montcurve_affine); + assert( + bjj_montcurve_affine.contains(bjj_montcurve_affine.gen) & + bjj_montcurve_affine.contains(p1_montcurve_affine) & + bjj_montcurve_affine.contains(p2_montcurve_affine) & + bjj_montcurve_affine.contains(p3_montcurve_affine) & + bjj_montcurve_affine.contains(p4_montcurve_affine) & + bjj_montcurve_affine.contains(p5_montcurve_affine) + ); 
// Then the CurveGroup representation let bjj_montcurve = bjj.into_montcurve(); @@ -177,35 +193,34 @@ fn main() { let p5_montcurve = p5_montcurve_affine.into_group(); // Addition - constrain p3_montcurve.eq( - bjj_montcurve.add( - p1_montcurve, - p2_montcurve)); - + assert(p3_montcurve.eq(bjj_montcurve.add(p1_montcurve, p2_montcurve))); + // Doubling - constrain p4_montcurve.eq(bjj_montcurve.mul(2, p1_montcurve)); - constrain p4_montcurve.eq(bjj_montcurve.bit_mul([0,1], p1_montcurve)); + assert(p4_montcurve.eq(bjj_montcurve.mul(2, p1_montcurve))); + assert(p4_montcurve.eq(bjj_montcurve.bit_mul([0,1], p1_montcurve))); // Subtraction - constrain MG::zero().eq(bjj_montcurve.subtract(p3_montcurve, p3_montcurve)); - constrain p5_montcurve.eq(MG::zero()); + assert(MG::zero().eq(bjj_montcurve.subtract(p3_montcurve, p3_montcurve))); + assert(p5_montcurve.eq(MG::zero())); // Check that these points are on the curve - constrain bjj_montcurve.contains(bjj_montcurve.gen) - & bjj_montcurve.contains(p1_montcurve) - & bjj_montcurve.contains(p2_montcurve) - & bjj_montcurve.contains(p3_montcurve) - & bjj_montcurve.contains(p4_montcurve) - & bjj_montcurve.contains(p5_montcurve); + assert( + bjj_montcurve.contains(bjj_montcurve.gen) & + bjj_montcurve.contains(p1_montcurve) & + bjj_montcurve.contains(p2_montcurve) & + bjj_montcurve.contains(p3_montcurve) & + bjj_montcurve.contains(p4_montcurve) & + bjj_montcurve.contains(p5_montcurve) + ); // Elligator 2 map-to-curve let ell2_pt_map = bjj_affine.elligator2_map(27); - constrain ell2_pt_map.eq(MGaffine::new(7972459279704486422145701269802978968072470631857513331988813812334797879121, 8142420778878030219043334189293412482212146646099536952861607542822144507872).into_tecurve()); + assert(ell2_pt_map.eq(MGaffine::new(7972459279704486422145701269802978968072470631857513331988813812334797879121, 8142420778878030219043334189293412482212146646099536952861607542822144507872).into_tecurve())); // SWU map-to-curve let swu_pt_map = 
bjj_affine.swu_map(5,27); - constrain swu_pt_map.eq(bjj_affine.map_from_swcurve(SWGaffine::new(2162719247815120009132293839392097468339661471129795280520343931405114293888, 5341392251743377373758788728206293080122949448990104760111875914082289313973))); + assert(swu_pt_map.eq(bjj_affine.map_from_swcurve(SWGaffine::new(2162719247815120009132293839392097468339661471129795280520343931405114293888, 5341392251743377373758788728206293080122949448990104760111875914082289313973)))); } } diff --git a/crates/nargo_cli/tests/test_data/generics/src/main.nr b/crates/nargo_cli/tests/test_data/generics/src/main.nr index 56078a304e0..c506995adc3 100644 --- a/crates/nargo_cli/tests/test_data/generics/src/main.nr +++ b/crates/nargo_cli/tests/test_data/generics/src/main.nr @@ -5,7 +5,7 @@ struct Bar { } fn foo(bar: Bar) { - constrain bar.one == bar.two; + assert(bar.one == bar.two); } struct BigInt { @@ -15,12 +15,12 @@ struct BigInt { impl BigInt { // `N` is in scope of all methods in the impl fn first(first: BigInt, second: BigInt) -> Self { - constrain first.limbs != second.limbs; + assert(first.limbs != second.limbs); first } fn second(first: BigInt, second: Self) -> Self { - constrain first.limbs != second.limbs; + assert(first.limbs != second.limbs); second } } @@ -42,11 +42,11 @@ fn main(x: Field, y: Field) { let int1 = BigInt { limbs: [1] }; let int2 = BigInt { limbs: [2] }; let BigInt { limbs } = int1.second(int2).first(int1); - constrain limbs == int2.limbs; + assert(limbs == int2.limbs); // Test impl exclusively for Bar - constrain bar1.get_other() == bar1.other; + assert(bar1.get_other() == bar1.other); // Expected type error - // constrain bar2.get_other() == bar2.other; + // assert(bar2.get_other() == bar2.other); } diff --git a/crates/nargo_cli/tests/test_data/global_consts/src/baz.nr b/crates/nargo_cli/tests/test_data/global_consts/src/baz.nr index 3471da43105..e52efc52eae 100644 --- a/crates/nargo_cli/tests/test_data/global_consts/src/baz.nr +++ 
b/crates/nargo_cli/tests/test_data/global_consts/src/baz.nr @@ -1,5 +1,5 @@ fn from_baz(x : [Field; crate::foo::MAGIC_NUMBER]) { for i in 0..crate::foo::MAGIC_NUMBER { - constrain x[i] == crate::foo::MAGIC_NUMBER; + assert(x[i] == crate::foo::MAGIC_NUMBER); }; } \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/global_consts/src/foo.nr b/crates/nargo_cli/tests/test_data/global_consts/src/foo.nr index c54a85ae120..2db74fb1ff7 100644 --- a/crates/nargo_cli/tests/test_data/global_consts/src/foo.nr +++ b/crates/nargo_cli/tests/test_data/global_consts/src/foo.nr @@ -6,6 +6,6 @@ global TYPE_INFERRED = 42; fn from_foo(x : [Field; bar::N]) { for i in 0..bar::N { - constrain x[i] == bar::N; + assert(x[i] == bar::N); }; } \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/global_consts/src/main.nr b/crates/nargo_cli/tests/test_data/global_consts/src/main.nr index fb48eb2b798..9bcca2b8071 100644 --- a/crates/nargo_cli/tests/test_data/global_consts/src/main.nr +++ b/crates/nargo_cli/tests/test_data/global_consts/src/main.nr @@ -16,14 +16,14 @@ fn main(a: [Field; M + N - N], b: [Field; 30 + N / 2], c : pub [Field; foo::MAGI let test_struct = Dummy { x: d, y: c }; for i in 0..foo::MAGIC_NUMBER { - constrain c[i] == foo::MAGIC_NUMBER; - constrain test_struct.y[i] == foo::MAGIC_NUMBER; + assert(c[i] == foo::MAGIC_NUMBER); + assert(test_struct.y[i] == foo::MAGIC_NUMBER); } - constrain N != M; + assert(N != M); let expected: u32 = 42; - constrain foo::TYPE_INFERRED == expected; + assert(foo::TYPE_INFERRED == expected); let mut y = 5; let mut x = M; @@ -33,30 +33,30 @@ fn main(a: [Field; M + N - N], b: [Field; 30 + N / 2], c : pub [Field; foo::MAGI y = i; } - constrain y == 24; - constrain x == 10; + assert(y == 24); + assert(x == 10); let q = multiplyByM(3); - constrain q == 96; + assert(q == 96); arrays_neq(a, b); let t: [Field; T_LEN] = [N, M]; - constrain t[1] == 32; + assert(t[1] == 32); - constrain 15 == mysubmodule::my_helper(); + 
assert(15 == mysubmodule::my_helper()); let add_submodules_N = mysubmodule::N + foo::bar::N; - constrain 15 == add_submodules_N; + assert(15 == add_submodules_N); let add_from_bar_N = mysubmodule::N + foo::bar::from_bar(1); - constrain 15 == add_from_bar_N; + assert(15 == add_from_bar_N); // Example showing an array filled with (mysubmodule::N + 2) 0's let sugared = [0; mysubmodule::N + 2]; - constrain sugared[mysubmodule::N + 1] == 0; + assert(sugared[mysubmodule::N + 1] == 0); let arr: [Field; mysubmodule::N] = [N; 10]; - constrain (arr[0] == 5) & (arr[9] == 5); + assert((arr[0] == 5) & (arr[9] == 5)); foo::from_foo(d); baz::from_baz(c); @@ -67,7 +67,7 @@ fn multiplyByM(x: Field) -> Field { } fn arrays_neq(a: [Field; M], b: [Field; M]) { - constrain a != b; + assert(a != b); } mod mysubmodule { @@ -77,7 +77,7 @@ mod mysubmodule { global L: Field = 50; fn my_bool_or(x: u1, y: u1) { - constrain x | y == 1; + assert(x | y == 1); } fn my_helper() -> comptime Field { diff --git a/crates/nargo_cli/tests/test_data/higher-order-functions/src/main.nr b/crates/nargo_cli/tests/test_data/higher-order-functions/src/main.nr index 70b281951a8..572e6603cc5 100644 --- a/crates/nargo_cli/tests/test_data/higher-order-functions/src/main.nr +++ b/crates/nargo_cli/tests/test_data/higher-order-functions/src/main.nr @@ -2,16 +2,16 @@ use dep::std; fn main() -> pub Field { let f = if 3 * 7 > 200 { foo } else { bar }; - constrain f()[1] == 2; + assert(f()[1] == 2); // Lambdas: - constrain twice(|x| x * 2, 5) == 20; - constrain (|x, y| x + y + 1)(2, 3) == 6; + assert(twice(|x| x * 2, 5) == 20); + assert((|x, y| x + y + 1)(2, 3) == 6); // Closures: let a = 42; let g = || a; - constrain g() == 42; + assert(g() == 42); // Mutable variables cannot be captured, but you can // copy them into immutable variables and capture those: @@ -22,7 +22,7 @@ fn main() -> pub Field { // Add extra mutations to ensure we can mutate x without the // captured z changing. 
x = x + 1; - constrain (|y| y + z)(1) == 4; + assert((|y| y + z)(1) == 4); x = x + 1; let ret = twice(add1, 3); @@ -34,18 +34,18 @@ fn main() -> pub Field { /// Test the array functions in std::array fn test_array_functions() { let myarray: [i32; 3] = [1, 2, 3]; - constrain myarray.any(|n| n > 2); + assert(myarray.any(|n| n > 2)); let evens: [i32; 3] = [2, 4, 6]; - constrain evens.all(|n| n > 1); + assert(evens.all(|n| n > 1)); - constrain evens.fold(0, |a, b| a + b) == 12; - constrain evens.reduce(|a, b| a + b) == 12; + assert(evens.fold(0, |a, b| a + b) == 12); + assert(evens.reduce(|a, b| a + b) == 12); let descending = myarray.sort_via(|a, b| a > b); - constrain descending == [3, 2, 1]; + assert(descending == [3, 2, 1]); - constrain evens.map(|n| n / 2) == myarray; + assert(evens.map(|n| n / 2) == myarray); } fn foo() -> [u32; 2] { diff --git a/crates/nargo_cli/tests/test_data/if_else_chain/src/main.nr b/crates/nargo_cli/tests/test_data/if_else_chain/src/main.nr index af04fc7bdf8..5105c18c7de 100644 --- a/crates/nargo_cli/tests/test_data/if_else_chain/src/main.nr +++ b/crates/nargo_cli/tests/test_data/if_else_chain/src/main.nr @@ -1,16 +1,16 @@ fn main(a: u32, mut c: [u32; 4]){ if a == c[0] { - constrain c[0] == 0; + assert(c[0] == 0); } else if a == c[1] { - constrain c[1] == 0; + assert(c[1] == 0); } else if a == c[2] { - constrain c[2] == 0; + assert(c[2] == 0); } else if a == c[3] { // expect to match this case - constrain c[3] == 0; + assert(c[3] == 0); } else { - constrain c[0] == 10; + assert(c[0] == 10); } } diff --git a/crates/nargo_cli/tests/test_data/main_bool_arg/src/main.nr b/crates/nargo_cli/tests/test_data/main_bool_arg/src/main.nr index 91a8db03ff3..0615a7dbca4 100644 --- a/crates/nargo_cli/tests/test_data/main_bool_arg/src/main.nr +++ b/crates/nargo_cli/tests/test_data/main_bool_arg/src/main.nr @@ -1,8 +1,8 @@ fn main(x : bool, y: [bool;2]) { if x { - constrain 1 != 2; + assert(1 != 2); } - constrain x; - constrain y[0] != y[1]; + assert(x); + 
assert(y[0] != y[1]); } diff --git a/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr b/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr index 3ab4efb64c0..53d876272ac 100644 --- a/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr +++ b/crates/nargo_cli/tests/test_data/merkle_insert/src/main.nr @@ -10,13 +10,13 @@ fn main( mimc_input: [Field; 4], ) { let old_leaf_exists = std::merkle::check_membership(old_root, old_leaf, index, old_hash_path); - constrain old_leaf_exists == 1; - constrain old_root == std::merkle::compute_root_from_leaf(old_leaf, index, old_hash_path); + assert(old_leaf_exists == 1); + assert(old_root == std::merkle::compute_root_from_leaf(old_leaf, index, old_hash_path)); let calculated_root = std::merkle::compute_merkle_root(leaf, index, old_hash_path); - constrain new_root == calculated_root; + assert(new_root == calculated_root); let h = std::hash::mimc_bn254(mimc_input); // Regression test for PR #891 std::println(h); - constrain h == 18226366069841799622585958305961373004333097209608110160936134895615261821931; + assert(h == 18226366069841799622585958305961373004333097209608110160936134895615261821931); } diff --git a/crates/nargo_cli/tests/test_data/modules/src/main.nr b/crates/nargo_cli/tests/test_data/modules/src/main.nr index 4a773c9ed6b..167f7e671a0 100644 --- a/crates/nargo_cli/tests/test_data/modules/src/main.nr +++ b/crates/nargo_cli/tests/test_data/modules/src/main.nr @@ -10,5 +10,5 @@ mod foo; // // To verify that proof, type `cargo run verify {proof_name}` fn main(x: Field, y: pub Field) { - constrain x != foo::hello(y); + assert(x != foo::hello(y)); } diff --git a/crates/nargo_cli/tests/test_data/modules_more/src/main.nr b/crates/nargo_cli/tests/test_data/modules_more/src/main.nr index 73f3a0a6d8b..8862e5a8650 100644 --- a/crates/nargo_cli/tests/test_data/modules_more/src/main.nr +++ b/crates/nargo_cli/tests/test_data/modules_more/src/main.nr @@ -2,5 +2,5 @@ mod foo; // An example of the module system 
fn main(x: Field, y: Field) { - constrain x != foo::bar::from_bar(y); + assert(x != foo::bar::from_bar(y)); } diff --git a/crates/nargo_cli/tests/test_data/modulus/src/main.nr b/crates/nargo_cli/tests/test_data/modulus/src/main.nr index 070d934976d..4a13a6e06ba 100644 --- a/crates/nargo_cli/tests/test_data/modulus/src/main.nr +++ b/crates/nargo_cli/tests/test_data/modulus/src/main.nr @@ -3,24 +3,24 @@ use dep::std; fn main(bn254_modulus_be_bytes : [u8; 32], bn254_modulus_be_bits : [u1; 254]) -> pub Field { let modulus_size = std::field::modulus_num_bits(); // NOTE: The constraints used in this circuit will only work when testing nargo with the plonk bn254 backend - constrain modulus_size == 254; + assert(modulus_size == 254); let modulus_be_byte_array = std::field::modulus_be_bytes(); for i in 0..32 { - constrain modulus_be_byte_array[i] == bn254_modulus_be_bytes[i]; + assert(modulus_be_byte_array[i] == bn254_modulus_be_bytes[i]); } let modulus_le_byte_array = std::field::modulus_le_bytes(); for i in 0..32 { - constrain modulus_le_byte_array[i] == bn254_modulus_be_bytes[31-i]; + assert(modulus_le_byte_array[i] == bn254_modulus_be_bytes[31-i]); } let modulus_be_bits = std::field::modulus_be_bits(); for i in 0..254 { - constrain modulus_be_bits[i] == bn254_modulus_be_bits[i]; + assert(modulus_be_bits[i] == bn254_modulus_be_bits[i]); } let modulus_le_bits = std::field::modulus_le_bits(); for i in 0..254 { - constrain modulus_le_bits[i] == bn254_modulus_be_bits[253-i]; + assert(modulus_le_bits[i] == bn254_modulus_be_bits[253-i]); } modulus_size diff --git a/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr b/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr index ebe50c4d0d9..f1efafc19fd 100644 --- a/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr +++ b/crates/nargo_cli/tests/test_data/numeric_generics/src/main.nr @@ -3,15 +3,15 @@ fn main() { let b = id([1, 2, 3]); let itWorks1 = MyStruct { data: a }; - constrain itWorks1.data[1] 
== 2; + assert(itWorks1.data[1] == 2); let itWorks2 = MyStruct { data: b }; - constrain itWorks2.data[1] == 2; + assert(itWorks2.data[1] == 2); let c = [1, 2]; let itAlsoWorks = MyStruct { data: c }; - constrain itAlsoWorks.data[1] == 2; + assert(itAlsoWorks.data[1] == 2); - constrain foo(itWorks2).data[0] == itWorks2.data[0] + 1; + assert(foo(itWorks2).data[0] == itWorks2.data[0] + 1); } fn id(x: [Field; I]) -> [Field; I] { @@ -25,7 +25,7 @@ struct MyStruct { impl MyStruct { fn insert(mut self: Self, index: comptime Field, elem: Field) -> Self { // Regression test for numeric generics on impls - constrain index as u64 < S as u64; + assert(index as u64 < S as u64); self.data[index] = elem; self diff --git a/crates/nargo_cli/tests/test_data/pedersen_check/src/main.nr b/crates/nargo_cli/tests/test_data/pedersen_check/src/main.nr index b727112ce55..37fc3f61188 100644 --- a/crates/nargo_cli/tests/test_data/pedersen_check/src/main.nr +++ b/crates/nargo_cli/tests/test_data/pedersen_check/src/main.nr @@ -2,8 +2,8 @@ use dep::std; fn main(x: Field, y: Field, salt: Field, out_x: Field, out_y: Field ) { let res = std::hash::pedersen([x, y]); - constrain res[0] == out_x; - constrain res[1] == out_y; + assert(res[0] == out_x); + assert(res[1] == out_y); let raw_data = [x,y]; let mut state = 0; @@ -12,6 +12,6 @@ fn main(x: Field, y: Field, salt: Field, out_x: Field, out_y: Field ) { } state += salt; let hash = std::hash::pedersen([state]); - constrain std::hash::pedersen([43])[0] == hash[0]; + assert(std::hash::pedersen([43])[0] == hash[0]); } diff --git a/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr index f2f1af7ab7d..37621c732a8 100644 --- a/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr +++ b/crates/nargo_cli/tests/test_data/poseidon_bn254_hash/src/main.nr @@ -3,8 +3,8 @@ use dep::std::hash::poseidon; fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { 
let hash1 = poseidon::bn254::hash_2(x1); - constrain hash1 == y1; + assert(hash1 == y1); let hash2 = poseidon::bn254::hash_4(x2); - constrain hash2 == y2; + assert(hash2 == y2); } diff --git a/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr index f5135897f19..3addc1cec97 100644 --- a/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr +++ b/crates/nargo_cli/tests/test_data/poseidonsponge_x5_254/src/main.nr @@ -5,5 +5,5 @@ fn main(x: [Field; 7]) // Test optimised sponge let result = poseidon::bn254::sponge(x); - constrain result == 0x080ae1669d62f0197190573d4a325bfb8d8fc201ce3127cbac0c47a7ac81ac48; + assert(result == 0x080ae1669d62f0197190573d4a325bfb8d8fc201ce3127cbac0c47a7ac81ac48); } diff --git a/crates/nargo_cli/tests/test_data/pred_eq/src/main.nr b/crates/nargo_cli/tests/test_data/pred_eq/src/main.nr index c9c43b56c07..c7986cb7af3 100644 --- a/crates/nargo_cli/tests/test_data/pred_eq/src/main.nr +++ b/crates/nargo_cli/tests/test_data/pred_eq/src/main.nr @@ -2,5 +2,5 @@ use dep::std; fn main(x: Field, y: Field) { let p = x == y; - constrain p == true; + assert(p == true); } diff --git a/crates/nargo_cli/tests/test_data/regression/src/main.nr b/crates/nargo_cli/tests/test_data/regression/src/main.nr index 2fcf41c8d7f..06e35827d1e 100644 --- a/crates/nargo_cli/tests/test_data/regression/src/main.nr +++ b/crates/nargo_cli/tests/test_data/regression/src/main.nr @@ -2,8 +2,8 @@ global NIBBLE_LENGTH: comptime Field = 16; fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Field) { - constrain 2*input.len() as u64 <= NIBBLE_LENGTH as u64; - constrain length as u64 <= input.len() as u64; + assert(2*input.len() as u64 <= NIBBLE_LENGTH as u64); + assert(length as u64 <= input.len() as u64); let mut nibble = [0 as u4; NIBBLE_LENGTH]; @@ -43,7 +43,7 @@ fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Fie fn enc(value: [u8; N], 
value_length: Field) -> ([u8; 32], Field) { - constrain value.len() as u8 >= value_length as u8; + assert(value.len() as u8 >= value_length as u8); let mut out_value = [0; 32]; if value_length == 0 { @@ -75,8 +75,8 @@ fn main(x: [u8; 5], z: Field) { //Issue 1144 let (nib, len) = compact_decode(x,z); - constrain len == 5; - constrain [nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8]; + assert(len == 5); + assert([nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8]); } @@ -96,6 +96,6 @@ fn enc_test() let enc_val1 = enc(val1,val1_length); - constrain enc_val1.0 == [0x94,0xb8,0x8f,0x61,0xe6,0xfb,0xda,0x83,0xfb,0xff,0xfa,0xbe,0x36,0x41,0x12,0x13,0x74,0x80,0x39,0x80,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]; - constrain enc_val1.1 == 21; + assert(enc_val1.0 == [0x94,0xb8,0x8f,0x61,0xe6,0xfb,0xda,0x83,0xfb,0xff,0xfa,0xbe,0x36,0x41,0x12,0x13,0x74,0x80,0x39,0x80,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]); + assert(enc_val1.1 == 21); } \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/scalar_mul/src/main.nr b/crates/nargo_cli/tests/test_data/scalar_mul/src/main.nr index 72e022edc8e..d9d267f1dcd 100644 --- a/crates/nargo_cli/tests/test_data/scalar_mul/src/main.nr +++ b/crates/nargo_cli/tests/test_data/scalar_mul/src/main.nr @@ -17,6 +17,6 @@ fn main( pub_y = b_pub_y; } let res = std::scalar_mul::fixed_base(priv_key); - constrain res[0] == pub_x; - constrain res[1] == pub_y; + assert(res[0] == pub_x); + assert(res[1] == pub_y); } diff --git a/crates/nargo_cli/tests/test_data/schnorr/src/main.nr b/crates/nargo_cli/tests/test_data/schnorr/src/main.nr index 39676ca7389..ec4f819858a 100644 --- a/crates/nargo_cli/tests/test_data/schnorr/src/main.nr +++ b/crates/nargo_cli/tests/test_data/schnorr/src/main.nr @@ -6,5 +6,5 @@ fn main(message: [u8; 10], pub_key_x: Field, pub_key_y: Field, signature: [u8; 6 // Is there ever a situation where someone would want // to ensure that a signature was invalid? 
let x = std::schnorr::verify_signature(pub_key_x,pub_key_y,signature, message); - constrain x == 1; + assert(x == 1); } diff --git a/crates/nargo_cli/tests/test_data/sha256/src/main.nr b/crates/nargo_cli/tests/test_data/sha256/src/main.nr index bf2249c4faf..fd5340e2384 100644 --- a/crates/nargo_cli/tests/test_data/sha256/src/main.nr +++ b/crates/nargo_cli/tests/test_data/sha256/src/main.nr @@ -15,5 +15,5 @@ fn main(x: Field, result: [u8; 32]) { // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field // The padding is taken care of by the program let digest = std::hash::sha256([x as u8]); - constrain digest == result; + assert(digest == result); } diff --git a/crates/nargo_cli/tests/test_data/sha2_blocks/src/main.nr b/crates/nargo_cli/tests/test_data/sha2_blocks/src/main.nr index 7e687cd179b..fcdcdb8684f 100644 --- a/crates/nargo_cli/tests/test_data/sha2_blocks/src/main.nr +++ b/crates/nargo_cli/tests/test_data/sha2_blocks/src/main.nr @@ -5,18 +5,18 @@ fn main(x: [u8; 3], result256: [u8; 32], result512: [u8; 64]) { // One-block tests. let mut digest256 = std::sha256::digest(x); - constrain digest256 == result256; + assert(digest256 == result256); let mut digest512 = std::sha512::digest(x); - constrain digest512 == result512; + assert(digest512 == result512); // Two-block SHA256 test. 
Taken from https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/SHA256.pdf let y: [u8; 56] = [97,98,99,100,98,99,100,101,99,100,101,102,100,101,102,103,101,102,103,104,102,103,104,105,103,104,105,106,104,105,106,107,105,106,107,108,106,107,108,109,107,108,109,110,108,109,110,111,109,110,111,112,110,111,112,113]; // "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" digest256 = std::sha256::digest(y); - constrain digest256 == [36,141,106,97,210,6,56,184,229,192,38,147,12,62,96,57,163,60,228,89,100,255,33,103,246,236,237,212,25,219,6,193]; + assert(digest256 == [36,141,106,97,210,6,56,184,229,192,38,147,12,62,96,57,163,60,228,89,100,255,33,103,246,236,237,212,25,219,6,193]); // Two-block SHA256 test. Taken from https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/SHA512.pdf let z: [u8; 112] = [97,98,99,100,101,102,103,104,98,99,100,101,102,103,104,105,99,100,101,102,103,104,105,106,100,101,102,103,104,105,106,107,101,102,103,104,105,106,107,108,102,103,104,105,106,107,108,109,103,104,105,106,107,108,109,110,104,105,106,107,108,109,110,111,105,106,107,108,109,110,111,112,106,107,108,109,110,111,112,113,107,108,109,110,111,112,113,114,108,109,110,111,112,113,114,115,109,110,111,112,113,114,115,116,110,111,112,113,114,115,116,117]; // "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu" digest512 = std::sha512::digest(z); - constrain digest512 == [142,149,155,117,218,227,19,218,140,244,247,40,20,252,20,63,143,119,121,198,235,159,127,161,114,153,174,173,182,136,144,24,80,29,40,158,73,0,247,228,51,27,153,222,196,181,67,58,199,211,41,238,182,221,38,84,94,150,229,91,135,75,233,9]; + assert(digest512 == 
[142,149,155,117,218,227,19,218,140,244,247,40,20,252,20,63,143,119,121,198,235,159,127,161,114,153,174,173,182,136,144,24,80,29,40,158,73,0,247,228,51,27,153,222,196,181,67,58,199,211,41,238,182,221,38,84,94,150,229,91,135,75,233,9]); } diff --git a/crates/nargo_cli/tests/test_data/sha2_byte/src/main.nr b/crates/nargo_cli/tests/test_data/sha2_byte/src/main.nr index 3458862b646..a7cc9daebb9 100644 --- a/crates/nargo_cli/tests/test_data/sha2_byte/src/main.nr +++ b/crates/nargo_cli/tests/test_data/sha2_byte/src/main.nr @@ -4,8 +4,8 @@ use dep::std; fn main(x: Field, result256: [u8; 32], result512: [u8; 64]) { let digest256 = std::sha256::digest([x as u8]); - constrain digest256 == result256; + assert(digest256 == result256); let digest512 = std::sha512::digest([x as u8]); - constrain digest512 == result512; + assert(digest512 == result512); } diff --git a/crates/nargo_cli/tests/test_data/simple_shield/src/main.nr b/crates/nargo_cli/tests/test_data/simple_shield/src/main.nr index 20d41481c7e..65d0ec954ac 100644 --- a/crates/nargo_cli/tests/test_data/simple_shield/src/main.nr +++ b/crates/nargo_cli/tests/test_data/simple_shield/src/main.nr @@ -30,7 +30,7 @@ fn main( // Check that the input note nullifier is in the root let is_member = std::merkle::check_membership(note_root, note_commitment[0], index, note_hash_path); - constrain is_member == 1; + assert(is_member == 1); [nullifier[0], receiver_note_commitment[0]] } diff --git a/crates/nargo_cli/tests/test_data/strings/src/main.nr b/crates/nargo_cli/tests/test_data/strings/src/main.nr index ca0d1691f86..bee2370201c 100644 --- a/crates/nargo_cli/tests/test_data/strings/src/main.nr +++ b/crates/nargo_cli/tests/test_data/strings/src/main.nr @@ -3,7 +3,7 @@ use dep::std; fn main(message : pub str<11>, y : Field, hex_as_string : str<4>, hex_as_field : Field) { let mut bad_message = "hello world"; - constrain message == "hello world"; + assert(message == "hello world"); bad_message = "helld world"; let x = 10; let z = x * 5; 
@@ -13,18 +13,18 @@ fn main(message : pub str<11>, y : Field, hex_as_string : str<4>, hex_as_field : std::println(x); let array = [1, 2, 3, 5, 8]; - constrain y == 5; // Change to y != 5 to see how the later print statements are not called + assert(y == 5); // Change to y != 5 to see how the later print statements are not called std::println(array); std::println(bad_message); - constrain message != bad_message; + assert(message != bad_message); let hash = std::hash::pedersen([x]); std::println(hash); - constrain hex_as_string == "0x41"; - // constrain hex_as_string != 0x41; This will fail with a type mismatch between str[4] and Field - constrain hex_as_field == 0x41; + assert(hex_as_string == "0x41"); + // assert(hex_as_string != 0x41); This will fail with a type mismatch between str[4] and Field + assert(hex_as_field == 0x41); } #[test] diff --git a/crates/nargo_cli/tests/test_data/struct/src/main.nr b/crates/nargo_cli/tests/test_data/struct/src/main.nr index b426339c1e4..6d61393920d 100644 --- a/crates/nargo_cli/tests/test_data/struct/src/main.nr +++ b/crates/nargo_cli/tests/test_data/struct/src/main.nr @@ -58,20 +58,20 @@ fn main(x: Field, y: Field) { let first = Foo::default(x,y); let p = Pair { first, second: 1 }; - constrain p.bar() == x; - constrain p.second == y; - constrain p.first.array[0] != p.first.array[1]; + assert(p.bar() == x); + assert(p.second == y); + assert(p.first.array[0] != p.first.array[1]); // Nested structs let (struct_from_tuple, a_bool) = test_struct_in_tuple(true,x,y); - constrain struct_from_tuple.my_bool == true; - constrain a_bool == true; - constrain struct_from_tuple.my_int == 5; - constrain struct_from_tuple.my_nest.a == 0; + assert(struct_from_tuple.my_bool == true); + assert(a_bool == true); + assert(struct_from_tuple.my_int == 5); + assert(struct_from_tuple.my_nest.a == 0); // Regression test for issue #670 let Animal { legs, eyes } = get_dog(); let six = legs + eyes as Field; - constrain six == 6; + assert(six == 6); } diff 
--git a/crates/nargo_cli/tests/test_data/struct_inputs/src/main.nr b/crates/nargo_cli/tests/test_data/struct_inputs/src/main.nr index e022f26947c..fe77ed6eee6 100644 --- a/crates/nargo_cli/tests/test_data/struct_inputs/src/main.nr +++ b/crates/nargo_cli/tests/test_data/struct_inputs/src/main.nr @@ -14,23 +14,23 @@ fn main(x : Field, y : pub myStruct, z: pub foo::bar::barStruct, a: pub foo::foo check_inner_struct(a, z); for i in 0 .. struct_from_bar.array.len() { - constrain struct_from_bar.array[i] == z.array[i]; + assert(struct_from_bar.array[i] == z.array[i]); } - constrain z.val == struct_from_bar.val; + assert(z.val == struct_from_bar.val); - constrain (struct_from_bar.val * x) == x; + assert((struct_from_bar.val * x) == x); - constrain x != y.bar; + assert(x != y.bar); - constrain y.message == "hello"; - constrain a.bar_struct.message == struct_from_bar.message; + assert(y.message == "hello"); + assert(a.bar_struct.message == struct_from_bar.message); a.bar_struct.array[1] } fn check_inner_struct(a: foo::fooStruct, z: foo::bar::barStruct) { - constrain a.bar_struct.val == z.val; + assert(a.bar_struct.val == z.val); for i in 0.. 
a.bar_struct.array.len() { - constrain a.bar_struct.array[i] == z.array[i]; + assert(a.bar_struct.array[i] == z.array[i]); } } diff --git a/crates/nargo_cli/tests/test_data/submodules/src/main.nr b/crates/nargo_cli/tests/test_data/submodules/src/main.nr index 90c778db111..9bfe382663f 100644 --- a/crates/nargo_cli/tests/test_data/submodules/src/main.nr +++ b/crates/nargo_cli/tests/test_data/submodules/src/main.nr @@ -9,7 +9,7 @@ mod mysubmodule { use dep::std; fn my_bool_or(x: u1, y: u1) { - constrain x | y == 1; + assert(x | y == 1); } fn my_helper() {} diff --git a/crates/nargo_cli/tests/test_data/to_be_bytes/src/main.nr b/crates/nargo_cli/tests/test_data/to_be_bytes/src/main.nr index 1253656217d..f5831e8c524 100644 --- a/crates/nargo_cli/tests/test_data/to_be_bytes/src/main.nr +++ b/crates/nargo_cli/tests/test_data/to_be_bytes/src/main.nr @@ -7,8 +7,8 @@ fn main(x : Field) -> pub [u8; 31] { for i in 0..31 { bytes[i] = byte_array[i]; } - constrain bytes[30] == 60; - constrain bytes[29] == 33; - constrain bytes[28] == 31; + assert(bytes[30] == 60); + assert(bytes[29] == 33); + assert(bytes[28] == 31); bytes } diff --git a/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr b/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr index 6f57b407da7..1932b7556a8 100644 --- a/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr +++ b/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr @@ -7,8 +7,8 @@ fn main(x : Field) { // The result of this byte array will be little-endian let le_byte_array = x.to_le_bytes(31); - constrain le_byte_array[0] == 60; - constrain le_byte_array[0] == be_byte_array[30]; - constrain le_byte_array[1] == be_byte_array[29]; - constrain le_byte_array[2] == be_byte_array[28]; + assert(le_byte_array[0] == 60); + assert(le_byte_array[0] == be_byte_array[30]); + assert(le_byte_array[1] == be_byte_array[29]); + assert(le_byte_array[2] == be_byte_array[28]); } \ No newline at end of file diff --git 
a/crates/nargo_cli/tests/test_data/tuples/src/main.nr b/crates/nargo_cli/tests/test_data/tuples/src/main.nr index ce25b9171cd..4a003dc5a42 100644 --- a/crates/nargo_cli/tests/test_data/tuples/src/main.nr +++ b/crates/nargo_cli/tests/test_data/tuples/src/main.nr @@ -2,18 +2,18 @@ use dep::std; fn main(x: Field, y: Field) { let pair = (x, y); - constrain pair.0 == 1; - constrain pair.1 == 0; + assert(pair.0 == 1); + assert(pair.1 == 0); let (a, b) = if true { (0, 1) } else { (2, 3) }; - constrain a == 0; - constrain b == 1; + assert(a == 0); + assert(b == 1); let (u,v) = if x as u32 <1 { (x,x+1) } else { (x+1,x) }; - constrain u==x+1; - constrain v==x; + assert(u==x+1); + assert(v==x); } diff --git a/crates/nargo_cli/tests/test_data/xor/src/main.nr b/crates/nargo_cli/tests/test_data/xor/src/main.nr index cc7caf17fad..e893c938fc3 100644 --- a/crates/nargo_cli/tests/test_data/xor/src/main.nr +++ b/crates/nargo_cli/tests/test_data/xor/src/main.nr @@ -1,5 +1,5 @@ fn main(x : u32, y : pub u32) { let m = x ^ y; - constrain m != 10; + assert(m != 10); } \ No newline at end of file diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index 98cf5993edf..f03bcefeb2d 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -1357,7 +1357,7 @@ mod test { let src = r#" fn main(x : Field) { let y = x + x; - constrain y == x; + assert(y == x); } "#; @@ -1369,7 +1369,7 @@ mod test { let src = r#" fn main(x : Field) { let y = x + x; - constrain x == x; + assert(x == x); } "#; @@ -1392,7 +1392,7 @@ mod test { let src = r#" fn main(x : Field) { let y = x + x; - constrain y == z; + assert(y == z); } "#; @@ -1428,7 +1428,7 @@ mod test { let src = r#" fn main(x : Field) { let y = 5; - constrain y == x; + assert(y == x); } "#; diff --git a/examples_failing/ecdsa_secp256k1/src/main.nr b/examples_failing/ecdsa_secp256k1/src/main.nr index 
6ee7a98a89d..43a4f78e634 100644 --- a/examples_failing/ecdsa_secp256k1/src/main.nr +++ b/examples_failing/ecdsa_secp256k1/src/main.nr @@ -13,5 +13,5 @@ fn main(hashed_message : [32]u8, pub_key_x : [32]u8, pub_key_y : [32]u8, signatu // Is there ever a situation where someone would want // to ensure that a signature was invalid? let x = std::ecdsa_secp256k1::verify_signature(pub_key_x,pub_key_y,signature, hashed_message); - constrain x == 1; + assert(x == 1); } diff --git a/examples_failing/pow_const/src/main.nr b/examples_failing/pow_const/src/main.nr index 4355935d2ad..9b742417e15 100644 --- a/examples_failing/pow_const/src/main.nr +++ b/examples_failing/pow_const/src/main.nr @@ -1,6 +1,6 @@ use dep::std; fn main(_x : Field) { - constrain std::pow_const(2,3) == _x; + assert(std::pow_const(2,3) == _x); } diff --git a/noir_stdlib/src/ec/montcurve.nr b/noir_stdlib/src/ec/montcurve.nr index fad5e5e0a97..e917661f0f1 100644 --- a/noir_stdlib/src/ec/montcurve.nr +++ b/noir_stdlib/src/ec/montcurve.nr @@ -82,13 +82,13 @@ mod affine { // Curve constructor fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients - constrain k != 0; - constrain j*j != 4; + assert(k != 0); + assert(j*j != 4); let curve = Self {j, k, gen}; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } @@ -180,10 +180,10 @@ mod affine { let z = ZETA; // Non-square Field element required for map // Check whether curve is admissible - constrain j != 0; + assert(j != 0); let l = (j*j - 4)/(k*k); - constrain l != 0; - constrain is_square(l) == false; + assert(l != 0); + assert(is_square(l) == false); let x1 = safe_inverse(1+z*u*u)*(0 - (j/k)); @@ -284,13 +284,13 @@ mod curvegroup { // Curve constructor fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients - constrain k != 0; - constrain j*j != 4; + assert(k != 0); + assert(j*j != 4); let curve = Self {j, k, gen}; // gen should be on the curve - constrain 
curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } diff --git a/noir_stdlib/src/ec/swcurve.nr b/noir_stdlib/src/ec/swcurve.nr index 8e2a996e927..eae4f375e43 100644 --- a/noir_stdlib/src/ec/swcurve.nr +++ b/noir_stdlib/src/ec/swcurve.nr @@ -71,12 +71,12 @@ mod affine { // Curve constructor fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - constrain 4*a*a*a + 27*b*b != 0; + assert(4*a*a*a + 27*b*b != 0); let curve = Curve { a, b, gen }; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } @@ -164,7 +164,7 @@ mod affine { // where g(x) = x^3 + a*x + b. swu_map(c,z,.) then maps a Field element to a point on curve c. fn swu_map(self, z: Field, u: Field) -> Point { // Check whether curve is admissible - constrain self.a*self.b != 0; + assert(self.a*self.b != 0); let Curve {a, b, gen: _gen} = self; @@ -248,12 +248,12 @@ mod curvegroup { // Curve constructor fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - constrain 4*a*a*a + 27*b*b != 0; + assert(4*a*a*a + 27*b*b != 0); let curve = Curve { a, b, gen }; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } diff --git a/noir_stdlib/src/ec/tecurve.nr b/noir_stdlib/src/ec/tecurve.nr index 43c9f5d2017..8611e4270c3 100644 --- a/noir_stdlib/src/ec/tecurve.nr +++ b/noir_stdlib/src/ec/tecurve.nr @@ -81,12 +81,12 @@ mod affine { // Curve constructor fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - constrain a*d*(a-d) != 0; + assert(a*d*(a-d) != 0); let curve = Curve {a, d, gen}; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } @@ -286,12 +286,12 @@ mod curvegroup { // Curve constructor fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - constrain a*d*(a-d) != 0; + assert(a*d*(a-d) != 0); let curve = Curve { a, 
d, gen }; // gen should be on the curve - constrain curve.contains(curve.gen); + assert(curve.contains(curve.gen)); curve } diff --git a/noir_stdlib/src/hash/poseidon.nr b/noir_stdlib/src/hash/poseidon.nr index 7ac365c4995..416f740bbdf 100644 --- a/noir_stdlib/src/hash/poseidon.nr +++ b/noir_stdlib/src/hash/poseidon.nr @@ -20,9 +20,9 @@ fn config( mds: [Field; N]) -> PoseidonConfig { // Input checks - constrain t as u8 * (rf + rp) == ark.len() as u8; - constrain t * t == mds.len(); - constrain alpha != 0; + assert(t as u8 * (rf + rp) == ark.len() as u8); + assert(t * t == mds.len()); + assert(alpha != 0); PoseidonConfig {t, rf, rp, alpha, ark, mds} } @@ -34,7 +34,7 @@ fn permute( -> [Field; O] { let PoseidonConfig {t, rf, rp, alpha, ark, mds} = pos_conf; - constrain t == state.len(); + assert(t == state.len()); let mut count = 0; @@ -68,7 +68,7 @@ fn absorb( capacity: comptime Field, // Capacity; usually 1 msg: [Field; P]) // Arbitrary length message -> [Field; O] { - constrain pos_conf.t == rate + capacity; + assert(pos_conf.t == rate + capacity); let mut i = 0; diff --git a/noir_stdlib/src/hash/poseidon/bn254.nr b/noir_stdlib/src/hash/poseidon/bn254.nr index 355e7d13a5f..9ba26dbd878 100644 --- a/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir_stdlib/src/hash/poseidon/bn254.nr @@ -15,9 +15,9 @@ fn permute( let rf = 8; let rp = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68][state.len() - 2]; - constrain t == state.len(); - constrain rf == config_rf as Field; - constrain rp == config_rp as Field; + assert(t == state.len()); + assert(rf == config_rf as Field); + assert(rp == config_rp as Field); let mut count = 0; @@ -73,7 +73,7 @@ fn absorb( msg: [Field; P] // Arbitrary length message ) -> [Field; O] { - constrain pos_conf.t == rate + capacity; + assert(pos_conf.t == rate + capacity); let mut i = 0; From 62b7496c450fbf105e405aa463c3e796de92a428 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 2 
May 2023 16:29:46 +0100 Subject: [PATCH 30/66] chore: Replace explicit if-elses with `FieldElement::from()` for boolean fields (#1266) * chore: replace if-elses with `FieldElement::from()` * chore: replace explicit equality with usage of `is_zero()` * chore: replace explicit usage of `from` with `.into()` --- crates/noirc_abi/src/input_parser/toml.rs | 14 +------ .../src/ssa/acir_gen/operations/bitwise.rs | 4 +- crates/noirc_evaluator/src/ssa/node.rs | 42 +++++++++++-------- 3 files changed, 28 insertions(+), 32 deletions(-) diff --git a/crates/noirc_abi/src/input_parser/toml.rs b/crates/noirc_abi/src/input_parser/toml.rs index 180cde4bf78..a737f784031 100644 --- a/crates/noirc_abi/src/input_parser/toml.rs +++ b/crates/noirc_abi/src/input_parser/toml.rs @@ -115,11 +115,7 @@ impl InputValue { InputValue::Field(new_value) } - TomlTypes::Bool(boolean) => { - let new_value = if boolean { FieldElement::one() } else { FieldElement::zero() }; - - InputValue::Field(new_value) - } + TomlTypes::Bool(boolean) => InputValue::Field(boolean.into()), TomlTypes::ArrayNum(arr_num) => { let array_elements = vecmap(arr_num, |elem_num| FieldElement::from(i128::from(elem_num))); @@ -132,13 +128,7 @@ impl InputValue { InputValue::Vec(array_elements) } TomlTypes::ArrayBool(arr_bool) => { - let array_elements = vecmap(arr_bool, |elem_bool| { - if elem_bool { - FieldElement::one() - } else { - FieldElement::zero() - } - }); + let array_elements = vecmap(arr_bool, FieldElement::from); InputValue::Vec(array_elements) } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs index f8ca271835e..00396f4d4b6 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/bitwise.rs @@ -40,8 +40,8 @@ pub(super) fn simplify_bitwise( let max = FieldElement::from((1_u128 << bit_size) - 1); let (field, var) = match (lhs.to_const(), rhs.to_const()) { - 
(Some(l_c), None) => (l_c == FieldElement::zero() || l_c == max).then_some((l_c, rhs))?, - (None, Some(r_c)) => (r_c == FieldElement::zero() || r_c == max).then_some((r_c, lhs))?, + (Some(l_c), None) => (l_c.is_zero() || l_c == max).then_some((l_c, rhs))?, + (None, Some(r_c)) => (r_c.is_zero() || r_c == max).then_some((r_c, lhs))?, _ => return None, }; diff --git a/crates/noirc_evaluator/src/ssa/node.rs b/crates/noirc_evaluator/src/ssa/node.rs index 8819a96e1c3..bec3c923a6d 100644 --- a/crates/noirc_evaluator/src/ssa/node.rs +++ b/crates/noirc_evaluator/src/ssa/node.rs @@ -918,8 +918,10 @@ impl Binary { !res_type.is_native_field(), "ICE: comparisons are not implemented for field elements" ); - let res = if lhs < rhs { FieldElement::one() } else { FieldElement::zero() }; - return Ok(NodeEval::Const(res, ObjectType::boolean())); + return Ok(NodeEval::Const( + FieldElement::from(lhs < rhs), + ObjectType::boolean(), + )); } } BinaryOp::Ule => { @@ -931,8 +933,10 @@ impl Binary { !res_type.is_native_field(), "ICE: comparisons are not implemented for field elements" ); - let res = if lhs <= rhs { FieldElement::one() } else { FieldElement::zero() }; - return Ok(NodeEval::Const(res, ObjectType::boolean())); + return Ok(NodeEval::Const( + FieldElement::from(lhs <= rhs), + ObjectType::boolean(), + )); } } BinaryOp::Slt => (), @@ -942,8 +946,10 @@ impl Binary { return Ok(NodeEval::Const(FieldElement::zero(), ObjectType::boolean())); //n.b we assume the type of lhs and rhs is unsigned because of the opcode, we could also verify this } else if let (Some(lhs), Some(rhs)) = (lhs, rhs) { - let res = if lhs < rhs { FieldElement::one() } else { FieldElement::zero() }; - return Ok(NodeEval::Const(res, ObjectType::boolean())); + return Ok(NodeEval::Const( + FieldElement::from(lhs < rhs), + ObjectType::boolean(), + )); } } BinaryOp::Lte => { @@ -951,30 +957,30 @@ impl Binary { return Ok(NodeEval::Const(FieldElement::one(), ObjectType::boolean())); //n.b we assume the type of lhs and 
rhs is unsigned because of the opcode, we could also verify this } else if let (Some(lhs), Some(rhs)) = (lhs, rhs) { - let res = if lhs <= rhs { FieldElement::one() } else { FieldElement::zero() }; - return Ok(NodeEval::Const(res, ObjectType::boolean())); + return Ok(NodeEval::Const( + FieldElement::from(lhs <= rhs), + ObjectType::boolean(), + )); } } BinaryOp::Eq => { if self.lhs == self.rhs { return Ok(NodeEval::Const(FieldElement::one(), ObjectType::boolean())); } else if let (Some(lhs), Some(rhs)) = (lhs, rhs) { - if lhs == rhs { - return Ok(NodeEval::Const(FieldElement::one(), ObjectType::boolean())); - } else { - return Ok(NodeEval::Const(FieldElement::zero(), ObjectType::boolean())); - } + return Ok(NodeEval::Const( + FieldElement::from(lhs == rhs), + ObjectType::boolean(), + )); } } BinaryOp::Ne => { if self.lhs == self.rhs { return Ok(NodeEval::Const(FieldElement::zero(), ObjectType::boolean())); } else if let (Some(lhs), Some(rhs)) = (lhs, rhs) { - if lhs != rhs { - return Ok(NodeEval::Const(FieldElement::one(), ObjectType::boolean())); - } else { - return Ok(NodeEval::Const(FieldElement::zero(), ObjectType::boolean())); - } + return Ok(NodeEval::Const( + FieldElement::from(lhs != rhs), + ObjectType::boolean(), + )); } } BinaryOp::And => { From 4422bed143508e37cda6fb218f1cf62921fc0f6f Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 2 May 2023 16:37:39 +0100 Subject: [PATCH 31/66] feat: add integration tests for bitshift operators (#1272) * chore: add test for bitshift operators * chore: update error message to flag up that runtime shifts are not implemented yet * chore: split runtime and comptime bitshift tests * chore: replace `constrain` with `assert()` --- .../tests/test_data/bit_shifts_comptime/Nargo.toml | 5 +++++ .../tests/test_data/bit_shifts_comptime/Prover.toml | 1 + .../tests/test_data/bit_shifts_comptime/src/main.nr | 13 +++++++++++++ .../tests/test_data/bit_shifts_runtime/Nargo.toml | 5 +++++ 
.../tests/test_data/bit_shifts_runtime/Prover.toml | 2 ++ .../tests/test_data/bit_shifts_runtime/src/main.nr | 12 ++++++++++++ crates/nargo_cli/tests/test_data/config.toml | 2 +- .../src/ssa/acir_gen/operations/binary.rs | 2 +- 8 files changed, 40 insertions(+), 2 deletions(-) create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_comptime/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_comptime/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_comptime/src/main.nr create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Nargo.toml b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Prover.toml b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Prover.toml new file mode 100644 index 00000000000..cfd62c406cb --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/Prover.toml @@ -0,0 +1 @@ +x = 64 diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_comptime/src/main.nr b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/src/main.nr new file mode 100644 index 00000000000..c1c6890febb --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_comptime/src/main.nr @@ -0,0 +1,13 @@ +fn main(x: u64) { + let two: u64 = 2; + let three: u64 = 3; + + // comptime shifts on comptime values + assert(two << 2 == 8); + assert((two << 3) / 8 == two); + assert((three >> 1) == 1); + + // comptime shifts on runtime 
values + assert(x << 1 == 128); + assert(x >> 2 == 16); +} diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml new file mode 100644 index 00000000000..67bf6a6a234 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml @@ -0,0 +1,2 @@ +x = 64 +y = 1 diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr new file mode 100644 index 00000000000..903a5f35463 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr @@ -0,0 +1,12 @@ +fn main(x: u64, y: u64) { + // These are currently unimplemented and panic with "ShiftLeft and ShiftRight operations with shifts which are only known at runtime are not yet implemented." 
+ // See: https://github.com/noir-lang/noir/issues/1265 + + // runtime shifts on comptime values + assert(64 << y == 128); + assert(64 >> y == 32); + + // runtime shifts on runtime values + assert(x << y == 128); + assert(x >> y == 32); +} diff --git a/crates/nargo_cli/tests/test_data/config.toml b/crates/nargo_cli/tests/test_data/config.toml index 1c7536af5a2..80822d22375 100644 --- a/crates/nargo_cli/tests/test_data/config.toml +++ b/crates/nargo_cli/tests/test_data/config.toml @@ -2,7 +2,7 @@ # "1_mul", "2_div","3_add","4_sub","5_over", "6","6_array", "7_function","7","8_integration", "9_conditional", "10_slices", "assign_ex", "bool_not", "bool_or", "pedersen_check", "poseidonperm_x5_254", "poseidonsponge_x5_254", "pred_eq", "schnorr", "sha256", "tuples", # "array_len", "array_neq", "bit_and", "cast_bool", "comptime_array_access", "generics", "global_comptime", "main_bool_arg", "main_return", "merkle_insert", "modules", "modules_more", "scalar_mul", "simple_shield", "struct", "submodules", # Exclude "poseidonsponge_x5_254" and "sha2_byte" due to relatively long computation time and "sha2_blocks" due to very long computation time. -exclude = ["comptime_fail", "poseidonsponge_x5_254", "sha2_blocks", "sha2_byte"] +exclude = ["bit_shifts_runtime", "comptime_fail", "poseidonsponge_x5_254", "sha2_blocks", "sha2_byte"] # List of tests (as their directory name in test_data) expecting to fail: if the test pass, we report an error. 
diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs index 87280eb1fde..166a55b0d52 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/operations/binary.rs @@ -238,7 +238,7 @@ pub(crate) fn evaluate( }; InternalVar::from(bitwise_result) } - BinaryOp::Shl | BinaryOp::Shr(_) => unreachable!("ICE: ShiftLeft and ShiftRight are replaced by multiplications and divisions in optimization pass."), + BinaryOp::Shl | BinaryOp::Shr(_) => todo!("ShiftLeft and ShiftRight operations with shifts which are only known at runtime are not yet implemented."), i @ BinaryOp::Assign => unreachable!("Invalid Instruction: {:?}", i), }; Some(binary_output) From f144391b4295b127f3f422e862a087a90dac1dbf Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 2 May 2023 17:36:22 +0100 Subject: [PATCH 32/66] feat(nargo): Remove usage of `CompiledProgram` in CLI code and use separate ABI/bytecode (#1269) * feat: pass bytecode and abi separately in CLI code * chore: move throwing of error on invalid proof into `verify_with_path` --- crates/nargo_cli/src/cli/execute_cmd.rs | 19 ++++----- crates/nargo_cli/src/cli/mod.rs | 1 - crates/nargo_cli/src/cli/prove_cmd.rs | 38 ++++++++---------- crates/nargo_cli/src/cli/verify_cmd.rs | 51 ++++++++----------------- 4 files changed, 40 insertions(+), 69 deletions(-) diff --git a/crates/nargo_cli/src/cli/execute_cmd.rs b/crates/nargo_cli/src/cli/execute_cmd.rs index adeefc860a5..b9b2be3febf 100644 --- a/crates/nargo_cli/src/cli/execute_cmd.rs +++ b/crates/nargo_cli/src/cli/execute_cmd.rs @@ -1,9 +1,10 @@ use std::path::Path; +use acvm::acir::circuit::Circuit; use acvm::PartialWitnessGenerator; use clap::Args; use noirc_abi::input_parser::{Format, InputValue}; -use noirc_abi::{InputMap, WitnessMap}; +use noirc_abi::{Abi, InputMap, WitnessMap}; use 
noirc_driver::{CompileOptions, CompiledProgram}; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; @@ -48,15 +49,15 @@ fn execute_with_path( ) -> Result<(Option, WitnessMap), CliError> { let backend = crate::backends::ConcreteBackend::default(); - let compiled_program = compile_circuit(&backend, program_dir, compile_options)?; + let CompiledProgram { abi, circuit } = compile_circuit(&backend, program_dir, compile_options)?; // Parse the initial witness values from Prover.toml let (inputs_map, _) = - read_inputs_from_file(program_dir, PROVER_INPUT_FILE, Format::Toml, &compiled_program.abi)?; + read_inputs_from_file(program_dir, PROVER_INPUT_FILE, Format::Toml, &abi)?; - let solved_witness = execute_program(&backend, &compiled_program, &inputs_map)?; + let solved_witness = execute_program(&backend, circuit, &abi, &inputs_map)?; - let public_abi = compiled_program.abi.public_abi(); + let public_abi = abi.public_abi(); let (_, return_value) = public_abi.decode(&solved_witness)?; Ok((return_value, solved_witness)) @@ -64,13 +65,13 @@ fn execute_with_path( pub(crate) fn execute_program( backend: &impl PartialWitnessGenerator, - compiled_program: &CompiledProgram, + circuit: Circuit, + abi: &Abi, inputs_map: &InputMap, ) -> Result { - let initial_witness = compiled_program.abi.encode(inputs_map, None)?; + let initial_witness = abi.encode(inputs_map, None)?; - let solved_witness = - nargo::ops::execute_circuit(backend, compiled_program.circuit.clone(), initial_witness)?; + let solved_witness = nargo::ops::execute_circuit(backend, circuit, initial_witness)?; Ok(solved_witness) } diff --git a/crates/nargo_cli/src/cli/mod.rs b/crates/nargo_cli/src/cli/mod.rs index 2bb92925e59..ef54682ab50 100644 --- a/crates/nargo_cli/src/cli/mod.rs +++ b/crates/nargo_cli/src/cli/mod.rs @@ -1,6 +1,5 @@ use clap::{Args, Parser, Subcommand}; use const_format::formatcp; -use noirc_abi::InputMap; use noirc_driver::CompileOptions; use std::path::{Path, PathBuf}; diff 
--git a/crates/nargo_cli/src/cli/prove_cmd.rs b/crates/nargo_cli/src/cli/prove_cmd.rs index cecdee23fee..2b9ed4b067a 100644 --- a/crates/nargo_cli/src/cli/prove_cmd.rs +++ b/crates/nargo_cli/src/cli/prove_cmd.rs @@ -2,9 +2,9 @@ use std::path::{Path, PathBuf}; use clap::Args; use nargo::artifacts::program::PreprocessedProgram; -use nargo::ops::{preprocess_program, prove_execution}; +use nargo::ops::{preprocess_program, prove_execution, verify_proof}; use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram}; +use noirc_driver::CompileOptions; use super::NargoConfig; use super::{ @@ -16,7 +16,7 @@ use super::{ }, }; use crate::{ - cli::{execute_cmd::execute_program, verify_cmd::verify_proof}, + cli::execute_cmd::execute_program, constants::{PROOFS_DIR, PROVER_INPUT_FILE, TARGET_DIR, VERIFIER_INPUT_FILE}, errors::CliError, }; @@ -78,20 +78,15 @@ pub(crate) fn prove_with_path>( let PreprocessedProgram { abi, bytecode, proving_key, verification_key, .. } = preprocessed_program; - let compiled_program = CompiledProgram { abi, circuit: bytecode }; // Parse the initial witness values from Prover.toml - let (inputs_map, _) = read_inputs_from_file( - &program_dir, - PROVER_INPUT_FILE, - Format::Toml, - &compiled_program.abi, - )?; + let (inputs_map, _) = + read_inputs_from_file(&program_dir, PROVER_INPUT_FILE, Format::Toml, &abi)?; - let solved_witness = execute_program(&backend, &compiled_program, &inputs_map)?; + let solved_witness = execute_program(&backend, bytecode.clone(), &abi, &inputs_map)?; // Write public inputs into Verifier.toml - let public_abi = compiled_program.abi.clone().public_abi(); + let public_abi = abi.public_abi(); let (public_inputs, return_value) = public_abi.decode(&solved_witness)?; write_inputs_to_file( @@ -102,19 +97,16 @@ pub(crate) fn prove_with_path>( Format::Toml, )?; - let proof = prove_execution(&backend, &compiled_program.circuit, solved_witness, &proving_key)?; + let proof = prove_execution(&backend, 
&bytecode, solved_witness, &proving_key)?; if check_proof { - let no_proof_name = "".into(); - verify_proof( - &backend, - &compiled_program, - public_inputs, - return_value, - &proof, - &verification_key, - no_proof_name, - )?; + let public_inputs = public_abi.encode(&public_inputs, return_value)?; + let valid_proof = + verify_proof(&backend, &bytecode, &proof, public_inputs, &verification_key)?; + + if !valid_proof { + return Err(CliError::InvalidProof("".into())); + } } let proof_path = if let Some(proof_name) = proof_name { diff --git a/crates/nargo_cli/src/cli/verify_cmd.rs b/crates/nargo_cli/src/cli/verify_cmd.rs index 07b7e351ee9..84a6416d44e 100644 --- a/crates/nargo_cli/src/cli/verify_cmd.rs +++ b/crates/nargo_cli/src/cli/verify_cmd.rs @@ -1,16 +1,16 @@ use super::compile_cmd::compile_circuit; use super::fs::{inputs::read_inputs_from_file, load_hex_data, program::read_program_from_file}; -use super::{InputMap, NargoConfig}; +use super::NargoConfig; use crate::{ constants::{PROOFS_DIR, PROOF_EXT, TARGET_DIR, VERIFIER_INPUT_FILE}, errors::CliError, }; -use acvm::ProofSystemCompiler; + use clap::Args; use nargo::artifacts::program::PreprocessedProgram; use nargo::ops::preprocess_program; -use noirc_abi::input_parser::{Format, InputValue}; -use noirc_driver::{CompileOptions, CompiledProgram}; +use noirc_abi::input_parser::Format; +use noirc_driver::CompileOptions; use std::path::{Path, PathBuf}; /// Given a proof and a program, verify whether the proof is valid @@ -34,7 +34,12 @@ pub(crate) fn run(args: VerifyCommand, config: NargoConfig) -> Result<(), CliErr .circuit_name .map(|circuit_name| config.program_dir.join(TARGET_DIR).join(circuit_name)); - verify_with_path(config.program_dir, proof_path, circuit_build_path, args.compile_options) + verify_with_path( + &config.program_dir, + proof_path, + circuit_build_path.as_ref(), + args.compile_options, + ) } fn verify_with_path>( @@ -55,47 +60,21 @@ fn verify_with_path>( }; let PreprocessedProgram { abi, 
bytecode, verification_key, .. } = preprocessed_program; - let compiled_program = CompiledProgram { abi, circuit: bytecode }; // Load public inputs (if any) from `VERIFIER_INPUT_FILE`. - let public_abi = compiled_program.abi.clone().public_abi(); + let public_abi = abi.public_abi(); let (public_inputs_map, return_value) = read_inputs_from_file(program_dir, VERIFIER_INPUT_FILE, Format::Toml, &public_abi)?; - verify_proof( - &backend, - &compiled_program, - public_inputs_map, - return_value, - &load_hex_data(&proof_path)?, - &verification_key, - proof_path, - ) -} - -pub(crate) fn verify_proof( - backend: &impl ProofSystemCompiler, - compiled_program: &CompiledProgram, - public_inputs_map: InputMap, - return_value: Option, - proof: &[u8], - verification_key: &[u8], - proof_name: PathBuf, -) -> Result<(), CliError> { - let public_abi = compiled_program.abi.clone().public_abi(); let public_inputs = public_abi.encode(&public_inputs_map, return_value)?; + let proof = load_hex_data(&proof_path)?; - let valid_proof = nargo::ops::verify_proof( - backend, - &compiled_program.circuit, - proof, - public_inputs, - verification_key, - )?; + let valid_proof = + nargo::ops::verify_proof(&backend, &bytecode, &proof, public_inputs, &verification_key)?; if valid_proof { Ok(()) } else { - Err(CliError::InvalidProof(proof_name)) + Err(CliError::InvalidProof(proof_path)) } } From 144ebf51522fb19847be28de5595247051fcd92e Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Wed, 3 May 2023 08:50:48 +0100 Subject: [PATCH 33/66] feat(ssa refactor): Implement dominator tree (#1278) * feat(ssa refactor): dominator tree * reorder functions * chore(ssa refactor): use true post order * chore(ssa refactor): externalise PostOrder * chore(ssa refactor): use FunctionBuilder in tests * chore(ssa refactor): add header comments * chore(ssa refactor): clippy * chore(ssa refactor): elaborate header comments * chore(ssa refactor): domtree/post-order cleanup: - 
Comment tweaks - rm unneeded collect iter into vec - clippy changes - DominatorTree::with_function helper - rename DomNode DominatorTreeNode * chore(ssa refactor): use domtree helper in tests * fix: update usage of cfg.pred_iter to cfg.predecessors * chore(saa refactor): tidy up test and naming --- crates/noirc_evaluator/src/ssa_refactor/ir.rs | 2 + .../src/ssa_refactor/ir/basic_block.rs | 4 +- .../src/ssa_refactor/ir/dom.rs | 433 ++++++++++++++++++ .../src/ssa_refactor/ir/post_order.rs | 163 +++++++ .../src/ssa_refactor/ssa_gen/program.rs | 2 +- 5 files changed, 602 insertions(+), 2 deletions(-) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/post_order.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa_refactor/ir.rs index 1f6cca9157d..d52f380d3d4 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir.rs @@ -2,9 +2,11 @@ pub(crate) mod basic_block; pub(crate) mod cfg; pub(crate) mod constant; pub(crate) mod dfg; +pub(crate) mod dom; pub(crate) mod function; pub(crate) mod instruction; pub(crate) mod map; +pub(crate) mod post_order; pub(crate) mod printer; pub(crate) mod types; pub(crate) mod value; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index 8a3f74c4a64..e8b09f518d8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -76,7 +76,9 @@ impl BasicBlock { /// Iterate over all the successors of the currently block, as determined by /// the blocks jumped to in the terminator instruction. If there is no terminator /// instruction yet, this will iterate 0 times. 
- pub(crate) fn successors(&self) -> impl ExactSizeIterator { + pub(crate) fn successors( + &self, + ) -> impl ExactSizeIterator + DoubleEndedIterator { match &self.terminator { Some(TerminatorInstruction::Jmp { destination, .. }) => vec![*destination].into_iter(), Some(TerminatorInstruction::JmpIf { then_destination, else_destination, .. }) => { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs new file mode 100644 index 00000000000..9a0916f62c8 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs @@ -0,0 +1,433 @@ +//! The dominator tree of a function, represented as a hash map of each reachable block id to its +//! immediate dominator. +//! +//! Dominator trees are useful for tasks such as identifying back-edges in loop analysis or +//! calculating dominance frontiers. + +use std::{cmp::Ordering, collections::HashMap}; + +use super::{ + basic_block::BasicBlockId, cfg::ControlFlowGraph, function::Function, post_order::PostOrder, +}; + +/// Dominator tree node. We keep one of these per reachable block. +#[derive(Clone, Default)] +struct DominatorTreeNode { + /// The block's idx in the control flow graph's reverse post-order + reverse_post_order_idx: u32, + + /// The block that immediately dominated that of the node in question. + /// + /// This will be None for the entry block, which has no immediate dominator. + immediate_dominator: Option, +} + +impl DominatorTreeNode { + /// Updates the immediate dominator estimate, returning true if it has changed. + /// + /// This is used internally as a shorthand during `compute_dominator_tree`. + pub(self) fn update_estimate(&mut self, immediate_dominator: BasicBlockId) -> bool { + let immediate_dominator = Some(immediate_dominator); + if self.immediate_dominator == immediate_dominator { + false + } else { + self.immediate_dominator = immediate_dominator; + true + } + } +} + +/// The dominator tree for a single function. 
+pub(crate) struct DominatorTree { + /// The nodes of the dominator tree + /// + /// After dominator tree computation has complete, this will contain a node for every + /// reachable block, and no nodes for unreachable blocks. + nodes: HashMap, +} + +/// Methods for querying the dominator tree. +impl DominatorTree { + /// Is `block_id` reachable from the entry block? + pub(crate) fn is_reachable(&self, block_id: BasicBlockId) -> bool { + self.nodes.contains_key(&block_id) + } + + /// Returns the immediate dominator of `block_id`. + /// + /// A block is said to *dominate* `block_id` if all control flow paths from the function + /// entry to `block_id` must go through the block. + /// + /// The *immediate dominator* is the dominator that is closest to `block_id`. All other + /// dominators also dominate the immediate dominator. + /// + /// This returns `None` if `block_id` is not reachable from the entry block, or if it is the + /// entry block which has no dominators. + pub(crate) fn immediate_dominator(&self, block_id: BasicBlockId) -> Option { + self.nodes.get(&block_id).and_then(|node| node.immediate_dominator) + } + + /// Compare two blocks relative to the reverse post-order. + pub(crate) fn reverse_post_order_cmp(&self, a: BasicBlockId, b: BasicBlockId) -> Ordering { + match (self.nodes.get(&a), self.nodes.get(&b)) { + (Some(a), Some(b)) => a.reverse_post_order_idx.cmp(&b.reverse_post_order_idx), + _ => unreachable!("Post order for unreachable block is undefined"), + } + } + + /// Returns `true` if `block_a_id` dominates `block_b_id`. + /// + /// This means that every control-flow path from the function entry to `block_b_id` must go + /// through `block_a_id`. + /// + /// This function panics if either of the blocks are unreachable. + /// + /// An instruction is considered to dominate itself. + pub(crate) fn dominates(&self, block_a_id: BasicBlockId, mut block_b_id: BasicBlockId) -> bool { + // Walk up the dominator tree from "b" until we encounter or pass "a". 
Doing the + // comparison on the reverse post-order may allows to test whether we have passed "a" + // without waiting until we reach the root of the tree. + loop { + match self.reverse_post_order_cmp(block_a_id, block_b_id) { + Ordering::Less => { + block_b_id = match self.immediate_dominator(block_b_id) { + Some(immediate_dominator) => immediate_dominator, + None => return false, // a is unreachable, so we climbed past the entry + } + } + Ordering::Greater => return false, + Ordering::Equal => return true, + } + } + } + + /// Allocate and compute a dominator tree from a pre-computed control flow graph and + /// post-order counterpart. + pub(crate) fn with_cfg_and_post_order(cfg: &ControlFlowGraph, post_order: &PostOrder) -> Self { + let mut dom_tree = DominatorTree { nodes: HashMap::new() }; + dom_tree.compute_dominator_tree(cfg, post_order); + dom_tree + } + + /// Allocate and compute a dominator tree for the given function. + /// + /// This approach computes the control flow graph and post-order internally and then + /// discards them. If either should be retained reuse it is better to instead pre-compute them + /// and build the dominator tree with `DominatorTree::with_cfg_and_post_order`. + pub(crate) fn with_function(func: &Function) -> Self { + let cfg = ControlFlowGraph::with_function(func); + let post_order = PostOrder::with_function(func); + Self::with_cfg_and_post_order(&cfg, &post_order) + } + + /// Build a dominator tree from a control flow graph using Keith D. Cooper's + /// "Simple, Fast Dominator Algorithm." + fn compute_dominator_tree(&mut self, cfg: &ControlFlowGraph, post_order: &PostOrder) { + // We'll be iterating over a reverse post-order of the CFG, skipping the entry block. + let (entry_block_id, entry_free_post_order) = post_order + .as_slice() + .split_last() + .expect("ICE: functions always have at least one block"); + + // Do a first pass where we assign reverse post-order indices to all reachable nodes. 
The + // entry block will be the only node with no immediate dominator. + self.nodes.insert( + *entry_block_id, + DominatorTreeNode { reverse_post_order_idx: 0, immediate_dominator: None }, + ); + for (i, &block_id) in entry_free_post_order.iter().rev().enumerate() { + // Indices have been displaced by 1 by the removal of the entry node + let reverse_post_order_idx = i as u32 + 1; + + // Due to the nature of the post-order traversal, every node we visit will have at + // least one predecessor that has previously been assigned during this loop. + let immediate_dominator = self.compute_immediate_dominator(block_id, cfg); + self.nodes.insert( + block_id, + DominatorTreeNode { + immediate_dominator: Some(immediate_dominator), + reverse_post_order_idx, + }, + ); + } + + // Now that we have reverse post-order indices for everything and initial immediate + // dominator estimates, iterate until convergence. + // + // If the function is free of irreducible control flow, this will exit after one iteration. + let mut changed = true; + while changed { + changed = false; + for &block_id in entry_free_post_order.iter().rev() { + let immediate_dominator = self.compute_immediate_dominator(block_id, cfg); + changed = self + .nodes + .get_mut(&block_id) + .expect("Assigned in first pass") + .update_estimate(immediate_dominator); + } + } + } + + // Compute the immediate dominator for `block_id` using the pre-calculate immediate dominators + // of reachable nodes. + fn compute_immediate_dominator( + &self, + block_id: BasicBlockId, + cfg: &ControlFlowGraph, + ) -> BasicBlockId { + // Get an iterator with just the reachable, already visited predecessors to `block_id`. + // Note that during the first pass `node` was pre-populated with all reachable blocks. 
+ let mut reachable_predecessors = + cfg.predecessors(block_id).filter(|pred_id| self.nodes.contains_key(pred_id)); + + // This function isn't called on unreachable blocks or the entry block, so the reverse + // post-order will contain at least one predecessor to this block. + let mut immediate_dominator = + reachable_predecessors.next().expect("block node must have one reachable predecessor"); + + for predecessor in reachable_predecessors { + immediate_dominator = self.common_dominator(immediate_dominator, predecessor); + } + + immediate_dominator + } + + /// Compute the common dominator of two basic blocks. + /// + /// Both basic blocks are assumed to be reachable. + fn common_dominator( + &self, + mut block_a_id: BasicBlockId, + mut block_b_id: BasicBlockId, + ) -> BasicBlockId { + loop { + match self.reverse_post_order_cmp(block_a_id, block_b_id) { + Ordering::Less => { + // "a" comes before "b" in the reverse post-order. Move "b" up. + block_b_id = self.nodes[&block_b_id] + .immediate_dominator + .expect("Unreachable basic block?"); + } + Ordering::Greater => { + // "b" comes before "a" in the reverse post-order. Move "a" up. 
+ block_a_id = self.nodes[&block_a_id] + .immediate_dominator + .expect("Unreachable basic block?"); + } + Ordering::Equal => break, + } + } + + debug_assert_eq!(block_a_id, block_b_id, "Unreachable block passed to common_dominator?"); + block_a_id + } +} + +#[cfg(test)] +mod tests { + use std::cmp::Ordering; + + use crate::ssa_refactor::{ + ir::{ + basic_block::BasicBlockId, dom::DominatorTree, function::Function, + instruction::TerminatorInstruction, map::Id, types::Type, + }, + ssa_builder::FunctionBuilder, + }; + + #[test] + fn empty() { + let func_id = Id::test_new(0); + let mut func = Function::new("func".into(), func_id); + let block0_id = func.entry_block(); + func.dfg.set_block_terminator( + block0_id, + TerminatorInstruction::Return { return_values: vec![] }, + ); + let dom_tree = DominatorTree::with_function(&func); + assert!(dom_tree.dominates(block0_id, block0_id)); + } + + // Testing setup for a function with an unreachable block2 + fn unreachable_node_setup( + ) -> (DominatorTree, BasicBlockId, BasicBlockId, BasicBlockId, BasicBlockId) { + // func() { + // block0(cond: u1): + // jmpif v0 block2() block3() + // block1(): + // jmp block2() + // block2(): + // jmp block3() + // block3(): + // return () + // } + let func_id = Id::test_new(0); + let mut builder = FunctionBuilder::new("func".into(), func_id); + + let cond = builder.add_parameter(Type::unsigned(1)); + let block1_id = builder.insert_block(); + let block2_id = builder.insert_block(); + let block3_id = builder.insert_block(); + + builder.terminate_with_jmpif(cond, block2_id, block3_id); + builder.switch_to_block(block1_id); + builder.terminate_with_jmp(block2_id, vec![]); + builder.switch_to_block(block2_id); + builder.terminate_with_jmp(block3_id, vec![]); + builder.switch_to_block(block3_id); + builder.terminate_with_return(vec![]); + + let ssa = builder.finish(); + let func = ssa.functions.first().unwrap(); + let block0_id = func.entry_block(); + + let dt = 
DominatorTree::with_function(func); + (dt, block0_id, block1_id, block2_id, block3_id) + } + + // Expected dominator tree + // block0 { + // block2 + // block3 + // } + + // Dominance matrix + // ✓: Row item dominates column item + // !: Querying row item's dominance of column item panics. (i.e. invalid) + // b0 b1 b2 b3 + // b0 ✓ ! ✓ ✓ + // b1 ! ! ! ! + // b2 ! ✓ + // b3 ! ✓ + // Note that from a local view block 1 dominates blocks 1,2 & 3, but since this block is + // unreachable, performing this query indicates an internal compiler error. + #[test] + fn unreachable_node_asserts() { + let (dt, b0, _b1, b2, b3) = unreachable_node_setup(); + + assert!(dt.dominates(b0, b0)); + assert!(dt.dominates(b0, b2)); + assert!(dt.dominates(b0, b3)); + + assert!(!dt.dominates(b2, b0)); + assert!(dt.dominates(b2, b2)); + assert!(!dt.dominates(b2, b3)); + + assert!(!dt.dominates(b3, b0)); + assert!(!dt.dominates(b3, b2)); + assert!(dt.dominates(b3, b3)); + } + + #[test] + #[should_panic] + fn unreachable_node_panic_b0_b1() { + let (dt, b0, b1, _b2, _b3) = unreachable_node_setup(); + dt.dominates(b0, b1); + } + + #[test] + #[should_panic] + fn unreachable_node_panic_b1_b0() { + let (dt, b0, b1, _b2, _b3) = unreachable_node_setup(); + dt.dominates(b1, b0); + } + + #[test] + #[should_panic] + fn unreachable_node_panic_b1_b1() { + let (dt, _b0, b1, _b2, _b3) = unreachable_node_setup(); + dt.dominates(b1, b1); + } + + #[test] + #[should_panic] + fn unreachable_node_panic_b1_b2() { + let (dt, _b0, b1, b2, _b3) = unreachable_node_setup(); + dt.dominates(b1, b2); + } + + #[test] + #[should_panic] + fn unreachable_node_panic_b1_b3() { + let (dt, _b0, b1, _b2, b3) = unreachable_node_setup(); + dt.dominates(b1, b3); + } + + #[test] + #[should_panic] + fn unreachable_node_panic_b3_b1() { + let (dt, _b0, b1, b2, _b3) = unreachable_node_setup(); + dt.dominates(b2, b1); + } + + #[test] + fn backwards_layout() { + // func { + // block0(): + // jmp block2() + // block1(): + // return () + // 
block2(): + // jump block1() + // } + let func_id = Id::test_new(0); + let mut builder = FunctionBuilder::new("func".into(), func_id); + let block1_id = builder.insert_block(); + let block2_id = builder.insert_block(); + + builder.terminate_with_jmp(block2_id, vec![]); + builder.switch_to_block(block1_id); + builder.terminate_with_return(vec![]); + builder.switch_to_block(block2_id); + builder.terminate_with_jmp(block1_id, vec![]); + + let ssa = builder.finish(); + let func = ssa.functions.first().unwrap(); + let block0_id = func.entry_block(); + + let dt = DominatorTree::with_function(func); + + // Expected dominance tree: + // block0 { + // block2 { + // block1 + // } + // } + + assert_eq!(dt.immediate_dominator(block0_id), None); + assert_eq!(dt.immediate_dominator(block1_id), Some(block2_id)); + assert_eq!(dt.immediate_dominator(block2_id), Some(block0_id)); + + assert_eq!(dt.reverse_post_order_cmp(block0_id, block0_id), Ordering::Equal); + assert_eq!(dt.reverse_post_order_cmp(block0_id, block1_id), Ordering::Less); + assert_eq!(dt.reverse_post_order_cmp(block0_id, block2_id), Ordering::Less); + + assert_eq!(dt.reverse_post_order_cmp(block1_id, block0_id), Ordering::Greater); + assert_eq!(dt.reverse_post_order_cmp(block1_id, block1_id), Ordering::Equal); + assert_eq!(dt.reverse_post_order_cmp(block1_id, block2_id), Ordering::Greater); + + assert_eq!(dt.reverse_post_order_cmp(block2_id, block0_id), Ordering::Greater); + assert_eq!(dt.reverse_post_order_cmp(block2_id, block1_id), Ordering::Less); + assert_eq!(dt.reverse_post_order_cmp(block2_id, block2_id), Ordering::Equal); + + // Dominance matrix: + // ✓: Row item dominates column item + // b0 b1 b2 + // b0 ✓ ✓ ✓ + // b1 ✓ + // b2 ✓ ✓ + + assert!(dt.dominates(block0_id, block0_id)); + assert!(dt.dominates(block0_id, block1_id)); + assert!(dt.dominates(block0_id, block2_id)); + + assert!(!dt.dominates(block1_id, block0_id)); + assert!(dt.dominates(block1_id, block1_id)); + assert!(!dt.dominates(block1_id, 
block2_id)); + + assert!(!dt.dominates(block2_id, block0_id)); + assert!(dt.dominates(block2_id, block1_id)); + assert!(dt.dominates(block2_id, block2_id)); + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/post_order.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/post_order.rs new file mode 100644 index 00000000000..984f10a64af --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/post_order.rs @@ -0,0 +1,163 @@ +//! The post-order for a given function represented as a vector of basic block ids. +//! +//! This ordering is beneficial to the efficiency of various algorithms, such as those for dead +//! code elimination and calculating dominance trees. + +use std::collections::HashSet; + +use crate::ssa_refactor::ir::{basic_block::BasicBlockId, function::Function}; + +/// Depth-first traversal stack state marker for computing the cfg post-order. +enum Visit { + First, + Last, +} + +pub(crate) struct PostOrder(Vec); + +impl PostOrder { + pub(crate) fn as_slice(&self) -> &[BasicBlockId] { + self.0.as_slice() + } +} + +impl PostOrder { + /// Allocate and compute a function's block post-order. Pos + pub(crate) fn with_function(func: &Function) -> Self { + PostOrder(Self::compute_post_order(func)) + } + + // Computes the post-order of the function by doing a depth-first traversal of the + // function's entry block's previously unvisited children. Each block is sequenced according + // to when the traversal exits it. + fn compute_post_order(func: &Function) -> Vec { + let mut stack = vec![(Visit::First, func.entry_block())]; + let mut visited: HashSet = HashSet::new(); + let mut post_order: Vec = Vec::new(); + + while let Some((visit, block_id)) = stack.pop() { + match visit { + Visit::First => { + if !visited.contains(&block_id) { + // This is the first time we pop the block, so we need to scan its + // successors and then revisit it. + visited.insert(block_id); + stack.push((Visit::Last, block_id)); + // Stack successors for visiting. 
Because items are taken from the top of the + // stack, we push the item that's due for a visit first to the top. + for successor_id in func.dfg[block_id].successors().rev() { + if !visited.contains(&successor_id) { + // This not visited check would also be cover by the the next + // iteration, but checking here two saves an iteration per successor. + stack.push((Visit::First, successor_id)); + } + } + } + } + + Visit::Last => { + // We've finished all this node's successors. + post_order.push(block_id); + } + } + } + post_order + } +} + +#[cfg(test)] +mod tests { + use crate::ssa_refactor::ir::{ + function::Function, instruction::TerminatorInstruction, map::Id, post_order::PostOrder, + types::Type, + }; + + #[test] + fn single_block() { + let func_id = Id::test_new(0); + let func = Function::new("func".into(), func_id); + let post_order = PostOrder::with_function(&func); + assert_eq!(post_order.0, [func.entry_block()]); + } + + #[test] + fn arb_graph_with_unreachable() { + // A → B C + // ↓ ↗ ↓ ↓ + // D ← E → F + // (`A` is entry block) + // Expected post-order working: + // A { + // B { + // E { + // D { + // B (seen) + // } -> push(D) + // F { + // } -> push(F) + // } -> push(E) + // } -> push(B) + // D (seen) + // } -> push(A) + // Result: + // D, F, E, B, A, (C dropped as unreachable) + + let func_id = Id::test_new(0); + let mut func = Function::new("func".into(), func_id); + let block_a_id = func.entry_block(); + let block_b_id = func.dfg.make_block(); + let block_c_id = func.dfg.make_block(); + let block_d_id = func.dfg.make_block(); + let block_e_id = func.dfg.make_block(); + let block_f_id = func.dfg.make_block(); + + // A → B • + // ↓ + // D • • + let cond_a = func.dfg.add_block_parameter(block_a_id, Type::unsigned(1)); + func.dfg.set_block_terminator( + block_a_id, + TerminatorInstruction::JmpIf { + condition: cond_a, + then_destination: block_b_id, + else_destination: block_d_id, + }, + ); + // • B • + // • ↓ • + // • E • + 
func.dfg.set_block_terminator( + block_b_id, + TerminatorInstruction::Jmp { destination: block_e_id, arguments: vec![] }, + ); + // • • • + // + // D ← E → F + let cond_e = func.dfg.add_block_parameter(block_e_id, Type::unsigned(1)); + func.dfg.set_block_terminator( + block_e_id, + TerminatorInstruction::JmpIf { + condition: cond_e, + then_destination: block_d_id, + else_destination: block_f_id, + }, + ); + // • B • + // ↗ + // D • • + func.dfg.set_block_terminator( + block_d_id, + TerminatorInstruction::Jmp { destination: block_b_id, arguments: vec![] }, + ); + // • • C + // • • ↓ + // • • F + func.dfg.set_block_terminator( + block_c_id, + TerminatorInstruction::Jmp { destination: block_f_id, arguments: vec![] }, + ); + + let post_order = PostOrder::with_function(&func); + assert_eq!(post_order.0, [block_d_id, block_f_id, block_e_id, block_b_id, block_a_id]); + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs index 99d49456210..de4f01fc613 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs @@ -4,7 +4,7 @@ use crate::ssa_refactor::ir::function::Function; /// Contains the entire SSA representation of the program. 
pub struct Ssa { - functions: Vec, + pub functions: Vec, } impl Ssa { From 9f434507fa431a9dbf4130374b866a5de6176d76 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Wed, 3 May 2023 12:01:42 +0200 Subject: [PATCH 34/66] feat: enable dynamic arrays (#1271) * enable dynamic arrays * code review: add a new test for dynamic arrays --- .../tests/test_data/6_array/src/main.nr | 17 ------------ .../tests/test_data/array_dynamic/Nargo.toml | 5 ++++ .../tests/test_data/array_dynamic/Prover.toml | 5 ++++ .../tests/test_data/array_dynamic/src/main.nr | 20 ++++++++++++++ .../noirc_frontend/src/hir/type_check/expr.rs | 26 ++++--------------- .../noirc_frontend/src/hir/type_check/stmt.rs | 21 ++++++--------- 6 files changed, 43 insertions(+), 51 deletions(-) create mode 100644 crates/nargo_cli/tests/test_data/array_dynamic/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/array_dynamic/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr diff --git a/crates/nargo_cli/tests/test_data/6_array/src/main.nr b/crates/nargo_cli/tests/test_data/6_array/src/main.nr index 30d3ab5a22f..9593c56524f 100644 --- a/crates/nargo_cli/tests/test_data/6_array/src/main.nr +++ b/crates/nargo_cli/tests/test_data/6_array/src/main.nr @@ -1,7 +1,6 @@ //Basic tests for arrays fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { let mut c = 2301; - let _idx = (z - 5*t - 5) as Field; z = y[4]; //Test 1: for i in 0..5 { @@ -51,21 +50,5 @@ fn main(x: [u32; 5], y: [u32; 5], mut z: u32, t: u32) { assert(x_elem != y_elem); } } - - //dynamic array test - TODO uncomment the call below when activating dynamic arrays - //dyn_array(x, idx, idx - 3); } -// fn dyn_array(mut x: [u32; 5], y: Field, z: Field) { -// assert(x[y] == 111); -// assert(x[z] == 101); -// x[z] = 0; -// assert(x[y] == 111); -// assert(x[1] == 0); -// if y as u32 < 10 { -// x[y] = x[y] - 2; -// } else { -// x[y] = 0; -// } -// assert(x[4] == 109); -// } 
diff --git a/crates/nargo_cli/tests/test_data/array_dynamic/Nargo.toml b/crates/nargo_cli/tests/test_data/array_dynamic/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- /dev/null +++ b/crates/nargo_cli/tests/test_data/array_dynamic/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/array_dynamic/Prover.toml b/crates/nargo_cli/tests/test_data/array_dynamic/Prover.toml new file mode 100644 index 00000000000..1e652e28d1c --- /dev/null +++ b/crates/nargo_cli/tests/test_data/array_dynamic/Prover.toml @@ -0,0 +1,5 @@ +x = [104, 101, 108, 108, 111] +z = "59" +t = "10" + + diff --git a/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr b/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr new file mode 100644 index 00000000000..73dc162eb1b --- /dev/null +++ b/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr @@ -0,0 +1,20 @@ + +fn main(x: [u32; 5], mut z: u32, t: u32) { + let idx = (z - 5*t - 5) as Field; + //dynamic array test + dyn_array(x, idx, idx - 3); +} + +fn dyn_array(mut x: [u32; 5], y: Field, z: Field) { + constrain x[y] == 111; + constrain x[z] == 101; + x[z] = 0; + constrain x[y] == 111; + constrain x[1] == 0; + if y as u32 < 10 { + x[y] = x[y] - 2; + } else { + x[y] = 0; + } + constrain x[4] == 109; +} \ No newline at end of file diff --git a/crates/noirc_frontend/src/hir/type_check/expr.rs b/crates/noirc_frontend/src/hir/type_check/expr.rs index aba44e36d2c..1929af8d223 100644 --- a/crates/noirc_frontend/src/hir/type_check/expr.rs +++ b/crates/noirc_frontend/src/hir/type_check/expr.rs @@ -252,29 +252,13 @@ impl<'interner> TypeChecker<'interner> { let index_type = self.check_expression(&index_expr.index); let span = self.interner.expr_span(&index_expr.index); - self.unify(&index_type, &Type::comp_time(Some(span)), span, || { - // Specialize the error in the case the user has a Field, just not a 
`comptime` one. - if matches!(index_type, Type::FieldElement(..)) { - TypeCheckError::Unstructured { - msg: format!("Array index must be known at compile-time, but here a non-comptime {index_type} was used instead"), - span, - } - } else { - TypeCheckError::TypeMismatch { - expected_typ: "comptime Field".to_owned(), - expr_typ: index_type.to_string(), - expr_span: span, - } + index_type.make_subtype_of(&Type::field(Some(span)), span, &mut self.errors, || { + TypeCheckError::TypeMismatch { + expected_typ: "Field".to_owned(), + expr_typ: index_type.to_string(), + expr_span: span, } }); - // TODO: replace the above by the below in order to activate dynamic arrays - // index_type.make_subtype_of(&Type::field(Some(span)), span, errors, || { - // TypeCheckError::TypeMismatch { - // expected_typ: "Field".to_owned(), - // expr_typ: index_type.to_string(), - // expr_span: span, - // } - // }); let lhs_type = self.check_expression(&index_expr.collection); match lhs_type { diff --git a/crates/noirc_frontend/src/hir/type_check/stmt.rs b/crates/noirc_frontend/src/hir/type_check/stmt.rs index c5ad7011414..ccb35070a36 100644 --- a/crates/noirc_frontend/src/hir/type_check/stmt.rs +++ b/crates/noirc_frontend/src/hir/type_check/stmt.rs @@ -170,21 +170,16 @@ impl<'interner> TypeChecker<'interner> { let index_type = self.check_expression(&index); let expr_span = self.interner.expr_span(&index); - self.unify(&index_type, &Type::comp_time(Some(expr_span)), expr_span, || { - TypeCheckError::TypeMismatch { - expected_typ: "comptime Field".to_owned(), + index_type.make_subtype_of( + &Type::field(Some(expr_span)), + expr_span, + &mut self.errors, + || TypeCheckError::TypeMismatch { + expected_typ: "Field".to_owned(), expr_typ: index_type.to_string(), expr_span, - } - }); - //TODO replace the above by the below in order to activate dynamic arrays - // index_type.make_subtype_of(&Type::field(Some(expr_span)), expr_span, || { - // TypeCheckError::TypeMismatch { - // expected_typ: 
"Field".to_owned(), - // expr_typ: index_type.to_string(), - // expr_span, - // } - // }); + }, + ); let (result, array) = self.check_lvalue(*array, assign_span); let array = Box::new(array); From e739329cefbfa274c34e200c3dbfb1156cd50f5c Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 3 May 2023 14:31:22 -0400 Subject: [PATCH 35/66] chore(ssa refactor): Remove unused variable module (#1284) Remove unused variable module --- .../noirc_evaluator/src/frontend/variable.rs | 23 ------------------- crates/noirc_evaluator/src/lib.rs | 3 --- 2 files changed, 26 deletions(-) delete mode 100644 crates/noirc_evaluator/src/frontend/variable.rs diff --git a/crates/noirc_evaluator/src/frontend/variable.rs b/crates/noirc_evaluator/src/frontend/variable.rs deleted file mode 100644 index 449581cf93c..00000000000 --- a/crates/noirc_evaluator/src/frontend/variable.rs +++ /dev/null @@ -1,23 +0,0 @@ -/// A variable in the SSA IR. -/// By definition, a variable can only be defined once. -/// -/// As in Cranelift, we also allow variable use before definition. -/// This will produce side-effects which will need to be handled -/// before sealing a block. -pub struct Variable(u32); - -impl From for Variable { - fn from(value: u32) -> Self { - Variable(value) - } -} -impl From for Variable { - fn from(value: u16) -> Self { - Variable(value as u32) - } -} -impl From for Variable { - fn from(value: u8) -> Self { - Variable(value as u32) - } -} diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index 438ada0167c..b044c70570a 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -9,9 +9,6 @@ mod ssa; // SSA code to create the SSA based IR // for functions and execute different optimizations. pub mod ssa_refactor; -// Frontend helper module to translate a different AST -// into the SSA IR. 
-pub mod frontend; use acvm::{ acir::circuit::{opcodes::Opcode as AcirOpcode, Circuit, PublicInputs}, From 80f436d610e61e63a0f956addde99aa5f2ecc3b7 Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 3 May 2023 14:55:41 -0400 Subject: [PATCH 36/66] chore(ssa refactor): Add entry point for acir gen pass (#1285) * Add entry point for acir gen pass * Fix typo --- crates/noirc_evaluator/src/ssa_refactor.rs | 12 +++++++++ .../src/ssa_refactor/acir_gen/mod.rs | 26 +++++++++++++++++++ 2 files changed, 38 insertions(+) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor.rs b/crates/noirc_evaluator/src/ssa_refactor.rs index fc45071e579..a55f61f71d6 100644 --- a/crates/noirc_evaluator/src/ssa_refactor.rs +++ b/crates/noirc_evaluator/src/ssa_refactor.rs @@ -7,6 +7,18 @@ //! This module heavily borrows from Cranelift #![allow(dead_code)] +use noirc_frontend::monomorphization::ast::Program; + +use self::acir_gen::Acir; + +mod acir_gen; mod ir; mod ssa_builder; pub mod ssa_gen; + +/// Optimize the given program by converting it into SSA +/// form and performing optimizations there. When finished, +/// convert the final SSA into ACIR and return it. +pub fn optimize_into_acir(program: Program) -> Acir { + ssa_gen::generate_ssa(program).into_acir() +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs new file mode 100644 index 00000000000..a0959db5db8 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs @@ -0,0 +1,26 @@ +//! This file holds the pass to convert from Noir's SSA IR to ACIR. +use super::ssa_gen::Ssa; + +/// Context struct for the acir generation pass. +/// May be similar to the Evaluator struct in the current SSA IR. 
+struct Context {} + +/// The output of the Acir-gen pass +pub struct Acir {} + +impl Ssa { + pub(crate) fn into_acir(self) -> Acir { + let mut context = Context::new(); + context.convert_ssa(self) + } +} + +impl Context { + fn new() -> Self { + Self {} + } + + fn convert_ssa(&mut self, _ssa: Ssa) -> Acir { + todo!() + } +} From b6e606b5c5b28f3497e0ce1124f79ddba7caa7a0 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Thu, 4 May 2023 12:53:03 +0100 Subject: [PATCH 37/66] chore(tests): use assert instead of constrain (#1288) --- .../tests/test_data/array_dynamic/src/main.nr | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr b/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr index 73dc162eb1b..23041188724 100644 --- a/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr +++ b/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr @@ -6,15 +6,15 @@ fn main(x: [u32; 5], mut z: u32, t: u32) { } fn dyn_array(mut x: [u32; 5], y: Field, z: Field) { - constrain x[y] == 111; - constrain x[z] == 101; + assert(x[y] == 111); + assert(x[z] == 101); x[z] = 0; - constrain x[y] == 111; - constrain x[1] == 0; + assert(x[y] == 111); + assert(x[1] == 0); if y as u32 < 10 { x[y] = x[y] - 2; } else { x[y] = 0; } - constrain x[4] == 109; + assert(x[4] == 109); } \ No newline at end of file From afa67494c564b68b667535f2d8ef234fbc4bec12 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Thu, 4 May 2023 13:50:09 +0100 Subject: [PATCH 38/66] feat(ssa refactor): experimental-ssa compiler flag (#1289) * feat(ssa refactor): experimental-ssa compiler flag * move experimental method into ssa_refactor * check the experimental flag in the driver module * undo changes to `create_circuit` --------- Co-authored-by: Kevaundray Wedderburn --- crates/nargo_cli/src/cli/mod.rs | 7 ++++- 
crates/noirc_driver/src/lib.rs | 34 ++++++++++++++++------ crates/noirc_evaluator/src/ssa_refactor.rs | 16 ++++++++++ 3 files changed, 47 insertions(+), 10 deletions(-) diff --git a/crates/nargo_cli/src/cli/mod.rs b/crates/nargo_cli/src/cli/mod.rs index ef54682ab50..5450bb39660 100644 --- a/crates/nargo_cli/src/cli/mod.rs +++ b/crates/nargo_cli/src/cli/mod.rs @@ -85,7 +85,12 @@ pub fn start_cli() -> eyre::Result<()> { // helper function which tests noir programs by trying to generate a proof and verify it pub fn prove_and_verify(proof_name: &str, program_dir: &Path, show_ssa: bool) -> bool { - let compile_options = CompileOptions { show_ssa, allow_warnings: false, show_output: false }; + let compile_options = CompileOptions { + show_ssa, + allow_warnings: false, + show_output: false, + experimental_ssa: false, + }; let proof_dir = program_dir.join(PROOFS_DIR); match prove_cmd::prove_with_path( diff --git a/crates/noirc_driver/src/lib.rs b/crates/noirc_driver/src/lib.rs index 2fcef5bc578..a2fbed21885 100644 --- a/crates/noirc_driver/src/lib.rs +++ b/crates/noirc_driver/src/lib.rs @@ -10,7 +10,7 @@ use fm::FileType; use iter_extended::try_vecmap; use noirc_abi::FunctionSignature; use noirc_errors::{reporter, ReportedError}; -use noirc_evaluator::create_circuit; +use noirc_evaluator::{create_circuit, ssa_refactor::experimental_create_circuit}; use noirc_frontend::graph::{CrateId, CrateName, CrateType, LOCAL_CRATE}; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; use noirc_frontend::hir::Context; @@ -43,11 +43,15 @@ pub struct CompileOptions { /// Display output of `println` statements #[arg(long)] pub show_output: bool, + + /// Compile and optimize using the new experimental SSA pass + #[arg(long)] + pub experimental_ssa: bool, } impl Default for CompileOptions { fn default() -> Self { - Self { show_ssa: false, allow_warnings: false, show_output: true } + Self { show_ssa: false, allow_warnings: false, show_output: true, experimental_ssa: false } } } @@ 
-254,13 +258,25 @@ impl Driver { let np_language = self.language.clone(); let is_opcode_supported = acvm::default_is_opcode_supported(np_language.clone()); - match create_circuit( - program, - np_language, - is_opcode_supported, - options.show_ssa, - options.show_output, - ) { + let circuit_abi = if options.experimental_ssa { + experimental_create_circuit( + program, + np_language, + is_opcode_supported, + options.show_ssa, + options.show_output, + ) + } else { + create_circuit( + program, + np_language, + is_opcode_supported, + options.show_ssa, + options.show_output, + ) + }; + + match circuit_abi { Ok((circuit, abi)) => Ok(CompiledProgram { circuit, abi }), Err(err) => { // The FileId here will be the file id of the file with the main file diff --git a/crates/noirc_evaluator/src/ssa_refactor.rs b/crates/noirc_evaluator/src/ssa_refactor.rs index a55f61f71d6..83f76c85ec1 100644 --- a/crates/noirc_evaluator/src/ssa_refactor.rs +++ b/crates/noirc_evaluator/src/ssa_refactor.rs @@ -7,6 +7,10 @@ //! This module heavily borrows from Cranelift #![allow(dead_code)] +use crate::errors::RuntimeError; +use acvm::{acir::circuit::Circuit, compiler::transformers::IsOpcodeSupported, Language}; +use noirc_abi::Abi; + use noirc_frontend::monomorphization::ast::Program; use self::acir_gen::Acir; @@ -22,3 +26,15 @@ pub mod ssa_gen; pub fn optimize_into_acir(program: Program) -> Acir { ssa_gen::generate_ssa(program).into_acir() } +/// Compiles the Program into ACIR and applies optimizations to the arithmetic gates +/// This is analogous to `ssa:create_circuit` and this method is called when one wants +/// to use the new ssa module to process Noir code. 
+pub fn experimental_create_circuit( + _program: Program, + _np_language: Language, + _is_opcode_supported: IsOpcodeSupported, + _enable_logging: bool, + _show_output: bool, +) -> Result<(Circuit, Abi), RuntimeError> { + todo!("this is a stub function for the new SSA refactor module") +} From 16fd3279f2b4bf81f5bde2474b454f112c03fb3e Mon Sep 17 00:00:00 2001 From: Globallager <72797635+Globallager@users.noreply.github.com> Date: Thu, 4 May 2023 21:29:13 +0800 Subject: [PATCH 39/66] fix(ci): Labeling `doc needed` PRs with uppercase checkbox `- [X]` (#1290) fix: Labeling `doc needed` PRs with uppercase checkbox `- [X]` --- .github/labeler.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/labeler.yml b/.github/labeler.yml index 8e0c08c95cb..dea78c8fb32 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -1,3 +1,3 @@ # Add/remove 'doc needed' label if issue/PR contains the line '- [x] This PR requires documentation updates when merged.' "doc needed": - - '- \[x\] This PR requires documentation updates when merged.' + - '- \[(x|X)\] This PR requires documentation updates when merged.' 
From 951ad71e0f8bc9a6e95ae21197854396ed7f6e78 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Thu, 4 May 2023 18:55:02 +0200 Subject: [PATCH 40/66] fix(ssa): add block opcode (#1291) * add block opcode * add regression test --- .../tests/test_data/array_dynamic/Prover.toml | 1 + .../tests/test_data/array_dynamic/src/main.nr | 10 ++++++- crates/noirc_evaluator/src/ssa/acir_gen.rs | 2 +- .../src/ssa/acir_gen/acir_mem.rs | 26 +++++++++---------- crates/noirc_evaluator/src/ssa/mem.rs | 4 +++ 5 files changed, 28 insertions(+), 15 deletions(-) diff --git a/crates/nargo_cli/tests/test_data/array_dynamic/Prover.toml b/crates/nargo_cli/tests/test_data/array_dynamic/Prover.toml index 1e652e28d1c..ff6f02ccdac 100644 --- a/crates/nargo_cli/tests/test_data/array_dynamic/Prover.toml +++ b/crates/nargo_cli/tests/test_data/array_dynamic/Prover.toml @@ -1,5 +1,6 @@ x = [104, 101, 108, 108, 111] z = "59" t = "10" +index = [0,1,2,3,4] diff --git a/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr b/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr index 23041188724..a4073fd55cb 100644 --- a/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr +++ b/crates/nargo_cli/tests/test_data/array_dynamic/src/main.nr @@ -1,8 +1,16 @@ -fn main(x: [u32; 5], mut z: u32, t: u32) { +fn main(x: [u32; 5], mut z: u32, t: u32, index: [Field;5]) { let idx = (z - 5*t - 5) as Field; //dynamic array test dyn_array(x, idx, idx - 3); + + // regression for issue 1283 + let mut s = 0; + let x3 = [246,159,32,176,8]; + for i in 0..5 { + s += x3[index[i]]; + } + assert(s!=0); } fn dyn_array(mut x: [u32; 5], y: Field, z: Field) { diff --git a/crates/noirc_evaluator/src/ssa/acir_gen.rs b/crates/noirc_evaluator/src/ssa/acir_gen.rs index b964672e285..22b5390e2fa 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen.rs @@ -46,7 +46,7 @@ impl Acir { //TODO we should rather follow the jumps current_block = 
block.left.map(|block_id| &ctx[block_id]); } - self.memory.acir_gen(evaluator); + self.memory.acir_gen(evaluator, ctx); Ok(()) } diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/acir_mem.rs b/crates/noirc_evaluator/src/ssa/acir_gen/acir_mem.rs index 0c55f61ca20..ac3395d9411 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/acir_mem.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/acir_mem.rs @@ -8,7 +8,10 @@ use crate::{ }; use acvm::{ acir::{ - circuit::{directives::Directive, opcodes::Opcode as AcirOpcode}, + circuit::{ + directives::Directive, + opcodes::{BlockId as AcirBlockId, MemOp, MemoryBlock, Opcode as AcirOpcode}, + }, native_types::{Expression, Witness}, }, FieldElement, @@ -22,15 +25,6 @@ use super::{ operations::{self}, }; -/// Represent a memory operation on the ArrayHeap, at the specified index -/// Operation is one for a store and 0 for a load -#[derive(Clone, Debug)] -pub(crate) struct MemOp { - operation: Expression, - value: Expression, - index: Expression, -} - type MemAddress = u32; enum ArrayType { @@ -137,7 +131,7 @@ impl ArrayHeap { outputs } - pub(crate) fn acir_gen(&self, evaluator: &mut Evaluator) { + pub(crate) fn acir_gen(&self, evaluator: &mut Evaluator, array_id: ArrayId, array_len: u32) { let (len, read_write) = match self.typ { ArrayType::Init(_, _) | ArrayType::WriteOnly => (0, true), ArrayType::ReadOnly(last) => (last.unwrap_or(self.trace.len()), false), @@ -147,6 +141,11 @@ impl ArrayHeap { if len == 0 { return; } + evaluator.opcodes.push(AcirOpcode::Block(MemoryBlock { + id: AcirBlockId(array_id.as_u32()), + len: array_len, + trace: self.trace.clone(), + })); let len_bits = AcirMem::bits(len); // permutations let mut in_counter = Vec::new(); @@ -318,9 +317,10 @@ impl AcirMem { let item = MemOp { operation: op, value, index }; self.array_heap_mut(*array_id).push(item); } - pub(crate) fn acir_gen(&self, evaluator: &mut Evaluator) { + pub(crate) fn acir_gen(&self, evaluator: &mut Evaluator, ctx: &SsaContext) { for mem in 
&self.virtual_memory { - mem.1.acir_gen(evaluator); + let array = &ctx.mem[*mem.0]; + mem.1.acir_gen(evaluator, array.id, array.len); } } } diff --git a/crates/noirc_evaluator/src/ssa/mem.rs b/crates/noirc_evaluator/src/ssa/mem.rs index e4a82a7e59c..09fdc33b704 100644 --- a/crates/noirc_evaluator/src/ssa/mem.rs +++ b/crates/noirc_evaluator/src/ssa/mem.rs @@ -23,6 +23,10 @@ impl ArrayId { pub(crate) fn dummy() -> ArrayId { ArrayId(std::u32::MAX) } + + pub(crate) fn as_u32(&self) -> u32 { + self.0 + } } /// MemArray represents a contiguous array of elements of the same type. From 7bac22bc6ad809e85b92d6b05ecea2e93ebba5ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Thu, 4 May 2023 18:56:37 +0200 Subject: [PATCH 41/66] feat(wasm): improved serialization of contracts in wasm (#1281) feat(wasm): serialization of contracts in wasm --- crates/noirc_driver/src/contract.rs | 5 ++++- crates/wasm/src/compile.rs | 14 +------------- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/crates/noirc_driver/src/contract.rs b/crates/noirc_driver/src/contract.rs index a5600c3d215..3f69a06d1e1 100644 --- a/crates/noirc_driver/src/contract.rs +++ b/crates/noirc_driver/src/contract.rs @@ -1,6 +1,7 @@ use acvm::acir::circuit::Circuit; use noirc_abi::Abi; use serde::{Deserialize, Serialize}; +use crate::program::{serialize_circuit, deserialize_circuit}; /// Describes the types of smart contract functions that are allowed. /// Unlike the similar enum in noirc_frontend, 'open' and 'unconstrained' @@ -19,6 +20,7 @@ pub enum ContractFunctionType { Unconstrained, } +#[derive(Serialize, Deserialize)] pub struct CompiledContract { /// The name of the contract. pub name: String, @@ -33,7 +35,7 @@ pub struct CompiledContract { /// A contract function unlike a regular Noir program /// however can have additional properties. /// One of these being a function type. 
-#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub struct ContractFunction { pub name: String, @@ -41,6 +43,7 @@ pub struct ContractFunction { pub abi: Abi, + #[serde(serialize_with = "serialize_circuit", deserialize_with = "deserialize_circuit")] pub bytecode: Circuit, } diff --git a/crates/wasm/src/compile.rs b/crates/wasm/src/compile.rs index 18bd30029bd..ecf2b789365 100644 --- a/crates/wasm/src/compile.rs +++ b/crates/wasm/src/compile.rs @@ -95,19 +95,7 @@ pub fn compile(args: JsValue) -> JsValue { .compile_contracts(&options.compile_options) .unwrap_or_else(|_| panic!("Contract compilation failed")); - // Flatten each contract into a list of its functions, each being assigned a unique name. - let collected_compiled_programs: Vec<_> = compiled_contracts - .into_iter() - .flat_map(|contract| { - let contract_id = format!("{}-{}", options.circuit_name, &contract.name); - contract.functions.into_iter().map(move |contract_function| { - let program_name = format!("{}-{}", contract_id, contract_function.name); - (program_name, contract_function.bytecode) - }) - }) - .collect(); - - ::from_serde(&collected_compiled_programs).unwrap() + ::from_serde(&compiled_contracts).unwrap() } else { let main = driver.main_function().unwrap_or_else(|_| panic!("Could not find main function!")); From 9740f54c28f30ea9367897fa986d8aea1aba79f2 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Thu, 4 May 2023 19:15:13 +0100 Subject: [PATCH 42/66] chore(parser)!: deprecate `constrain` keyword for `assert` (#1286) * chore(parser)!: deprecate constrain * chore(parser): typo in error * add shorter message * chore(parser): constrain tests expect deprecated * chore(parser): rm type annotation --------- Co-authored-by: Kevaundray Wedderburn --- crates/noirc_frontend/src/ast/mod.rs | 10 +++-- crates/noirc_frontend/src/ast/statement.rs | 14 ++++--- crates/noirc_frontend/src/parser/errors.rs | 45 +++++++++++++++------- 
crates/noirc_frontend/src/parser/mod.rs | 3 +- crates/noirc_frontend/src/parser/parser.rs | 33 +++++++++------- 5 files changed, 69 insertions(+), 36 deletions(-) diff --git a/crates/noirc_frontend/src/ast/mod.rs b/crates/noirc_frontend/src/ast/mod.rs index 6bd5c148d66..37eb944e0c6 100644 --- a/crates/noirc_frontend/src/ast/mod.rs +++ b/crates/noirc_frontend/src/ast/mod.rs @@ -16,7 +16,11 @@ use noirc_errors::Span; pub use statement::*; pub use structure::*; -use crate::{parser::ParserError, token::IntType, BinaryTypeOperator, CompTime}; +use crate::{ + parser::{ParserError, ParserErrorReason}, + token::IntType, + BinaryTypeOperator, CompTime, +}; use iter_extended::vecmap; /// The parser parses types as 'UnresolvedType's which @@ -152,9 +156,9 @@ impl UnresolvedTypeExpression { expr: Expression, span: Span, ) -> Result { - Self::from_expr_helper(expr).map_err(|err| { + Self::from_expr_helper(expr).map_err(|err_expr| { ParserError::with_reason( - format!("Expression is invalid in an array-length type: '{err}'. 
Only unsigned integer constants, globals, generics, +, -, *, /, and % may be used in this context."), + ParserErrorReason::InvalidArrayLengthExpression(err_expr), span, ) }) diff --git a/crates/noirc_frontend/src/ast/statement.rs b/crates/noirc_frontend/src/ast/statement.rs index 5e0dd4e4391..d4fabccea70 100644 --- a/crates/noirc_frontend/src/ast/statement.rs +++ b/crates/noirc_frontend/src/ast/statement.rs @@ -1,7 +1,7 @@ use std::fmt::Display; use crate::lexer::token::SpannedToken; -use crate::parser::ParserError; +use crate::parser::{ParserError, ParserErrorReason}; use crate::token::Token; use crate::{Expression, ExpressionKind, IndexExpression, MemberAccessExpression, UnresolvedType}; use iter_extended::vecmap; @@ -59,8 +59,10 @@ impl Statement { | Statement::Error => { // To match rust, statements always require a semicolon, even at the end of a block if semi.is_none() { - let reason = "Expected a ; separating these two statements".to_string(); - emit_error(ParserError::with_reason(reason, span)); + emit_error(ParserError::with_reason( + ParserErrorReason::MissingSeparatingSemi, + span, + )); } self } @@ -83,8 +85,10 @@ impl Statement { // for unneeded expressions like { 1 + 2; 3 } (_, Some(_), false) => Statement::Expression(expr), (_, None, false) => { - let reason = "Expected a ; separating these two statements".to_string(); - emit_error(ParserError::with_reason(reason, span)); + emit_error(ParserError::with_reason( + ParserErrorReason::MissingSeparatingSemi, + span, + )); Statement::Expression(expr) } diff --git a/crates/noirc_frontend/src/parser/errors.rs b/crates/noirc_frontend/src/parser/errors.rs index 7f19ef7f062..7012c0fbda5 100644 --- a/crates/noirc_frontend/src/parser/errors.rs +++ b/crates/noirc_frontend/src/parser/errors.rs @@ -1,18 +1,33 @@ use std::collections::BTreeSet; use crate::lexer::token::Token; -use crate::BinaryOp; +use crate::Expression; +use thiserror::Error; use iter_extended::vecmap; use noirc_errors::CustomDiagnostic as 
Diagnostic; use noirc_errors::Span; +#[derive(Debug, Clone, PartialEq, Eq, Error)] +pub enum ParserErrorReason { + #[error("Arrays must have at least one element")] + ZeroSizedArray, + #[error("Unexpected '{0}', expected a field name")] + ExpectedFieldName(Token), + #[error("Expected a ; separating these two statements")] + MissingSeparatingSemi, + #[error("constrain keyword is deprecated")] + ConstrainDeprecated, + #[error("Expression is invalid in an array-length type: '{0}'. Only unsigned integer constants, globals, generics, +, -, *, /, and % may be used in this context.")] + InvalidArrayLengthExpression(Expression), +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct ParserError { expected_tokens: BTreeSet, expected_labels: BTreeSet, found: Token, - reason: Option, + reason: Option, span: Span, } @@ -39,21 +54,11 @@ impl ParserError { error } - pub fn with_reason(reason: String, span: Span) -> ParserError { + pub fn with_reason(reason: ParserErrorReason, span: Span) -> ParserError { let mut error = ParserError::empty(Token::EOF, span); error.reason = Some(reason); error } - - pub fn invalid_constrain_operator(operator: BinaryOp) -> ParserError { - let message = format!( - "Cannot use the {} operator in a constraint statement.", - operator.contents.as_string() - ); - let mut error = ParserError::empty(operator.contents.as_token(), operator.span()); - error.reason = Some(message); - error - } } impl std::fmt::Display for ParserError { @@ -84,7 +89,19 @@ impl std::fmt::Display for ParserError { impl From for Diagnostic { fn from(error: ParserError) -> Diagnostic { match &error.reason { - Some(reason) => Diagnostic::simple_error(reason.clone(), String::new(), error.span), + Some(reason) => { + match reason { + ParserErrorReason::ConstrainDeprecated => Diagnostic::simple_warning( + "Use of deprecated keyword 'constrain'".into(), + "The 'constrain' keyword has been deprecated. 
Please use the 'assert' function instead.".into(), + error.span, + ), + other => { + + Diagnostic::simple_error(format!("{other}"), String::new(), error.span) + } + } + } None => { let primary = error.to_string(); Diagnostic::simple_error(primary, String::new(), error.span) diff --git a/crates/noirc_frontend/src/parser/mod.rs b/crates/noirc_frontend/src/parser/mod.rs index 788c0eec895..98b7fffbf14 100644 --- a/crates/noirc_frontend/src/parser/mod.rs +++ b/crates/noirc_frontend/src/parser/mod.rs @@ -24,6 +24,7 @@ use acvm::FieldElement; use chumsky::prelude::*; use chumsky::primitive::Container; pub use errors::ParserError; +pub use errors::ParserErrorReason; use noirc_errors::Span; pub use parser::parse_program; @@ -176,7 +177,7 @@ where .try_map(move |peek, span| { if too_far.get_iter().any(|t| t == peek) { // This error will never be shown to the user - Err(ParserError::with_reason(String::new(), span)) + Err(ParserError::empty(Token::EOF, span)) } else { Ok(Recoverable::error(span)) } diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 575a9403ea8..4f7c73e609b 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -26,7 +26,7 @@ use super::{ foldl_with_span, parameter_name_recovery, parameter_recovery, parenthesized, then_commit, then_commit_ignore, top_level_statement_recovery, ExprParser, ForRange, NoirParser, - ParsedModule, ParserError, Precedence, SubModule, TopLevelStatement, + ParsedModule, ParserError, ParserErrorReason, Precedence, SubModule, TopLevelStatement, }; use crate::ast::{Expression, ExpressionKind, LetStatement, Statement, UnresolvedType}; use crate::lexer::Lexer; @@ -448,6 +448,10 @@ where { ignore_then_commit(keyword(Keyword::Constrain).labelled("statement"), expr_parser) .map(|expr| Statement::Constrain(ConstrainStatement(expr))) + .validate(|expr, span, emit| { + emit(ParserError::with_reason(ParserErrorReason::ConstrainDeprecated, 
span)); + expr + }) } fn assertion<'a, P>(expr_parser: P) -> impl NoirParser + 'a @@ -877,10 +881,7 @@ where .delimited_by(just(Token::LeftBracket), just(Token::RightBracket)) .validate(|elements, span, emit| { if elements.is_empty() { - emit(ParserError::with_reason( - "Arrays must have at least one element".to_owned(), - span, - )); + emit(ParserError::with_reason(ParserErrorReason::ZeroSizedArray, span)); } ExpressionKind::array(elements) }) @@ -966,8 +967,7 @@ fn field_name() -> impl NoirParser { ident().or(token_kind(TokenKind::Literal).validate(|token, span, emit| match token { Token::Int(_) => Ident::from(Spanned::from(span, token.to_string())), other => { - let reason = format!("Unexpected '{other}', expected a field name"); - emit(ParserError::with_reason(reason, span)); + emit(ParserError::with_reason(ParserErrorReason::ExpectedFieldName(other), span)); Ident::error(span) } })) @@ -1196,10 +1196,12 @@ mod test { ); } - /// This is the standard way to declare a constrain statement + /// Deprecated constrain usage test #[test] fn parse_constrain() { - parse_with(constrain(expression()), "constrain x == y").unwrap(); + let errors = parse_with(constrain(expression()), "constrain x == y").unwrap_err(); + assert_eq!(errors.len(), 1); + assert!(format!("{}", errors.first().unwrap()).contains("deprecated")); // Currently we disallow constrain statements where the outer infix operator // produces a value. This would require an implicit `==` which @@ -1217,7 +1219,9 @@ mod test { for operator in disallowed_operators { let src = format!("constrain x {} y;", operator.as_string()); - parse_with(constrain(expression()), &src).unwrap_err(); + let errors = parse_with(constrain(expression()), &src).unwrap_err(); + assert_eq!(errors.len(), 2); + assert!(format!("{}", errors.first().unwrap()).contains("deprecated")); } // These are general cases which should always work. 
@@ -1226,7 +1230,7 @@ mod test { // The first (inner) `==` is a predicate which returns 0/1 // The outer layer is an infix `==` which is // associated with the Constrain statement - parse_all( + let errors = parse_all_failing( constrain(expression()), vec![ "constrain ((x + y) == k) + z == y", @@ -1236,8 +1240,11 @@ mod test { "constrain x + x ^ x == y | m", ], ); + assert_eq!(errors.len(), 5); + assert!(errors.iter().all(|err| { format!("{}", err).contains("deprecated") })); } + /// This is the standard way to declare an assert statement #[test] fn parse_assert() { parse_with(assertion(expression()), "assert(x == y)").unwrap(); @@ -1533,9 +1540,9 @@ mod test { ("let = ", 2, "let $error: unspecified = Error"), ("let", 3, "let $error: unspecified = Error"), ("foo = one two three", 1, "foo = plain::one"), - ("constrain", 1, "constrain Error"), + ("constrain", 2, "constrain Error"), ("assert", 1, "constrain Error"), - ("constrain x ==", 1, "constrain (plain::x == Error)"), + ("constrain x ==", 2, "constrain (plain::x == Error)"), ("assert(x ==)", 1, "constrain (plain::x == Error)"), ]; From 545340cba652c988d88f4a73dcda31fb54c4c3e8 Mon Sep 17 00:00:00 2001 From: Erika Reale <49906646+erikareale@users.noreply.github.com> Date: Fri, 5 May 2023 11:14:40 +0200 Subject: [PATCH 43/66] chore: replace usage of `&Vec` with `&[T]` in `noirc_evaluator` (#1298) * refactor: use slice arg instead of vec reference * style: handle `uninlined_format_args` clippy lint in noirc_frontend and noirc_evaluator --- .../src/ssa/acir_gen/operations/sort.rs | 14 +++++++------- .../noirc_evaluator/src/ssa_refactor/ir/printer.rs | 2 +- crates/noirc_frontend/src/hir_def/types.rs | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs b/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs index ffcbf1ea7c0..9566252b2be 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs +++ 
b/crates/noirc_evaluator/src/ssa/acir_gen/operations/sort.rs @@ -12,8 +12,8 @@ use crate::{ // Generate gates which ensure that out_expr is a permutation of in_expr // Returns the control bits of the sorting network used to generate the constrains pub(crate) fn evaluate_permutation( - in_expr: &Vec, - out_expr: &Vec, + in_expr: &[Expression], + out_expr: &[Expression], evaluator: &mut Evaluator, ) -> Vec { let bits = Vec::new(); @@ -27,9 +27,9 @@ pub(crate) fn evaluate_permutation( // Same as evaluate_permutation() but uses the provided witness as network control bits pub(crate) fn evaluate_permutation_with_witness( - in_expr: &Vec, - out_expr: &Vec, - bits: &Vec, + in_expr: &[Expression], + out_expr: &[Expression], + bits: &[Witness], evaluator: &mut Evaluator, ) { let (w, b) = permutation_layer(in_expr, bits, false, evaluator); @@ -47,14 +47,14 @@ pub(crate) fn evaluate_permutation_with_witness( // in both cases it returns the witness of the network configuration // if generate_witness is true, bits is ignored fn permutation_layer( - in_expr: &Vec, + in_expr: &[Expression], bits: &[Witness], generate_witness: bool, evaluator: &mut Evaluator, ) -> (Vec, Vec) { let n = in_expr.len(); if n == 1 { - return (Vec::new(), in_expr.clone()); + return (Vec::new(), in_expr.to_vec()); } let n1 = n / 2; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs index 2e467017885..3993a862618 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs @@ -64,7 +64,7 @@ fn value(function: &Function, id: ValueId) -> String { match &function.dfg[id] { Value::NumericConstant { constant, typ } => { let value = function.dfg[*constant].value(); - format!("{} {}", typ, value) + format!("{typ} {value}") } Value::Function(id) => id.to_string(), Value::Intrinsic(intrinsic) => intrinsic.to_string(), diff --git a/crates/noirc_frontend/src/hir_def/types.rs 
b/crates/noirc_frontend/src/hir_def/types.rs index be7d90e089f..9a6f83ddd50 100644 --- a/crates/noirc_frontend/src/hir_def/types.rs +++ b/crates/noirc_frontend/src/hir_def/types.rs @@ -652,7 +652,7 @@ impl std::fmt::Display for Type { write!(f, "fn({}) -> {}", args.join(", "), ret) } Type::Vec(element) => { - write!(f, "Vec<{}>", element) + write!(f, "Vec<{element}>") } } } From b3bd80414769df0900ed97a99e600ede92a0b9b6 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 8 May 2023 11:45:48 +0100 Subject: [PATCH 44/66] chore: Remove unused `frontend` file (#1308) remove unused file --- crates/noirc_evaluator/src/frontend.rs | 1 - 1 file changed, 1 deletion(-) delete mode 100644 crates/noirc_evaluator/src/frontend.rs diff --git a/crates/noirc_evaluator/src/frontend.rs b/crates/noirc_evaluator/src/frontend.rs deleted file mode 100644 index 410f9f1a9b0..00000000000 --- a/crates/noirc_evaluator/src/frontend.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod variable; From b327422ac54135e5e3813fb7a8c1b1a358b78725 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 8 May 2023 11:45:57 +0100 Subject: [PATCH 45/66] chore: Remove outdated comment (#1306) remove outdated comment --- crates/noirc_evaluator/src/lib.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs index b044c70570a..533adce4325 100644 --- a/crates/noirc_evaluator/src/lib.rs +++ b/crates/noirc_evaluator/src/lib.rs @@ -313,10 +313,6 @@ impl Evaluator { /// However, this intermediate representation is useful as it allows us to have /// intermediate Types which the core type system does not know about like Strings. fn parse_abi_alt(&mut self, ir_gen: &mut IrGenerator) { - // XXX: Currently, the syntax only supports public witnesses - // u8 and arrays are assumed to be private - // This is not a short-coming of the ABI, but of the grammar - // The new grammar has been conceived, and will be implemented. 
let main = ir_gen.program.main_mut(); let main_params = std::mem::take(&mut main.parameters); let abi_params = std::mem::take(&mut ir_gen.program.main_function_signature.0); From d872890e408ada056e9aab84a7774dcaa2049324 Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 8 May 2023 13:09:48 -0400 Subject: [PATCH 46/66] fix: Fix struct or tuple field assignment failing with generics (#1317) Fix #1315 --- crates/noirc_frontend/src/hir/type_check/stmt.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/noirc_frontend/src/hir/type_check/stmt.rs b/crates/noirc_frontend/src/hir/type_check/stmt.rs index ccb35070a36..73ae8ebc94e 100644 --- a/crates/noirc_frontend/src/hir/type_check/stmt.rs +++ b/crates/noirc_frontend/src/hir/type_check/stmt.rs @@ -153,7 +153,7 @@ impl<'interner> TypeChecker<'interner> { (Type::Error, None) }; - let (typ, field_index) = match result { + let (typ, field_index) = match result.follow_bindings() { Type::Struct(def, args) => { match def.borrow().get_field(&field_name.0.contents, &args) { Some((field, index)) => (field, Some(index)), From 460568e50a810f90db6559195492547095ab8c32 Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 8 May 2023 15:45:27 -0400 Subject: [PATCH 47/66] fix: Assigning to tuple fields (#1318) * Fix assigning to tuple fields * Cargo fmt * Formatting * Add regression test --- .../tests/test_data/tuples/src/main.nr | 20 +++++-- crates/noirc_driver/src/contract.rs | 2 +- .../noirc_frontend/src/hir/type_check/expr.rs | 55 ++++++++++++++----- .../noirc_frontend/src/hir/type_check/stmt.rs | 25 ++------- crates/noirc_frontend/src/parser/parser.rs | 2 +- 5 files changed, 63 insertions(+), 41 deletions(-) diff --git a/crates/nargo_cli/tests/test_data/tuples/src/main.nr b/crates/nargo_cli/tests/test_data/tuples/src/main.nr index 4a003dc5a42..b1d310b1412 100644 --- a/crates/nargo_cli/tests/test_data/tuples/src/main.nr +++ b/crates/nargo_cli/tests/test_data/tuples/src/main.nr @@ -9,11 +9,21 @@ fn main(x: Field, y: 
Field) { assert(a == 0); assert(b == 1); - let (u,v) = if x as u32 <1 { - (x,x+1) + let (u,v) = if x as u32 < 1 { + (x, x + 1) } else { - (x+1,x) + (x + 1, x) }; - assert(u==x+1); - assert(v==x); + assert(u == x+1); + assert(v == x); + + // Test mutating tuples + let mut mutable = ((0, 0), 1, 2, 3); + mutable.0 = pair; + mutable.2 = 7; + assert(mutable.0.0 == 1); + assert(mutable.0.1 == 0); + assert(mutable.1 == 1); + assert(mutable.2 == 7); + assert(mutable.3 == 3); } diff --git a/crates/noirc_driver/src/contract.rs b/crates/noirc_driver/src/contract.rs index 3f69a06d1e1..c0a54534941 100644 --- a/crates/noirc_driver/src/contract.rs +++ b/crates/noirc_driver/src/contract.rs @@ -1,7 +1,7 @@ +use crate::program::{deserialize_circuit, serialize_circuit}; use acvm::acir::circuit::Circuit; use noirc_abi::Abi; use serde::{Deserialize, Serialize}; -use crate::program::{serialize_circuit, deserialize_circuit}; /// Describes the types of smart contract functions that are allowed. /// Unlike the similar enum in noirc_frontend, 'open' and 'unconstrained' diff --git a/crates/noirc_frontend/src/hir/type_check/expr.rs b/crates/noirc_frontend/src/hir/type_check/expr.rs index 1929af8d223..8a91ecbfde8 100644 --- a/crates/noirc_frontend/src/hir/type_check/expr.rs +++ b/crates/noirc_frontend/src/hir/type_check/expr.rs @@ -455,18 +455,47 @@ impl<'interner> TypeChecker<'interner> { fn check_member_access(&mut self, access: expr::HirMemberAccess, expr_id: ExprId) -> Type { let lhs_type = self.check_expression(&access.lhs).follow_bindings(); + let span = self.interner.expr_span(&expr_id); + + match self.check_field_access(&lhs_type, &access.rhs.0.contents, span) { + Some((element_type, index)) => { + self.interner.set_field_index(expr_id, index); + element_type + } + None => Type::Error, + } + } + + /// This will verify that an expression in the form `lhs.rhs_name` has the given field and will push + /// a type error if it does not. 
If there is no error, the type of the struct/tuple field is returned + /// along with the index of the field in question. + /// + /// This function is abstracted from check_member_access so that it can be shared between + /// there and the HirLValue::MemberAccess case of check_lvalue. + pub(super) fn check_field_access( + &mut self, + lhs_type: &Type, + field_name: &str, + span: Span, + ) -> Option<(Type, usize)> { + let lhs_type = lhs_type.follow_bindings(); if let Type::Struct(s, args) = &lhs_type { let s = s.borrow(); - if let Some((field, index)) = s.get_field(&access.rhs.0.contents, args) { - self.interner.set_field_index(expr_id, index); - return field; + if let Some((field, index)) = s.get_field(field_name, args) { + return Some((field, index)); } } else if let Type::Tuple(elements) = &lhs_type { - if let Ok(index) = access.rhs.0.contents.parse::() { - if index < elements.len() { - self.interner.set_field_index(expr_id, index); - return elements[index].clone(); + if let Ok(index) = field_name.parse::() { + let length = elements.len(); + if index < length { + return Some((elements[index].clone(), index)); + } else { + self.errors.push(TypeCheckError::Unstructured { + msg: format!("Index {index} is out of bounds for this tuple {lhs_type} of length {length}"), + span, + }); + return None; } } } @@ -474,17 +503,13 @@ impl<'interner> TypeChecker<'interner> { // If we get here the type has no field named 'access.rhs'. // Now we specialize the error message based on whether we know the object type in question yet. if let Type::TypeVariable(..) 
= &lhs_type { - self.errors.push(TypeCheckError::TypeAnnotationsNeeded { - span: self.interner.expr_span(&access.lhs), - }); + self.errors.push(TypeCheckError::TypeAnnotationsNeeded { span }); } else if lhs_type != Type::Error { - self.errors.push(TypeCheckError::Unstructured { - msg: format!("Type {lhs_type} has no member named {}", access.rhs), - span: self.interner.expr_span(&access.lhs), - }); + let msg = format!("Type {lhs_type} has no member named {field_name}"); + self.errors.push(TypeCheckError::Unstructured { msg, span }); } - Type::Error + None } fn comparator_operand_type_rules( diff --git a/crates/noirc_frontend/src/hir/type_check/stmt.rs b/crates/noirc_frontend/src/hir/type_check/stmt.rs index 73ae8ebc94e..7bd50392404 100644 --- a/crates/noirc_frontend/src/hir/type_check/stmt.rs +++ b/crates/noirc_frontend/src/hir/type_check/stmt.rs @@ -142,28 +142,15 @@ impl<'interner> TypeChecker<'interner> { (typ.clone(), HirLValue::Ident(ident, typ)) } HirLValue::MemberAccess { object, field_name, .. 
} => { - let (result, object) = self.check_lvalue(*object, assign_span); + let (lhs_type, object) = self.check_lvalue(*object, assign_span); let object = Box::new(object); - let mut error = |typ| { - self.errors.push(TypeCheckError::Unstructured { - msg: format!("Type {typ} has no member named {field_name}"), - span: field_name.span(), - }); - (Type::Error, None) - }; - - let (typ, field_index) = match result.follow_bindings() { - Type::Struct(def, args) => { - match def.borrow().get_field(&field_name.0.contents, &args) { - Some((field, index)) => (field, Some(index)), - None => error(Type::Struct(def.clone(), args)), - } - } - Type::Error => (Type::Error, None), - other => error(other), - }; + let span = field_name.span(); + let (typ, field_index) = self + .check_field_access(&lhs_type, &field_name.0.contents, span) + .unwrap_or((Type::Error, 0)); + let field_index = Some(field_index); (typ.clone(), HirLValue::MemberAccess { object, field_name, field_index, typ }) } HirLValue::Index { array, index, .. 
} => { diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 4f7c73e609b..d83cf6fd710 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -534,7 +534,7 @@ where { let l_ident = ident().map(LValue::Ident); - let l_member_rhs = just(Token::Dot).ignore_then(ident()).map(LValueRhs::MemberAccess); + let l_member_rhs = just(Token::Dot).ignore_then(field_name()).map(LValueRhs::MemberAccess); let l_index = expr_parser .delimited_by(just(Token::LeftBracket), just(Token::RightBracket)) From ad191cd6e0dbb7142d97231a2d388c0280ac0708 Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 8 May 2023 18:02:02 -0400 Subject: [PATCH 48/66] chore(ssa refactor): Document some SSA-gen functions (#1321) * Document some SSA-gen functions * Add additional comment --- .../src/ssa_refactor/ssa_gen/mod.rs | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index d6c5731e147..80fde837f9f 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -31,6 +31,10 @@ pub fn generate_ssa(program: Program) -> Ssa { let mut function_context = FunctionContext::new(main.name.clone(), &main.parameters, &context); function_context.codegen_function_body(&main.body); + // Main has now been compiled and any other functions referenced within have been added to the + // function queue as they were found in codegen_ident. This queueing will happen each time a + // previously-unseen function is found so we need now only continue popping from this queue + // to generate SSA for each function used within the program. 
while let Some((src_function_id, dest_id)) = context.pop_next_function_in_queue() { let function = &context.program[src_function_id]; function_context.new_function(dest_id, function.name.clone(), &function.parameters); @@ -116,6 +120,15 @@ impl<'a> FunctionContext<'a> { } } + /// Codegen an array by allocating enough space for each element and inserting separate + /// store instructions until each element is stored. The store instructions will be separated + /// by add instructions to calculate the new offset address to store to next. + /// + /// In the case of arrays of structs, the structs are flattened such that each field will be + /// stored next to the other fields in memory. So an array such as [(1, 2), (3, 4)] is + /// stored the same as the array [1, 2, 3, 4]. + /// + /// The value returned from this function is always that of the allocate instruction. fn codegen_array(&mut self, elements: Vec, element_type: Tree) -> Values { let size = element_type.size_of_type() * elements.len(); let array = self.builder.insert_allocate(size.try_into().unwrap_or_else(|_| { @@ -206,6 +219,23 @@ impl<'a> FunctionContext<'a> { self.builder.insert_cast(lhs, typ).into() } + /// Codegens a for loop, creating three new blocks in the process. + /// The return value of a for loop is always a unit literal. + /// + /// For example, the loop `for i in start .. end { body }` is codegen'd as: + /// + /// v0 = ... codegen start ... + /// v1 = ... codegen end ... + /// br loop_entry(v0) + /// loop_entry(i: Field): + /// v2 = lt i v1 + /// brif v2, then: loop_body, else: loop_end + /// loop_body(): + /// v3 = ... codegen body ... + /// v4 = add 1, i + /// br loop_entry(v4) + /// loop_end(): + /// ... This is the current insert point after codegen_for finishes ... 
fn codegen_for(&mut self, for_expr: &ast::For) -> Values { let loop_entry = self.builder.insert_block(); let loop_body = self.builder.insert_block(); @@ -236,6 +266,30 @@ impl<'a> FunctionContext<'a> { self.unit_value() } + /// Codegens an if expression, handling the case of what to do if there is no 'else'. + /// + /// For example, the expression `if cond { a } else { b }` is codegen'd as: + /// + /// v0 = ... codegen cond ... + /// brif v0, then: then_block, else: else_block + /// then_block(): + /// v1 = ... codegen a ... + /// br end_if(v1) + /// else_block(): + /// v2 = ... codegen b ... + /// br end_if(v2) + /// end_if(v3: ?): // Type of v3 matches the type of a and b + /// ... This is the current insert point after codegen_if finishes ... + /// + /// As another example, the expression `if cond { a }` is codegen'd as: + /// + /// v0 = ... codegen cond ... + /// brif v0, then: then_block, else: end_block + /// then_block: + /// v1 = ... codegen a ... + /// br end_if() + /// end_if: // No block parameter is needed. Without an else, the unit value is always returned. + /// ... This is the current insert point after codegen_if finishes ... fn codegen_if(&mut self, if_expr: &ast::If) -> Values { let condition = self.codegen_non_tuple_expression(&if_expr.condition); @@ -287,6 +341,8 @@ impl<'a> FunctionContext<'a> { Self::get_field(tuple, field_index) } + /// Generate SSA for a function call. Note that calls to built-in functions + /// and intrinsics are also represented by the function call instruction. fn codegen_call(&mut self, call: &ast::Call) -> Values { let function = self.codegen_non_tuple_expression(&call.func); @@ -299,6 +355,10 @@ impl<'a> FunctionContext<'a> { self.insert_call(function, arguments, &call.return_type) } + /// Generate SSA for the given variable. + /// If the variable is immutable, no special handling is necessary and we can return the given + /// ValueId directly. 
If it is mutable, we'll need to allocate space for the value and store + /// the initial value before returning the allocate instruction. fn codegen_let(&mut self, let_expr: &ast::Let) -> Values { let mut values = self.codegen_expression(&let_expr.expression); From 36f5b8e88fe8048ece1a54755789d56c8803b3ab Mon Sep 17 00:00:00 2001 From: jfecher Date: Mon, 8 May 2023 18:28:19 -0400 Subject: [PATCH 49/66] fix: Parsing nested generics (#1319) * Implement parsing for nested generics * Add regression test * Fix test * Update crates/noirc_frontend/src/lexer/lexer.rs Co-authored-by: kevaundray * Uncomment line - merge error spans * Update crates/noirc_frontend/src/parser/parser.rs Co-authored-by: kevaundray * fix test --------- Co-authored-by: kevaundray --- .../tests/test_data/generics/src/main.nr | 5 +++ crates/noirc_frontend/src/lexer/lexer.rs | 8 ++-- crates/noirc_frontend/src/parser/errors.rs | 2 +- crates/noirc_frontend/src/parser/parser.rs | 38 +++++++++++++++---- 4 files changed, 40 insertions(+), 13 deletions(-) diff --git a/crates/nargo_cli/tests/test_data/generics/src/main.nr b/crates/nargo_cli/tests/test_data/generics/src/main.nr index c506995adc3..bfde9d3c957 100644 --- a/crates/nargo_cli/tests/test_data/generics/src/main.nr +++ b/crates/nargo_cli/tests/test_data/generics/src/main.nr @@ -49,4 +49,9 @@ fn main(x: Field, y: Field) { // Expected type error // assert(bar2.get_other() == bar2.other); + + let one = x; + let two = y; + let nested_generics: Bar> = Bar { one, two, other: Bar { one, two, other: 0 } }; + assert(nested_generics.other.other == bar1.get_other()); } diff --git a/crates/noirc_frontend/src/lexer/lexer.rs b/crates/noirc_frontend/src/lexer/lexer.rs index 5e0d99cfed9..2c8583ef2c7 100644 --- a/crates/noirc_frontend/src/lexer/lexer.rs +++ b/crates/noirc_frontend/src/lexer/lexer.rs @@ -162,9 +162,8 @@ impl<'a> Lexer<'a> { if self.peek_char_is('=') { self.next_char(); Ok(Token::GreaterEqual.into_span(start, start + 1)) - } else if 
self.peek_char_is('>') { - self.next_char(); - Ok(Token::ShiftRight.into_span(start, start + 1)) + // Note: There is deliberately no case for RightShift. We always lex >> as + // two separate Greater tokens to help the parser parse nested generic types. } else { Ok(prev_token.into_single_span(start)) } @@ -387,7 +386,8 @@ fn test_single_double_char() { Token::Assign, Token::Equal, Token::ShiftLeft, - Token::ShiftRight, + Token::Greater, + Token::Greater, Token::EOF, ]; diff --git a/crates/noirc_frontend/src/parser/errors.rs b/crates/noirc_frontend/src/parser/errors.rs index 7012c0fbda5..c339835fbc3 100644 --- a/crates/noirc_frontend/src/parser/errors.rs +++ b/crates/noirc_frontend/src/parser/errors.rs @@ -147,7 +147,7 @@ impl chumsky::Error for ParserError { self.reason = other.reason; } - assert_eq!(self.span, other.span); + self.span = self.span.merge(other.span); self } } diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index d83cf6fd710..3a8c8f49303 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -517,9 +517,22 @@ where ) } +/// Parse an assignment operator `=` optionally prefixed by a binary operator for a combined +/// assign statement shorthand. Notably, this must handle a few corner cases with how `>>` is +/// lexed as two separate greater-than operators rather than a single right-shift. fn assign_operator() -> impl NoirParser { let shorthand_operators = Token::assign_shorthand_operators(); - let shorthand_syntax = one_of(shorthand_operators).then_ignore(just(Token::Assign)); + // We need to explicitly check for right_shift here since it is actually + // two separate greater-than operators. 
+ let shorthand_operators = right_shift_operator().or(one_of(shorthand_operators)); + let shorthand_syntax = shorthand_operators.then_ignore(just(Token::Assign)); + + // Since >> is lexed as two separate greater-thans, >>= is lexed as > >=, so + // we need to account for that case here as well. + let right_shift_fix = + just(Token::Greater).then(just(Token::GreaterEqual)).map(|_| Token::ShiftRight); + + let shorthand_syntax = shorthand_syntax.or(right_shift_fix); just(Token::Assign).or(shorthand_syntax) } @@ -726,14 +739,23 @@ fn create_infix_expression(lhs: Expression, (operator, rhs): (BinaryOp, Expressi Expression { span, kind: ExpressionKind::Infix(infix) } } +// Right-shift (>>) is issued as two separate > tokens by the lexer as this makes it easier +// to parse nested generic types. For normal expressions however, it means we have to manually +// parse two greater-than tokens as a single right-shift here. +fn right_shift_operator() -> impl NoirParser { + just(Token::Greater).then(just(Token::Greater)).map(|_| Token::ShiftRight) +} + fn operator_with_precedence(precedence: Precedence) -> impl NoirParser> { - filter_map(move |span, token: Token| { - if Precedence::token_precedence(&token) == Some(precedence) { - Ok(token.try_into_binary_op(span).unwrap()) - } else { - Err(ParserError::expected_label("binary operator".to_string(), token, span)) - } - }) + right_shift_operator() + .or(any()) // Parse any single token, we're validating it as an operator next + .try_map(move |token, span| { + if Precedence::token_precedence(&token) == Some(precedence) { + Ok(token.try_into_binary_op(span).unwrap()) + } else { + Err(ParserError::expected_label("binary operator".to_string(), token, span)) + } + }) } fn term<'a, P>(expr_parser: P) -> impl NoirParser + 'a From 260d87d1ef86069a1fcf0f9b4969589273e381d1 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Tue, 9 May 2023 02:28:17 +0200 Subject: [PATCH 50/66] feat(stdlib): Add keccak 
(#1249) * Add keccak in stdlib * add simple keccak example * use assert * update to latest aztec_backend * update barretenberg to e66f1ef38c3c87c223456d8a77878c2bd3d346eb * change sha256 to keccak256 * update flake.lock * update commit * update cargo.lock --------- Co-authored-by: Kevaundray Wedderburn --- Cargo.lock | 31 +++++++++++++++- crates/nargo_cli/Cargo.toml | 3 +- .../tests/test_data/keccak256/Nargo.toml | 5 +++ .../tests/test_data/keccak256/Prover.toml | 35 +++++++++++++++++++ .../tests/test_data/keccak256/src/main.nr | 10 ++++++ crates/noirc_evaluator/src/ssa/builtin.rs | 9 ++--- flake.lock | 6 ++-- noir_stdlib/src/hash.nr | 3 ++ 8 files changed, 89 insertions(+), 13 deletions(-) create mode 100644 crates/nargo_cli/tests/test_data/keccak256/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/keccak256/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/keccak256/src/main.nr diff --git a/Cargo.lock b/Cargo.lock index b8647a9622d..6fe210ab93a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -49,7 +49,7 @@ dependencies = [ [[package]] name = "acvm-backend-barretenberg" version = "0.0.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=c9fb9e806f1400a2ff7594a0669bec56025220bb#c9fb9e806f1400a2ff7594a0669bec56025220bb" +source = "git+https://github.com/noir-lang/aztec_backend?rev=677f10e07011849f8aa0d75fe80390bb3081b1e5#677f10e07011849f8aa0d75fe80390bb3081b1e5" dependencies = [ "acvm", "barretenberg-sys", @@ -61,6 +61,7 @@ dependencies = [ "pkg-config", "reqwest", "rust-embed", + "sha3", "tokio", "wasmer", ] @@ -386,6 +387,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ + "block-padding", "generic-array", ] @@ -398,6 +400,12 @@ dependencies = [ "generic-array", ] +[[package]] +name = "block-padding" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" + [[package]] name = "bstr" version = "1.4.0" @@ -1676,6 +1684,15 @@ dependencies = [ "sha2 0.9.9", ] +[[package]] +name = "keccak" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f6d5ed8676d904364de097082f4e7d240b571b67989ced0240f08b7f966f940" +dependencies = [ + "cpufeatures", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -2820,6 +2837,18 @@ dependencies = [ "digest 0.10.6", ] +[[package]] +name = "sha3" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "keccak", + "opaque-debug", +] + [[package]] name = "sharded-slab" version = "0.1.4" diff --git a/crates/nargo_cli/Cargo.toml b/crates/nargo_cli/Cargo.toml index 2568f2a86f5..6e8801301b1 100644 --- a/crates/nargo_cli/Cargo.toml +++ b/crates/nargo_cli/Cargo.toml @@ -37,7 +37,7 @@ termcolor = "1.1.2" color-eyre = "0.6.2" # Backends -acvm-backend-barretenberg = { git = "https://github.com/noir-lang/aztec_backend", rev = "c9fb9e806f1400a2ff7594a0669bec56025220bb", default-features=false } +acvm-backend-barretenberg = { git = "https://github.com/noir-lang/aztec_backend", rev = "677f10e07011849f8aa0d75fe80390bb3081b1e5", default-features = false } [dev-dependencies] tempdir = "0.3.7" @@ -50,4 +50,3 @@ default = ["plonk_bn254"] # The plonk backend can only use bn254, so we do not specify the field plonk_bn254 = ["acvm-backend-barretenberg/native"] plonk_bn254_wasm = ["acvm-backend-barretenberg/wasm"] - diff --git a/crates/nargo_cli/tests/test_data/keccak256/Nargo.toml b/crates/nargo_cli/tests/test_data/keccak256/Nargo.toml new file mode 100644 index 00000000000..e0b467ce5da --- /dev/null +++ b/crates/nargo_cli/tests/test_data/keccak256/Nargo.toml @@ -0,0 +1,5 @@ +[package] +authors = [""] +compiler_version = "0.1" + 
+[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/keccak256/Prover.toml b/crates/nargo_cli/tests/test_data/keccak256/Prover.toml new file mode 100644 index 00000000000..d65c4011d3f --- /dev/null +++ b/crates/nargo_cli/tests/test_data/keccak256/Prover.toml @@ -0,0 +1,35 @@ +x = 0xbd +result = [ + 0x5a, + 0x50, + 0x2f, + 0x9f, + 0xca, + 0x46, + 0x7b, + 0x26, + 0x6d, + 0x5b, + 0x78, + 0x33, + 0x65, + 0x19, + 0x37, + 0xe8, + 0x05, + 0x27, + 0x0c, + 0xa3, + 0xf3, + 0xaf, + 0x1c, + 0x0d, + 0xd2, + 0x46, + 0x2d, + 0xca, + 0x4b, + 0x3b, + 0x1a, + 0xbf, +] diff --git a/crates/nargo_cli/tests/test_data/keccak256/src/main.nr b/crates/nargo_cli/tests/test_data/keccak256/src/main.nr new file mode 100644 index 00000000000..8fafbaa4e27 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/keccak256/src/main.nr @@ -0,0 +1,10 @@ +// Keccak256 example +// +use dep::std; + +fn main(x: Field, result: [u8; 32]) { + // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field + // The padding is taken care of by the program + let digest = std::hash::keccak256([x as u8]); + assert(digest == result); +} diff --git a/crates/noirc_evaluator/src/ssa/builtin.rs b/crates/noirc_evaluator/src/ssa/builtin.rs index 8e402343bb5..4f3741583d8 100644 --- a/crates/noirc_evaluator/src/ssa/builtin.rs +++ b/crates/noirc_evaluator/src/ssa/builtin.rs @@ -73,6 +73,7 @@ impl Opcode { match op { // Pointers do not overflow BlackBoxFunc::SHA256 + | BlackBoxFunc::Keccak256 | BlackBoxFunc::Blake2s | BlackBoxFunc::Pedersen | BlackBoxFunc::FixedBaseScalarMul => BigUint::zero(), @@ -84,9 +85,6 @@ impl Opcode { BlackBoxFunc::AES => { todo!("ICE: AES is unimplemented") } - BlackBoxFunc::Keccak256 => { - todo!("ICE: Keccak256 is unimplemented") - } BlackBoxFunc::RANGE | BlackBoxFunc::AND | BlackBoxFunc::XOR => { unimplemented!("ICE: these opcodes do not have Noir builtin functions") } @@ -105,10 +103,7 @@ impl Opcode { 
Opcode::LowLevel(op) => { match op { BlackBoxFunc::AES => todo!("ICE: AES is unimplemented"), - BlackBoxFunc::Keccak256 => { - todo!("ICE: Keccak256 is unimplemented") - } - BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s => { + BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s | BlackBoxFunc::Keccak256 => { (32, ObjectType::unsigned_integer(8)) } BlackBoxFunc::ComputeMerkleRoot | BlackBoxFunc::HashToField128Security => { diff --git a/flake.lock b/flake.lock index 6a6aff10f73..8d6445d3cf6 100644 --- a/flake.lock +++ b/flake.lock @@ -10,11 +10,11 @@ ] }, "locked": { - "lastModified": 1682626614, - "narHash": "sha256-TC535frlYLUTDZ2iHAtUtKpMJWngL1MFxzEXhOfeCo8=", + "lastModified": 1683314474, + "narHash": "sha256-gfHYpOnVTfS+4fhScBhfkB/e5z+jPFCi8zSy+aEh+8s=", "owner": "AztecProtocol", "repo": "barretenberg", - "rev": "7b5598890c1fa4ee485a4a0015fcb23b5389392e", + "rev": "ad615ee7dc931d3dbea041e47c96b9d8dccebf98", "type": "github" }, "original": { diff --git a/noir_stdlib/src/hash.nr b/noir_stdlib/src/hash.nr index 614918ae492..0618897d203 100644 --- a/noir_stdlib/src/hash.nr +++ b/noir_stdlib/src/hash.nr @@ -12,6 +12,9 @@ fn pedersen(_input : [Field; N]) -> [Field; 2] {} #[foreign(hash_to_field_128_security)] fn hash_to_field(_input : [Field; N]) -> Field {} +#[foreign(keccak256)] +fn keccak256(_input : [u8; N]) -> [u8; 32] {} + // mimc-p/p implementation // constants are (publicly generated) random numbers, for instance using keccak as a ROM. 
// You must use constants generated for the native field From b1dea8ab169c8b4c8b10d3c325def5f1f1e35f86 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 9 May 2023 12:13:55 +0100 Subject: [PATCH 51/66] chore: fix installation link in readme (#1326) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1830e43016f..f6977ff23e8 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ Noir is a Domain Specific Language for SNARK proving systems. It has been design ## Quick Start -Read the installation section [here](https://noir-lang.org/getting_started/nargo/nargo_installation). +Read the installation section [here](https://noir-lang.org/getting_started/nargo_installation). Once you have read through the documentation, you can visit [Awesome Noir](https://github.com/noir-lang/awesome-noir) to run some of the examples that others have created. From 75b2d165b13848811bfe9c920512c4abd00f048e Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 9 May 2023 12:13:55 +0100 Subject: [PATCH 52/66] chore: fix installation link in readme (#1326) From fa1af50497d5ad0eed6e4cf9df0afcdf01e7c69b Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 9 May 2023 07:16:09 -0400 Subject: [PATCH 53/66] chore(ssa refactor): Implement function inlining (#1293) * Start inlining pass * Get most of pass working * Finish function inlining pass * Add basic test * Address PR comments --- crates/noirc_evaluator/src/ssa_refactor.rs | 3 +- .../src/ssa_refactor/ir/basic_block.rs | 15 +- .../src/ssa_refactor/ir/dfg.rs | 18 +- .../src/ssa_refactor/ir/dom.rs | 4 +- .../src/ssa_refactor/ir/function.rs | 9 +- .../src/ssa_refactor/ir/instruction.rs | 39 +- .../src/ssa_refactor/ir/map.rs | 18 + .../src/ssa_refactor/opt/inlining.rs | 420 ++++++++++++++++++ .../src/ssa_refactor/opt/mod.rs | 6 + .../src/ssa_refactor/ssa_builder/mod.rs | 33 +- 
.../src/ssa_refactor/ssa_gen/mod.rs | 4 +- .../src/ssa_refactor/ssa_gen/program.rs | 37 +- 12 files changed, 566 insertions(+), 40 deletions(-) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs create mode 100644 crates/noirc_evaluator/src/ssa_refactor/opt/mod.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor.rs b/crates/noirc_evaluator/src/ssa_refactor.rs index 83f76c85ec1..34061227336 100644 --- a/crates/noirc_evaluator/src/ssa_refactor.rs +++ b/crates/noirc_evaluator/src/ssa_refactor.rs @@ -17,6 +17,7 @@ use self::acir_gen::Acir; mod acir_gen; mod ir; +mod opt; mod ssa_builder; pub mod ssa_gen; @@ -24,7 +25,7 @@ pub mod ssa_gen; /// form and performing optimizations there. When finished, /// convert the final SSA into ACIR and return it. pub fn optimize_into_acir(program: Program) -> Acir { - ssa_gen::generate_ssa(program).into_acir() + ssa_gen::generate_ssa(program).inline_functions().into_acir() } /// Compiles the Program into ACIR and applies optimizations to the arithmetic gates /// This is analogous to `ssa:create_circuit` and this method is called when one wants diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs index e8b09f518d8..30526bc296e 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs @@ -29,10 +29,10 @@ pub(crate) struct BasicBlock { pub(crate) type BasicBlockId = Id; impl BasicBlock { - /// Create a new BasicBlock with the given parameters. + /// Create a new BasicBlock with the given instructions. 
/// Parameters can also be added later via BasicBlock::add_parameter - pub(crate) fn new(parameters: Vec) -> Self { - Self { parameters, instructions: Vec::new(), terminator: None } + pub(crate) fn new(instructions: Vec) -> Self { + Self { parameters: Vec::new(), instructions, terminator: None } } /// Returns the parameters of this block @@ -57,6 +57,11 @@ impl BasicBlock { &self.instructions } + /// Retrieve a mutable reference to all instructions in this block. + pub(crate) fn instructions_mut(&mut self) -> &mut Vec { + &mut self.instructions + } + /// Sets the terminator instruction of this block. /// /// A properly-constructed block will always terminate with a TerminatorInstruction - @@ -91,8 +96,10 @@ impl BasicBlock { /// Removes the given instruction from this block if present or panics otherwise. pub(crate) fn remove_instruction(&mut self, instruction: InstructionId) { + // Iterate in reverse here as an optimization since remove_instruction is most + // often called to remove instructions at the end of a block. let index = - self.instructions.iter().position(|id| *id == instruction).unwrap_or_else(|| { + self.instructions.iter().rev().position(|id| *id == instruction).unwrap_or_else(|| { panic!("remove_instruction: No such instruction {instruction:?} in block") }); self.instructions.remove(index); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 67569c6a4c2..3ab345f06b9 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -13,7 +13,6 @@ use super::{ }; use acvm::FieldElement; -use iter_extended::vecmap; /// The DataFlowGraph contains most of the actual data in a function including /// its blocks, instructions, and values. This struct is largely responsible for @@ -69,22 +68,6 @@ impl DataFlowGraph { self.blocks.insert(BasicBlock::new(Vec::new())) } - /// Creates a new basic block with the given parameters. 
- /// After being created, the block is unreachable in the current function - /// until another block is made to jump to it. - pub(crate) fn make_block_with_parameters( - &mut self, - parameter_types: impl Iterator, - ) -> BasicBlockId { - self.blocks.insert_with_id(|entry_block| { - let parameters = vecmap(parameter_types.enumerate(), |(position, typ)| { - self.values.insert(Value::Param { block: entry_block, position, typ }) - }); - - BasicBlock::new(parameters) - }) - } - /// Get an iterator over references to each basic block within the dfg, paired with the basic /// block's id. /// @@ -95,6 +78,7 @@ impl DataFlowGraph { self.blocks.iter() } + /// Returns the parameters of the given block pub(crate) fn block_parameters(&self, block: BasicBlockId) -> &[ValueId] { self.blocks[block].parameters() } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs index 9a0916f62c8..dba656838b8 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs @@ -279,7 +279,7 @@ mod tests { builder.terminate_with_return(vec![]); let ssa = builder.finish(); - let func = ssa.functions.first().unwrap(); + let func = ssa.main(); let block0_id = func.entry_block(); let dt = DominatorTree::with_function(func); @@ -383,7 +383,7 @@ mod tests { builder.terminate_with_jmp(block1_id, vec![]); let ssa = builder.finish(); - let func = ssa.functions.first().unwrap(); + let func = ssa.main(); let block0_id = func.entry_block(); let dt = DominatorTree::with_function(func); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs index 8d90a139118..f37448462b7 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs @@ -2,6 +2,7 @@ use super::basic_block::BasicBlockId; use super::dfg::DataFlowGraph; use super::map::Id; use super::types::Type; 
+use super::value::ValueId; /// A function holds a list of instructions. /// These instructions are further grouped into Basic blocks @@ -10,7 +11,7 @@ use super::types::Type; /// To reference external functions its FunctionId can be used but this /// cannot be checked for correctness until inlining is performed. #[derive(Debug)] -pub struct Function { +pub(crate) struct Function { /// The first basic block in the function entry_block: BasicBlockId, @@ -54,6 +55,12 @@ impl Function { pub(crate) fn entry_block(&self) -> BasicBlockId { self.entry_block } + + /// Returns the parameters of this function. + /// The parameters will always match that of this function's entry block. + pub(crate) fn parameters(&self) -> &[ValueId] { + self.dfg.block_parameters(self.entry_block) + } } /// FunctionId is a reference for a function diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 66f8b1e3b17..812d12b23a3 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,4 +1,5 @@ use acvm::acir::BlackBoxFunc; +use iter_extended::vecmap; use super::{basic_block::BasicBlockId, map::Id, types::Type, value::ValueId}; @@ -114,6 +115,42 @@ impl Instruction { Instruction::Load { .. } | Instruction::Call { .. } => InstructionResultType::Unknown, } } + + /// True if this instruction requires specifying the control type variables when + /// inserting this instruction into a DataFlowGraph. + pub(crate) fn requires_ctrl_typevars(&self) -> bool { + matches!(self.result_type(), InstructionResultType::Unknown) + } + + /// Maps each ValueId inside this instruction to a new ValueId, returning the new instruction. + /// Note that the returned instruction is fresh and will not have an assigned InstructionId + /// until it is manually inserted in a DataFlowGraph later. 
+ pub(crate) fn map_values(&self, mut f: impl FnMut(ValueId) -> ValueId) -> Instruction { + match self { + Instruction::Binary(binary) => Instruction::Binary(Binary { + lhs: f(binary.lhs), + rhs: f(binary.rhs), + operator: binary.operator, + }), + Instruction::Cast(value, typ) => Instruction::Cast(f(*value), *typ), + Instruction::Not(value) => Instruction::Not(f(*value)), + Instruction::Truncate { value, bit_size, max_bit_size } => Instruction::Truncate { + value: f(*value), + bit_size: *bit_size, + max_bit_size: *max_bit_size, + }, + Instruction::Constrain(value) => Instruction::Constrain(f(*value)), + Instruction::Call { func, arguments } => Instruction::Call { + func: f(*func), + arguments: vecmap(arguments.iter().copied(), f), + }, + Instruction::Allocate { size } => Instruction::Allocate { size: *size }, + Instruction::Load { address } => Instruction::Load { address: f(*address) }, + Instruction::Store { address, value } => { + Instruction::Store { address: f(*address), value: f(*value) } + } + } + } } /// The possible return values for Instruction::return_types @@ -191,7 +228,7 @@ impl Binary { /// All binary operators are also only for numeric types. To implement /// e.g. equality for a compound type like a struct, one must add a /// separate Eq operation for each field and combine them later with And. -#[derive(Debug, PartialEq, Eq, Hash, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] pub(crate) enum BinaryOp { /// Addition of lhs + rhs. 
Add, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs index 14ea521359d..43baf4430c7 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs @@ -45,6 +45,18 @@ impl std::hash::Hash for Id { } } +impl PartialOrd for Id { + fn partial_cmp(&self, other: &Self) -> Option { + self.index.partial_cmp(&other.index) + } +} + +impl Ord for Id { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.index.cmp(&other.index) + } +} + impl Eq for Id {} impl PartialEq for Id { @@ -272,6 +284,12 @@ pub(crate) struct AtomicCounter { } impl AtomicCounter { + /// Create a new counter starting after the given Id. + /// Use AtomicCounter::default() to start at zero. + pub(crate) fn starting_after(id: Id) -> Self { + Self { next: AtomicUsize::new(id.index + 1), _marker: Default::default() } + } + /// Return the next fresh id pub(crate) fn next(&self) -> Id { Id::new(self.next.fetch_add(1, Ordering::Relaxed)) diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs b/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs new file mode 100644 index 00000000000..6e7c9848748 --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs @@ -0,0 +1,420 @@ +//! This module defines the function inlining pass for the SSA IR. +//! The purpose of this pass is to inline the instructions of each function call +//! within the function caller. If all function calls are known, there will only +//! be a single function remaining when the pass finishes. 
+use std::collections::{HashMap, HashSet}; + +use iter_extended::vecmap; + +use crate::ssa_refactor::{ + ir::{ + basic_block::BasicBlockId, + function::{Function, FunctionId}, + instruction::{Instruction, InstructionId, TerminatorInstruction}, + value::{Value, ValueId}, + }, + ssa_builder::FunctionBuilder, + ssa_gen::Ssa, +}; + +/// An arbitrary limit to the maximum number of recursive call +/// frames at any point in time. +const RECURSION_LIMIT: u32 = 1000; + +impl Ssa { + /// Inline all functions within the IR. + /// + /// In the case of recursive functions, this will attempt + /// to recursively inline until the RECURSION_LIMIT is reached. + /// + /// Functions are recursively inlined into main until either we finish + /// inlining all functions or we encounter a function whose function id is not known. + /// When the later happens, the call instruction is kept in addition to the function + /// it refers to. The function it refers to is kept unmodified without any inlining + /// changes. This is because if the function's id later becomes known by a later + /// pass, we would need to re-run all of inlining anyway to inline it, so we might + /// as well save the work for later instead of performing it twice. + pub(crate) fn inline_functions(self) -> Ssa { + InlineContext::new(&self).inline_all(self) + } +} + +/// The context for the function inlining pass. +/// +/// This works using an internal FunctionBuilder to build a new main function from scratch. +/// Doing it this way properly handles importing instructions between functions and lets us +/// reuse the existing API at the cost of essentially cloning each of main's instructions. +struct InlineContext { + recursion_level: u32, + builder: FunctionBuilder, + + /// True if we failed to inline at least one call. If this is still false when finishing + /// inlining we can remove all other functions from the resulting Ssa struct and keep only + /// the function that was inlined into. 
+ failed_to_inline_a_call: bool, +} + +/// The per-function inlining context contains information that is only valid for one function. +/// For example, each function has its own DataFlowGraph, and thus each function needs a translation +/// layer to translate between BlockId to BlockId for the current function and the function to +/// inline into. The same goes for ValueIds, InstructionIds, and for storing other data like +/// parameter to argument mappings. +struct PerFunctionContext<'function> { + /// The source function is the function we're currently inlining into the function being built. + source_function: &'function Function, + + /// The shared inlining context for all functions. This notably contains the FunctionBuilder used + /// to build the function we're inlining into. + context: &'function mut InlineContext, + + /// Maps ValueIds in the function being inlined to the new ValueIds to use in the function + /// being inlined into. This mapping also contains the mapping from parameter values to + /// argument values. + values: HashMap, + + /// Maps BasicBlockIds in the function being inlined to the new BasicBlockIds to use in the + /// function being inlined into. + blocks: HashMap, + + /// Maps InstructionIds from the function being inlined to the function being inlined into. + instructions: HashMap, + + /// The TerminatorInstruction::Return in the source_function will be mapped to a jmp to + /// this block in the destination function instead. + return_destination: BasicBlockId, + + /// True if we're currently working on the main function. + inlining_main: bool, +} + +impl InlineContext { + /// Create a new context object for the function inlining pass. + /// This starts off with an empty mapping of instructions for main's parameters. 
+ /// The function being inlined into will always be the main function, although it is + /// actually a copy that is created in case the original main is still needed from a function + /// that could not be inlined calling it. + fn new(ssa: &Ssa) -> InlineContext { + let main_name = ssa.main().name().to_owned(); + let builder = FunctionBuilder::new(main_name, ssa.next_id.next()); + Self { builder, recursion_level: 0, failed_to_inline_a_call: false } + } + + /// Start inlining the main function and all functions reachable from it. + fn inline_all(mut self, ssa: Ssa) -> Ssa { + let main = ssa.main(); + let mut context = PerFunctionContext::new(&mut self, main); + context.inlining_main = true; + + // The main block is already inserted so we have to add it to context.blocks and add + // its parameters here. Failing to do so would cause context.translate_block() to add + // a fresh block for the entry block rather than use the existing one. + let entry_block = context.context.builder.current_function.entry_block(); + let original_parameters = context.source_function.parameters(); + + for parameter in original_parameters { + let typ = context.source_function.dfg.type_of_value(*parameter); + let new_parameter = context.context.builder.add_block_parameter(entry_block, typ); + context.values.insert(*parameter, new_parameter); + } + + context.blocks.insert(context.source_function.entry_block(), entry_block); + context.inline_blocks(&ssa); + self.finish(ssa) + } + + /// Inlines a function into the current function and returns the translated return values + /// of the inlined function. + fn inline_function(&mut self, ssa: &Ssa, id: FunctionId, arguments: &[ValueId]) -> &[ValueId] { + self.recursion_level += 1; + + if self.recursion_level > RECURSION_LIMIT { + panic!( + "Attempted to recur more than {RECURSION_LIMIT} times during function inlining." 
+ ); + } + + let source_function = &ssa.functions[&id]; + let mut context = PerFunctionContext::new(self, source_function); + + let parameters = source_function.parameters(); + assert_eq!(parameters.len(), arguments.len()); + context.values = parameters.iter().copied().zip(arguments.iter().copied()).collect(); + + let current_block = context.context.builder.current_block(); + context.blocks.insert(source_function.entry_block(), current_block); + + context.inline_blocks(ssa); + let return_destination = context.return_destination; + self.builder.block_parameters(return_destination) + } + + /// Finish inlining and return the new Ssa struct with the inlined version of main. + /// If any functions failed to inline, they are not removed from the final Ssa struct. + fn finish(self, mut ssa: Ssa) -> Ssa { + let mut new_ssa = self.builder.finish(); + assert_eq!(new_ssa.functions.len(), 1); + + // If we failed to inline any call, any function may still be reachable so we + // don't remove any from the final program. We could be more precise here and + // do a reachability analysis but it should be fine to keep the extra functions + // around longer if they are not called. + if self.failed_to_inline_a_call { + let new_main = new_ssa.functions.pop_first().unwrap().1; + ssa.main_id = new_main.id(); + ssa.functions.insert(new_main.id(), new_main); + ssa + } else { + new_ssa + } + } +} + +impl<'function> PerFunctionContext<'function> { + /// Create a new PerFunctionContext from the source function. + /// The value and block mappings for this context are initially empty except + /// for containing the mapping between parameters in the source_function and + /// the arguments of the destination function. + fn new(context: &'function mut InlineContext, source_function: &'function Function) -> Self { + // Create the block to return to but don't insert its parameters until we + // have the types of the actual return values later. 
+ Self { + return_destination: context.builder.insert_block(), + context, + source_function, + blocks: HashMap::new(), + instructions: HashMap::new(), + values: HashMap::new(), + inlining_main: false, + } + } + + /// Translates a ValueId from the function being inlined to a ValueId of the function + /// being inlined into. Note that this expects value ids for all Value::Instruction and + /// Value::Param values are already handled as a result of previous inlining of instructions + /// and blocks respectively. If these assertions trigger it means a value is being used before + /// the instruction or block that defines the value is inserted. + fn translate_value(&mut self, id: ValueId) -> ValueId { + if let Some(value) = self.values.get(&id) { + return *value; + } + + let new_value = match &self.source_function.dfg[id] { + value @ Value::Instruction { .. } => { + unreachable!("All Value::Instructions should already be known during inlining after creating the original inlined instruction. Unknown value {id} = {value:?}") + } + value @ Value::Param { .. } => { + unreachable!("All Value::Params should already be known from previous calls to translate_block. Unknown value {id} = {value:?}") + } + Value::NumericConstant { constant, typ } => { + let value = self.source_function.dfg[*constant].value(); + self.context.builder.numeric_constant(value, *typ) + } + Value::Function(function) => self.context.builder.import_function(*function), + Value::Intrinsic(intrinsic) => self.context.builder.import_intrinsic_id(*intrinsic), + }; + + self.values.insert(id, new_value); + new_value + } + + /// Translate a block id from the source function to one of the target function. + /// + /// If the block isn't already known, this will insert a new block into the target function + /// with the same parameter types as the source block. 
+ fn translate_block( + &mut self, + source_block: BasicBlockId, + block_queue: &mut Vec, + ) -> BasicBlockId { + if let Some(block) = self.blocks.get(&source_block) { + return *block; + } + + // The block is not yet inlined, queue it + block_queue.push(source_block); + + // The block is not already present in the function being inlined into so we must create it. + // The block's instructions are not copied over as they will be copied later in inlining. + let new_block = self.context.builder.insert_block(); + let original_parameters = self.source_function.dfg.block_parameters(source_block); + + for parameter in original_parameters { + let typ = self.source_function.dfg.type_of_value(*parameter); + let new_parameter = self.context.builder.add_block_parameter(new_block, typ); + self.values.insert(*parameter, new_parameter); + } + + self.blocks.insert(source_block, new_block); + new_block + } + + /// Try to retrieve the function referred to by the given Id. + /// Expects that the given ValueId belongs to the source_function. + /// + /// Returns None if the id is not known to refer to a function. + fn get_function(&mut self, mut id: ValueId) -> Option { + id = self.translate_value(id); + match self.context.builder[id] { + Value::Function(id) => Some(id), + Value::Intrinsic(_) => None, + _ => { + self.context.failed_to_inline_a_call = true; + None + } + } + } + + /// Inline all reachable blocks within the source_function into the destination function. 
+ fn inline_blocks(&mut self, ssa: &Ssa) { + let mut seen_blocks = HashSet::new(); + let mut block_queue = vec![self.source_function.entry_block()]; + + while let Some(source_block_id) = block_queue.pop() { + let translated_block_id = self.translate_block(source_block_id, &mut block_queue); + self.context.builder.switch_to_block(translated_block_id); + + seen_blocks.insert(source_block_id); + self.inline_block(ssa, source_block_id); + self.handle_terminator_instruction(source_block_id, &mut block_queue); + } + + self.context.builder.switch_to_block(self.return_destination); + } + + /// Inline each instruction in the given block into the function being inlined into. + /// This may recurse if it finds another function to inline if a call instruction is within this block. + fn inline_block(&mut self, ssa: &Ssa, block_id: BasicBlockId) { + let block = &self.source_function.dfg[block_id]; + for id in block.instructions() { + match &self.source_function.dfg[*id] { + Instruction::Call { func, arguments } => match self.get_function(*func) { + Some(function) => self.inline_function(ssa, *id, function, arguments), + None => self.push_instruction(*id), + }, + _ => self.push_instruction(*id), + } + } + } + + /// Inline a function call and remember the inlined return values in the values map + fn inline_function( + &mut self, + ssa: &Ssa, + call_id: InstructionId, + function: FunctionId, + arguments: &[ValueId], + ) { + let old_results = self.source_function.dfg.instruction_results(call_id); + let new_results = self.context.inline_function(ssa, function, arguments); + Self::insert_new_instruction_results(&mut self.values, old_results, new_results); + } + + /// Push the given instruction from the source_function into the current block of the + /// function being inlined into. 
+ fn push_instruction(&mut self, id: InstructionId) { + let instruction = self.source_function.dfg[id].map_values(|id| self.translate_value(id)); + let results = self.source_function.dfg.instruction_results(id); + + let ctrl_typevars = instruction + .requires_ctrl_typevars() + .then(|| vecmap(results, |result| self.source_function.dfg.type_of_value(*result))); + + let new_results = self.context.builder.insert_instruction(instruction, ctrl_typevars); + Self::insert_new_instruction_results(&mut self.values, results, new_results); + } + + /// Modify the values HashMap to remember the mapping between an instruction result's previous + /// ValueId (from the source_function) and its new ValueId in the destination function. + fn insert_new_instruction_results( + values: &mut HashMap, + old_results: &[ValueId], + new_results: &[ValueId], + ) { + assert_eq!(old_results.len(), new_results.len()); + for (old_result, new_result) in old_results.iter().zip(new_results) { + values.insert(*old_result, *new_result); + } + } + + /// Handle the given terminator instruction from the given source function block. + /// This will push any new blocks to the destination function as needed, add them + /// to the block queue, and set the terminator instruction for the current block. 
+ fn handle_terminator_instruction( + &mut self, + block_id: BasicBlockId, + block_queue: &mut Vec, + ) { + match self.source_function.dfg[block_id].terminator() { + Some(TerminatorInstruction::Jmp { destination, arguments }) => { + let destination = self.translate_block(*destination, block_queue); + let arguments = vecmap(arguments, |arg| self.translate_value(*arg)); + self.context.builder.terminate_with_jmp(destination, arguments); + } + Some(TerminatorInstruction::JmpIf { + condition, + then_destination, + else_destination, + }) => { + let condition = self.translate_value(*condition); + let then_block = self.translate_block(*then_destination, block_queue); + let else_block = self.translate_block(*else_destination, block_queue); + self.context.builder.terminate_with_jmpif(condition, then_block, else_block); + } + Some(TerminatorInstruction::Return { return_values }) => { + let return_values = vecmap(return_values, |value| self.translate_value(*value)); + + if self.inlining_main { + self.context.builder.terminate_with_return(return_values); + } else { + for value in &return_values { + // Add the block parameters for the return block here since we don't do + // it when inserting the block in PerFunctionContext::new + let typ = self.context.builder.current_function.dfg.type_of_value(*value); + self.context.builder.add_block_parameter(self.return_destination, typ); + } + self.context.builder.terminate_with_jmp(self.return_destination, return_values); + } + } + None => unreachable!("Block has no terminator instruction"), + } + } +} + +#[cfg(test)] +mod test { + use crate::ssa_refactor::{ + ir::{map::Id, types::Type}, + ssa_builder::FunctionBuilder, + }; + + #[test] + fn basic_inlining() { + // fn foo { + // b0(): + // v0 = call bar() + // return v0 + // } + // fn bar { + // b0(): + // return 72 + // } + let foo_id = Id::test_new(0); + let mut builder = FunctionBuilder::new("foo".into(), foo_id); + + let bar_id = Id::test_new(1); + let bar = 
builder.import_function(bar_id); + let results = builder.insert_call(bar, Vec::new(), vec![Type::field()]).to_vec(); + builder.terminate_with_return(results); + + builder.new_function("bar".into(), bar_id); + let expected_return = 72u128; + let seventy_two = builder.field_constant(expected_return); + builder.terminate_with_return(vec![seventy_two]); + + let ssa = builder.finish(); + assert_eq!(ssa.functions.len(), 2); + + let inlined = ssa.inline_functions(); + assert_eq!(inlined.functions.len(), 1); + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/opt/mod.rs new file mode 100644 index 00000000000..46ca7d443bc --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/opt/mod.rs @@ -0,0 +1,6 @@ +//! This folder contains each optimization pass for the SSA IR. +//! +//! Each pass is generally expected to mutate the SSA IR into a gradually +//! simpler form until the IR only has a single function remaining with 1 block within it. +//! Generally, these passes are also expected to minimize the final amount of instructions. +mod inlining; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index aa67cbed583..f621503e59a 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -9,7 +9,10 @@ use crate::ssa_refactor::ir::{ }; use super::{ - ir::instruction::{InstructionId, Intrinsic}, + ir::{ + basic_block::BasicBlock, + instruction::{InstructionId, Intrinsic}, + }, ssa_gen::Ssa, }; @@ -95,8 +98,13 @@ impl FunctionBuilder { self.current_function.dfg.add_block_parameter(block, typ) } + /// Returns the parameters of the given block in the current function. 
+ pub(crate) fn block_parameters(&self, block: BasicBlockId) -> &[ValueId] { + self.current_function.dfg.block_parameters(block) + } + /// Inserts a new instruction at the end of the current block and returns its results - fn insert_instruction( + pub(crate) fn insert_instruction( &mut self, instruction: Instruction, ctrl_typevars: Option>, @@ -113,6 +121,11 @@ impl FunctionBuilder { self.current_block = block; } + /// Returns the block currently being inserted into + pub(crate) fn current_block(&mut self) -> BasicBlockId { + self.current_block + } + /// Insert an allocate instruction at the end of the current block, allocating the /// given amount of field elements. Returns the result of the allocate instruction, /// which is always a Reference to the allocated data. @@ -228,8 +241,12 @@ impl FunctionBuilder { /// Retrieve a value reference to the given intrinsic operation. /// Returns None if there is no intrinsic matching the given name. pub(crate) fn import_intrinsic(&mut self, name: &str) -> Option { - Intrinsic::lookup(name) - .map(|intrinsic| self.current_function.dfg.import_intrinsic(intrinsic)) + Intrinsic::lookup(name).map(|intrinsic| self.import_intrinsic_id(intrinsic)) + } + + /// Retrieve a value reference to the given intrinsic operation. + pub(crate) fn import_intrinsic_id(&mut self, intrinsic: Intrinsic) -> ValueId { + self.current_function.dfg.import_intrinsic(intrinsic) } /// Removes the given instruction from the current block or panics otherwise. 
@@ -253,3 +270,11 @@ impl std::ops::Index for FunctionBuilder { &self.current_function.dfg[id] } } + +impl std::ops::Index for FunctionBuilder { + type Output = BasicBlock; + + fn index(&self, id: BasicBlockId) -> &Self::Output { + &self.current_function.dfg[id] + } +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 80fde837f9f..34317b7df2d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -2,7 +2,7 @@ mod context; mod program; mod value; -pub use program::Ssa; +pub(crate) use program::Ssa; use context::SharedContext; use iter_extended::vecmap; @@ -19,7 +19,7 @@ use super::ir::{instruction::BinaryOp, types::Type, value::ValueId}; /// Generates SSA for the given monomorphized program. /// /// This function will generate the SSA but does not perform any optimizations on it. -pub fn generate_ssa(program: Program) -> Ssa { +pub(crate) fn generate_ssa(program: Program) -> Ssa { let context = SharedContext::new(program); let main_id = Program::main_id(); diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs index de4f01fc613..7f4b9a8dd25 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs @@ -1,22 +1,43 @@ -use std::fmt::Display; +use std::{collections::BTreeMap, fmt::Display}; -use crate::ssa_refactor::ir::function::Function; +use iter_extended::btree_map; + +use crate::ssa_refactor::ir::{ + function::{Function, FunctionId}, + map::AtomicCounter, +}; /// Contains the entire SSA representation of the program. 
-pub struct Ssa { - pub functions: Vec, +pub(crate) struct Ssa { + pub(crate) functions: BTreeMap, + pub(crate) main_id: FunctionId, + pub(crate) next_id: AtomicCounter, } impl Ssa { - /// Create a new Ssa object from the given SSA functions - pub fn new(functions: Vec) -> Self { - Self { functions } + /// Create a new Ssa object from the given SSA functions. + /// The first function in this vector is expected to be the main function. + pub(crate) fn new(functions: Vec) -> Self { + let main_id = functions.first().expect("Expected at least 1 SSA function").id(); + let mut max_id = main_id; + + let functions = btree_map(functions, |f| { + max_id = std::cmp::max(max_id, f.id()); + (f.id(), f) + }); + + Self { functions, main_id, next_id: AtomicCounter::starting_after(max_id) } + } + + /// Returns the entry-point function of the program + pub(crate) fn main(&self) -> &Function { + &self.functions[&self.main_id] } } impl Display for Ssa { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - for function in &self.functions { + for function in self.functions.values() { writeln!(f, "{function}")?; } Ok(()) From e123aa7a46251796f91a683a4db402fcf0e5c006 Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Tue, 9 May 2023 18:42:46 +0100 Subject: [PATCH 54/66] chore(parser): Parser error optimisation (#1292) * chore(parser): optimize errors by: - switching labels to enums - Using LateAllocSet in place of BTreeSet * chore(parser): wrap LateAllocSet enum in struct * chore(parser): fix comment table formatting * chore(parser): ParserError to use SmallOrdSet * chore(parser): ParserLabel -> ParsingRuleLabel * chore(parser): tidy iter usage * chore(parser): tweak SmallOrdSet sizes --- Cargo.lock | 10 +++ crates/noirc_frontend/Cargo.toml | 1 + crates/noirc_frontend/src/ast/mod.rs | 3 + .../src/hir/resolution/errors.rs | 4 +- .../src/hir/resolution/resolver.rs | 2 +- crates/noirc_frontend/src/lexer/token.rs | 2 +- 
crates/noirc_frontend/src/parser/errors.rs | 39 ++++++---- crates/noirc_frontend/src/parser/labels.rs | 42 ++++++++++ crates/noirc_frontend/src/parser/mod.rs | 1 + crates/noirc_frontend/src/parser/parser.rs | 78 ++++++++++++------- 10 files changed, 132 insertions(+), 50 deletions(-) create mode 100644 crates/noirc_frontend/src/parser/labels.rs diff --git a/Cargo.lock b/Cargo.lock index 6fe210ab93a..7a631368870 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1981,6 +1981,7 @@ dependencies = [ "noirc_errors", "rustc-hash", "serde", + "small-ord-set", "smol_str", "strum", "strum_macros", @@ -2898,6 +2899,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "small-ord-set" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf7035a2b2268a5be8c1395738565b06beda836097e12021cdefc06b127a0e7e" +dependencies = [ + "smallvec", +] + [[package]] name = "smallvec" version = "1.10.0" diff --git a/crates/noirc_frontend/Cargo.toml b/crates/noirc_frontend/Cargo.toml index b5551d17f51..f3fc1c83758 100644 --- a/crates/noirc_frontend/Cargo.toml +++ b/crates/noirc_frontend/Cargo.toml @@ -18,6 +18,7 @@ thiserror.workspace = true smol_str.workspace = true serde.workspace = true rustc-hash = "1.1.0" +small-ord-set = "0.1.3" [dev-dependencies] strum = "0.24" diff --git a/crates/noirc_frontend/src/ast/mod.rs b/crates/noirc_frontend/src/ast/mod.rs index 37eb944e0c6..24004e34ffa 100644 --- a/crates/noirc_frontend/src/ast/mod.rs +++ b/crates/noirc_frontend/src/ast/mod.rs @@ -152,6 +152,9 @@ pub enum Signedness { } impl UnresolvedTypeExpression { + // This large error size is justified because it improves parsing speeds by around 40% in + // release mode. See `ParserError` definition for further explanation. 
+ #[allow(clippy::result_large_err)] pub fn from_expr( expr: Expression, span: Span, diff --git a/crates/noirc_frontend/src/hir/resolution/errors.rs b/crates/noirc_frontend/src/hir/resolution/errors.rs index c57e4c890d2..87257cbb842 100644 --- a/crates/noirc_frontend/src/hir/resolution/errors.rs +++ b/crates/noirc_frontend/src/hir/resolution/errors.rs @@ -57,7 +57,7 @@ pub enum ResolverError { #[error("Incorrect amount of arguments to generic type constructor")] IncorrectGenericCount { span: Span, struct_type: String, actual: usize, expected: usize }, #[error("{0}")] - ParserError(ParserError), + ParserError(Box), #[error("Function is not defined in a contract yet sets its contract visibility")] ContractFunctionTypeInNormalFunction { span: Span }, } @@ -252,7 +252,7 @@ impl From for Diagnostic { span, ) } - ResolverError::ParserError(error) => error.into(), + ResolverError::ParserError(error) => (*error).into(), ResolverError::ContractFunctionTypeInNormalFunction { span } => Diagnostic::simple_error( "Only functions defined within contracts can set their contract function type".into(), "Non-contract functions cannot be 'open'".into(), diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index f03bcefeb2d..d80bca9df17 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -859,7 +859,7 @@ impl<'a> Resolver<'a> { let span = length.span; let length = UnresolvedTypeExpression::from_expr(*length, span).unwrap_or_else( |error| { - self.errors.push(ResolverError::ParserError(error)); + self.errors.push(ResolverError::ParserError(Box::new(error))); UnresolvedTypeExpression::Constant(0, span) }, ); diff --git a/crates/noirc_frontend/src/lexer/token.rs b/crates/noirc_frontend/src/lexer/token.rs index bfcd0f4be51..fe0e3bf1f90 100644 --- a/crates/noirc_frontend/src/lexer/token.rs +++ b/crates/noirc_frontend/src/lexer/token.rs @@ -189,7 
+189,7 @@ impl fmt::Display for Token { } } -#[derive(PartialEq, Eq, Hash, Debug, Clone)] +#[derive(PartialEq, Eq, Hash, Debug, Clone, Ord, PartialOrd)] /// The different kinds of tokens that are possible in the target language pub enum TokenKind { Token(Token), diff --git a/crates/noirc_frontend/src/parser/errors.rs b/crates/noirc_frontend/src/parser/errors.rs index c339835fbc3..d4a294482a8 100644 --- a/crates/noirc_frontend/src/parser/errors.rs +++ b/crates/noirc_frontend/src/parser/errors.rs @@ -1,13 +1,14 @@ -use std::collections::BTreeSet; - use crate::lexer::token::Token; use crate::Expression; +use small_ord_set::SmallOrdSet; use thiserror::Error; use iter_extended::vecmap; use noirc_errors::CustomDiagnostic as Diagnostic; use noirc_errors::Span; +use super::labels::ParsingRuleLabel; + #[derive(Debug, Clone, PartialEq, Eq, Error)] pub enum ParserErrorReason { #[error("Arrays must have at least one element")] @@ -22,10 +23,22 @@ pub enum ParserErrorReason { InvalidArrayLengthExpression(Expression), } +/// Represents a parsing error, or a parsing error in the making. +/// +/// `ParserError` is used extensively by the parser, as it not only used to report badly formed +/// token streams, but also as a general intermediate that accumulates information as various +/// parsing rules are tried. This struct is constructed and destructed with a very high frequency +/// and as such, the time taken to do so significantly impacts parsing performance. For this +/// reason we use `SmallOrdSet` to avoid heap allocations for as long as possible - this greatly +/// inflates the size of the error, but this is justified by a resulting increase in parsing +/// speeds of approximately 40% in release mode. +/// +/// Both `expected_tokens` and `expected_labels` use `SmallOrdSet` sized 1. In the of labels this +/// is optimal. In the of tokens we stop here due to fast diminishing returns. 
#[derive(Debug, Clone, PartialEq, Eq)] pub struct ParserError { - expected_tokens: BTreeSet, - expected_labels: BTreeSet, + expected_tokens: SmallOrdSet<[Token; 1]>, + expected_labels: SmallOrdSet<[ParsingRuleLabel; 1]>, found: Token, reason: Option, span: Span, @@ -34,21 +47,15 @@ pub struct ParserError { impl ParserError { pub fn empty(found: Token, span: Span) -> ParserError { ParserError { - expected_tokens: BTreeSet::new(), - expected_labels: BTreeSet::new(), + expected_tokens: SmallOrdSet::new(), + expected_labels: SmallOrdSet::new(), found, reason: None, span, } } - pub fn expected(token: Token, found: Token, span: Span) -> ParserError { - let mut error = ParserError::empty(found, span); - error.expected_tokens.insert(token); - error - } - - pub fn expected_label(label: String, found: Token, span: Span) -> ParserError { + pub fn expected_label(label: ParsingRuleLabel, found: Token, span: Span) -> ParserError { let mut error = ParserError::empty(found, span); error.expected_labels.insert(label); error @@ -64,7 +71,7 @@ impl ParserError { impl std::fmt::Display for ParserError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut expected = vecmap(&self.expected_tokens, ToString::to_string); - expected.append(&mut vecmap(&self.expected_labels, Clone::clone)); + expected.append(&mut vecmap(&self.expected_labels, |label| format!("{label}"))); if expected.is_empty() { write!(f, "Unexpected {} in input", self.found) @@ -112,7 +119,7 @@ impl From for Diagnostic { impl chumsky::Error for ParserError { type Span = Span; - type Label = String; + type Label = ParsingRuleLabel; fn expected_input_found(span: Self::Span, expected: Iter, found: Option) -> Self where @@ -120,7 +127,7 @@ impl chumsky::Error for ParserError { { ParserError { expected_tokens: expected.into_iter().map(|opt| opt.unwrap_or(Token::EOF)).collect(), - expected_labels: BTreeSet::new(), + expected_labels: SmallOrdSet::new(), found: found.unwrap_or(Token::EOF), reason: None, 
span, diff --git a/crates/noirc_frontend/src/parser/labels.rs b/crates/noirc_frontend/src/parser/labels.rs new file mode 100644 index 00000000000..b43c10fb9e7 --- /dev/null +++ b/crates/noirc_frontend/src/parser/labels.rs @@ -0,0 +1,42 @@ +use std::fmt; + +use crate::token::TokenKind; + +/// Used to annotate parsing rules with extra context that can be presented to the user later in +/// the case of an error. +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub enum ParsingRuleLabel { + Atom, + BinaryOperator, + Cast, + Expression, + FieldAccess, + Global, + IntegerType, + Parameter, + Pattern, + Statement, + Term, + TypeExpression, + TokenKind(TokenKind), +} + +impl fmt::Display for ParsingRuleLabel { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ParsingRuleLabel::Atom => write!(f, "atom"), + ParsingRuleLabel::BinaryOperator => write!(f, "binary operator"), + ParsingRuleLabel::Cast => write!(f, "cast"), + ParsingRuleLabel::Expression => write!(f, "expression"), + ParsingRuleLabel::FieldAccess => write!(f, "field access"), + ParsingRuleLabel::Global => write!(f, "global"), + ParsingRuleLabel::IntegerType => write!(f, "integer type"), + ParsingRuleLabel::Parameter => write!(f, "parameter"), + ParsingRuleLabel::Pattern => write!(f, "pattern"), + ParsingRuleLabel::Statement => write!(f, "statement"), + ParsingRuleLabel::Term => write!(f, "term"), + ParsingRuleLabel::TypeExpression => write!(f, "type expression"), + ParsingRuleLabel::TokenKind(token_kind) => write!(f, "{:?}", token_kind), + } + } +} diff --git a/crates/noirc_frontend/src/parser/mod.rs b/crates/noirc_frontend/src/parser/mod.rs index 98b7fffbf14..a8b7f43fa5c 100644 --- a/crates/noirc_frontend/src/parser/mod.rs +++ b/crates/noirc_frontend/src/parser/mod.rs @@ -7,6 +7,7 @@ //! This file is mostly helper functions and types for the parser. For the parser itself, //! see parser.rs. The definition of the abstract syntax tree can be found in the `ast` folder. 
mod errors; +mod labels; #[allow(clippy::module_inception)] mod parser; diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 3a8c8f49303..98b45247567 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -24,9 +24,10 @@ //! be limited to cases like the above `fn` example where it is clear we shouldn't back out of the //! current parser to try alternative parsers in a `choice` expression. use super::{ - foldl_with_span, parameter_name_recovery, parameter_recovery, parenthesized, then_commit, - then_commit_ignore, top_level_statement_recovery, ExprParser, ForRange, NoirParser, - ParsedModule, ParserError, ParserErrorReason, Precedence, SubModule, TopLevelStatement, + foldl_with_span, labels::ParsingRuleLabel, parameter_name_recovery, parameter_recovery, + parenthesized, then_commit, then_commit_ignore, top_level_statement_recovery, ExprParser, + ForRange, NoirParser, ParsedModule, ParserError, ParserErrorReason, Precedence, SubModule, + TopLevelStatement, }; use crate::ast::{Expression, ExpressionKind, LetStatement, Statement, UnresolvedType}; use crate::lexer::Lexer; @@ -113,7 +114,7 @@ fn top_level_statement( /// global_declaration: 'global' ident global_type_annotation '=' literal fn global_declaration() -> impl NoirParser { let p = ignore_then_commit( - keyword(Keyword::Global).labelled("global"), + keyword(Keyword::Global).labelled(ParsingRuleLabel::Global), ident().map(Pattern::Identifier), ); let p = then_commit(p, global_type_annotation()); @@ -273,7 +274,10 @@ fn lambda_parameters() -> impl NoirParser> { .recover_via(parameter_name_recovery()) .then(typ.or_not().map(|typ| typ.unwrap_or(UnresolvedType::Unspecified))); - parameter.separated_by(just(Token::Comma)).allow_trailing().labelled("parameter") + parameter + .separated_by(just(Token::Comma)) + .allow_trailing() + .labelled(ParsingRuleLabel::Parameter) } fn function_parameters<'a>( @@ -292,7 
+296,10 @@ fn function_parameters<'a>( let parameter = full_parameter.or(self_parameter); - parameter.separated_by(just(Token::Comma)).allow_trailing().labelled("parameter") + parameter + .separated_by(just(Token::Comma)) + .allow_trailing() + .labelled(ParsingRuleLabel::Parameter) } /// This parser always parses no input and fails @@ -308,7 +315,7 @@ fn self_parameter() -> impl NoirParser<(Pattern, UnresolvedType, AbiVisibility)> let self_type = UnresolvedType::Named(path, vec![]); Ok((Pattern::Identifier(ident), self_type, AbiVisibility::Private)) } - _ => Err(ParserError::expected_label("parameter".to_owned(), found, span)), + _ => Err(ParserError::expected_label(ParsingRuleLabel::Parameter, found, span)), }) } @@ -406,7 +413,11 @@ fn token_kind(token_kind: TokenKind) -> impl NoirParser { if found.kind() == token_kind { Ok(found) } else { - Err(ParserError::expected_label(token_kind.to_string(), found, span)) + Err(ParserError::expected_label( + ParsingRuleLabel::TokenKind(token_kind.clone()), + found, + span, + )) } }) } @@ -446,12 +457,15 @@ fn constrain<'a, P>(expr_parser: P) -> impl NoirParser + 'a where P: ExprParser + 'a, { - ignore_then_commit(keyword(Keyword::Constrain).labelled("statement"), expr_parser) - .map(|expr| Statement::Constrain(ConstrainStatement(expr))) - .validate(|expr, span, emit| { - emit(ParserError::with_reason(ParserErrorReason::ConstrainDeprecated, span)); - expr - }) + ignore_then_commit( + keyword(Keyword::Constrain).labelled(ParsingRuleLabel::Statement), + expr_parser, + ) + .map(|expr| Statement::Constrain(ConstrainStatement(expr))) + .validate(|expr, span, emit| { + emit(ParserError::with_reason(ParserErrorReason::ConstrainDeprecated, span)); + expr + }) } fn assertion<'a, P>(expr_parser: P) -> impl NoirParser + 'a @@ -459,7 +473,7 @@ where P: ExprParser + 'a, { ignore_then_commit(keyword(Keyword::Assert), parenthesized(expr_parser)) - .labelled("statement") + .labelled(ParsingRuleLabel::Statement) .map(|expr| 
Statement::Constrain(ConstrainStatement(expr))) } @@ -467,7 +481,8 @@ fn declaration<'a, P>(expr_parser: P) -> impl NoirParser + 'a where P: ExprParser + 'a, { - let p = ignore_then_commit(keyword(Keyword::Let).labelled("statement"), pattern()); + let p = + ignore_then_commit(keyword(Keyword::Let).labelled(ParsingRuleLabel::Statement), pattern()); let p = p.then(optional_type_annotation()); let p = then_commit_ignore(p, just(Token::Assign)); let p = then_commit(p, expr_parser); @@ -501,14 +516,15 @@ fn pattern() -> impl NoirParser { choice((mut_pattern, tuple_pattern, struct_pattern, ident_pattern)) }) - .labelled("pattern") + .labelled(ParsingRuleLabel::Pattern) } fn assignment<'a, P>(expr_parser: P) -> impl NoirParser + 'a where P: ExprParser + 'a, { - let fallible = lvalue(expr_parser.clone()).then(assign_operator()).labelled("statement"); + let fallible = + lvalue(expr_parser.clone()).then(assign_operator()).labelled(ParsingRuleLabel::Statement); then_commit(fallible, expr_parser).map_with_span( |((identifier, operator), expression), span| { @@ -623,7 +639,7 @@ fn int_type() -> impl NoirParser { .then(filter_map(|span, token: Token| match token { Token::IntType(int_type) => Ok(int_type), unexpected => { - Err(ParserError::expected_label("integer type".to_string(), unexpected, span)) + Err(ParserError::expected_label(ParsingRuleLabel::IntegerType, unexpected, span)) } })) .map(UnresolvedType::from_int_token) @@ -669,7 +685,7 @@ fn array_type(type_parser: impl NoirParser) -> impl NoirParser impl NoirParser { recursive(|expr| expression_with_precedence(Precedence::lowest_type_precedence(), expr, true)) - .labelled("type expression") + .labelled(ParsingRuleLabel::TypeExpression) .try_map(UnresolvedTypeExpression::from_expr) } @@ -695,7 +711,7 @@ where fn expression() -> impl ExprParser { recursive(|expr| expression_with_precedence(Precedence::Lowest, expr, false)) - .labelled("expression") + .labelled(ParsingRuleLabel::Expression) } // An expression is a single 
term followed by 0 or more (OP subexpression)* @@ -712,9 +728,9 @@ where { if precedence == Precedence::Highest { if is_type_expression { - type_expression_term(expr_parser).boxed().labelled("term") + type_expression_term(expr_parser).boxed().labelled(ParsingRuleLabel::Term) } else { - term(expr_parser).boxed().labelled("term") + term(expr_parser).boxed().labelled(ParsingRuleLabel::Term) } } else { let next_precedence = @@ -728,7 +744,7 @@ where .then(then_commit(operator_with_precedence(precedence), next_expr).repeated()) .foldl(create_infix_expression) .boxed() - .labelled("expression") + .labelled(ParsingRuleLabel::Expression) } } @@ -753,7 +769,7 @@ fn operator_with_precedence(precedence: Precedence) -> impl NoirParser(expr_parser: P) -> impl NoirParser From 12f3e7e5917fdcb6b8648032772a7541eaef4751 Mon Sep 17 00:00:00 2001 From: Ayush Shukla Date: Thu, 11 May 2023 19:20:53 +0400 Subject: [PATCH 55/66] fix: to-bits and to-radix for > 128 bits (#1312) * fix: to-bits and to-radix for > 128 bits * revert: use earlier overflow check that ensures unicity --- .../noirc_evaluator/src/ssa/optimizations.rs | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa/optimizations.rs b/crates/noirc_evaluator/src/ssa/optimizations.rs index 8dcc7cfecf0..f1cfca9c243 100644 --- a/crates/noirc_evaluator/src/ssa/optimizations.rs +++ b/crates/noirc_evaluator/src/ssa/optimizations.rs @@ -9,7 +9,7 @@ use crate::ssa::{ }, }; use acvm::FieldElement; -use num_bigint::ToBigUint; +use num_bigint::BigUint; pub(super) fn simplify_id(ctx: &mut SsaContext, ins_id: NodeId) -> Result<(), RuntimeError> { let mut ins = ctx.instruction(ins_id).clone(); @@ -74,14 +74,16 @@ pub(super) fn simplify(ctx: &mut SsaContext, ins: &mut Instruction) -> Result<() fn evaluate_intrinsic( ctx: &mut SsaContext, op: builtin::Opcode, - args: Vec, + args: Vec, res_type: &ObjectType, block_id: BlockId, ) -> Result, RuntimeErrorKind> { match op { 
builtin::Opcode::ToBits(_) => { - let bit_count = args[1] as u32; + let bit_count = args[1].to_u128() as u32; let mut result = Vec::new(); + let mut bits = args[0].bits(); + bits.reverse(); if let ObjectType::ArrayPointer(a) = res_type { for i in 0..bit_count { @@ -89,7 +91,7 @@ fn evaluate_intrinsic( FieldElement::from(i as i128), ObjectType::native_field(), ); - let op = if args[0] & (1 << i) != 0 { + let op = if i < bits.len() as u32 && bits[i as usize] { Operation::Store { array_id: *a, index, @@ -116,9 +118,10 @@ fn evaluate_intrinsic( ); } builtin::Opcode::ToRadix(endian) => { - let mut element = args[0].to_biguint().unwrap().to_radix_le(args[1] as u32); - let byte_count = args[2] as u32; - let diff = if byte_count > element.len() as u32 { + let mut element = BigUint::from_bytes_be(&args[0].to_be_bytes()) + .to_radix_le(args[1].to_u128() as u32); + let byte_count = args[2].to_u128() as u32; + let diff = if byte_count >= element.len() as u32 { byte_count - element.len() as u32 } else { return Err(RuntimeErrorKind::ArrayOutOfBounds { @@ -532,9 +535,8 @@ fn cse_block_with_anchor( // We do not simplify print statements builtin::Opcode::Println(_) => (), _ => { - let args = args.iter().map(|arg| { - NodeEval::from_id(ctx, *arg).into_const_value().map(|f| f.to_u128()) - }); + let args = + args.iter().map(|arg| NodeEval::from_id(ctx, *arg).into_const_value()); if let Some(args) = args.collect() { update2.mark = Mark::Deleted; From f33a0048b19f3f0f5ac0903adadeffead12334b0 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 11 May 2023 17:06:41 -0400 Subject: [PATCH 56/66] chore(ssa refactor): Fix inlining bug (#1335) Fix inlining bug --- crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs b/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs index 6e7c9848748..6848f84bb7b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs 
+++ b/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs @@ -305,7 +305,8 @@ impl<'function> PerFunctionContext<'function> { arguments: &[ValueId], ) { let old_results = self.source_function.dfg.instruction_results(call_id); - let new_results = self.context.inline_function(ssa, function, arguments); + let arguments = vecmap(arguments, |arg| self.translate_value(*arg)); + let new_results = self.context.inline_function(ssa, function, &arguments); Self::insert_new_instruction_results(&mut self.values, old_results, new_results); } From a6de557e83eb6318d091e40553bb3e2b3823fdc5 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 11 May 2023 17:21:25 -0400 Subject: [PATCH 57/66] feat: Issue an error when attempting to use a `return` expression (#1330) * Emit an error for the 'return' expression Fixes #1190 * Add fixes for return statement * Validate semicolon for `return` properly * Ran cargo fmt * Fix test failures from merge commit --------- Co-authored-by: Piotr Czarnecki --- crates/noirc_frontend/src/ast/expression.rs | 6 +++ crates/noirc_frontend/src/ast/statement.rs | 12 ++--- crates/noirc_frontend/src/parser/errors.rs | 2 + crates/noirc_frontend/src/parser/parser.rs | 49 +++++++++++++++++++++ 4 files changed, 61 insertions(+), 8 deletions(-) diff --git a/crates/noirc_frontend/src/ast/expression.rs b/crates/noirc_frontend/src/ast/expression.rs index 9be6f715a14..9e9ff2f592e 100644 --- a/crates/noirc_frontend/src/ast/expression.rs +++ b/crates/noirc_frontend/src/ast/expression.rs @@ -106,6 +106,12 @@ impl Recoverable for Expression { } } +impl Recoverable for Option { + fn error(span: Span) -> Self { + Some(Expression::new(ExpressionKind::Error, span)) + } +} + #[derive(Debug, Eq, Clone)] pub struct Expression { pub kind: ExpressionKind, diff --git a/crates/noirc_frontend/src/ast/statement.rs b/crates/noirc_frontend/src/ast/statement.rs index d4fabccea70..2792d51c41c 100644 --- a/crates/noirc_frontend/src/ast/statement.rs +++ 
b/crates/noirc_frontend/src/ast/statement.rs @@ -51,6 +51,8 @@ impl Statement { last_statement_in_block: bool, emit_error: &mut dyn FnMut(ParserError), ) -> Statement { + let missing_semicolon = + ParserError::with_reason(ParserErrorReason::MissingSeparatingSemi, span); match self { Statement::Let(_) | Statement::Constrain(_) @@ -59,10 +61,7 @@ impl Statement { | Statement::Error => { // To match rust, statements always require a semicolon, even at the end of a block if semi.is_none() { - emit_error(ParserError::with_reason( - ParserErrorReason::MissingSeparatingSemi, - span, - )); + emit_error(missing_semicolon); } self } @@ -85,10 +84,7 @@ impl Statement { // for unneeded expressions like { 1 + 2; 3 } (_, Some(_), false) => Statement::Expression(expr), (_, None, false) => { - emit_error(ParserError::with_reason( - ParserErrorReason::MissingSeparatingSemi, - span, - )); + emit_error(missing_semicolon); Statement::Expression(expr) } diff --git a/crates/noirc_frontend/src/parser/errors.rs b/crates/noirc_frontend/src/parser/errors.rs index d4a294482a8..e788893c58d 100644 --- a/crates/noirc_frontend/src/parser/errors.rs +++ b/crates/noirc_frontend/src/parser/errors.rs @@ -21,6 +21,8 @@ pub enum ParserErrorReason { ConstrainDeprecated, #[error("Expression is invalid in an array-length type: '{0}'. Only unsigned integer constants, globals, generics, +, -, *, /, and % may be used in this context.")] InvalidArrayLengthExpression(Expression), + #[error("Early 'return' is unsupported")] + EarlyReturn, } /// Represents a parsing error, or a parsing error in the making. 
diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 98b45247567..2044a02c68e 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -449,6 +449,7 @@ where assertion(expr_parser.clone()), declaration(expr_parser.clone()), assignment(expr_parser.clone()), + return_statement(expr_parser.clone()), expr_parser.map(Statement::Expression), )) } @@ -714,6 +715,18 @@ fn expression() -> impl ExprParser { .labelled(ParsingRuleLabel::Expression) } +fn return_statement<'a, P>(expr_parser: P) -> impl NoirParser + 'a +where + P: ExprParser + 'a, +{ + ignore_then_commit(keyword(Keyword::Return), expr_parser.or_not()) + .validate(|_, span, emit| { + emit(ParserError::with_reason(ParserErrorReason::EarlyReturn, span)); + Statement::Error + }) + .labelled(ParsingRuleLabel::Statement) +} + // An expression is a single term followed by 0 or more (OP subexpression)* // where OP is an operator at the given precedence level and subexpression // is an expression at the current precedence level plus one. 
@@ -1599,4 +1612,40 @@ mod test { ); } } + + #[test] + fn return_validation() { + let cases = vec![ + ("{ return 42; }", 1, "{\n Error\n}"), + ("{ return 1; return 2; }", 2, "{\n Error\n Error\n}"), + ( + "{ return 123; let foo = 4 + 3; }", + 1, + "{\n Error\n let foo: unspecified = (4 + 3)\n}", + ), + ("{ return 1 + 2 }", 2, "{\n Error\n}"), + ("{ return; }", 1, "{\n Error\n}"), + ]; + + let show_errors = |v| vecmap(&v, ToString::to_string).join("\n"); + + let results = vecmap(&cases, |&(src, expected_errors, expected_result)| { + let (opt, errors) = parse_recover(block(expression()), src); + let actual = opt.map(|ast| ast.to_string()); + let actual = if let Some(s) = &actual { s.to_string() } else { "(none)".to_string() }; + + let result = + ((errors.len(), actual.clone()), (expected_errors, expected_result.to_string())); + if result.0 != result.1 { + let num_errors = errors.len(); + let shown_errors = show_errors(errors); + eprintln!( + "\nExpected {} error(s) and got {}:\n\n{}\n\nFrom input: {}\nExpected AST: {}\nActual AST: {}\n", + expected_errors, num_errors, shown_errors, src, expected_result, actual); + } + result + }); + + assert_eq!(vecmap(&results, |t| t.0.clone()), vecmap(&results, |t| t.1.clone()),); + } } From 84673ef1c51e4aad1e15a670b02725e15bf49ddb Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Fri, 12 May 2023 05:22:58 +0800 Subject: [PATCH 58/66] chore: add support for encoding/decoding inputs from JSON (#1325) * chore: move `parse_str_to_field` into `mod.rs` * chore: clean up stale comment * feat: add support for reading/writing arguments from json * chore: update errors to be generic over file formats * fix: make `AbiTYpeMismatch` generic to abi type * chore: run serialization tests on all `Format` variants * chore: cspell * chore: update error message * chore: add comments explaining limits on number size --- Cargo.lock | 2 + crates/noirc_abi/Cargo.toml | 4 +- crates/noirc_abi/src/errors.rs | 16 
++- crates/noirc_abi/src/input_parser/json.rs | 155 ++++++++++++++++++++++ crates/noirc_abi/src/input_parser/mod.rs | 92 +++++++++++++ crates/noirc_abi/src/input_parser/toml.rs | 36 +---- 6 files changed, 268 insertions(+), 37 deletions(-) create mode 100644 crates/noirc_abi/src/input_parser/json.rs diff --git a/Cargo.lock b/Cargo.lock index 7a631368870..176a8450b3f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1922,6 +1922,8 @@ dependencies = [ "iter-extended", "serde", "serde_json", + "strum", + "strum_macros", "thiserror", "toml", ] diff --git a/crates/noirc_abi/Cargo.toml b/crates/noirc_abi/Cargo.toml index 3d12afc8293..6af0cfe78b3 100644 --- a/crates/noirc_abi/Cargo.toml +++ b/crates/noirc_abi/Cargo.toml @@ -10,8 +10,10 @@ edition.workspace = true acvm.workspace = true iter-extended.workspace = true toml.workspace = true +serde_json = "1.0" serde.workspace = true thiserror.workspace = true [dev-dependencies] -serde_json = "1.0" +strum = "0.24" +strum_macros = "0.24" \ No newline at end of file diff --git a/crates/noirc_abi/src/errors.rs b/crates/noirc_abi/src/errors.rs index 4dc8a4bdc41..80f9d665dff 100644 --- a/crates/noirc_abi/src/errors.rs +++ b/crates/noirc_abi/src/errors.rs @@ -4,15 +4,15 @@ use thiserror::Error; #[derive(Debug, Error)] pub enum InputParserError { - #[error("input.toml file is badly formed, could not parse, {0}")] - ParseTomlMap(String), + #[error("input file is badly formed, could not parse, {0}")] + ParseInputMap(String), #[error("Expected witness values to be integers, provided value causes `{0}` error")] ParseStr(String), #[error("Could not parse hex value {0}")] ParseHexStr(String), #[error("duplicate variable name {0}")] DuplicateVariableName(String), - #[error("cannot parse a string toml type into {0:?}")] + #[error("cannot parse value into {0:?}")] AbiTypeMismatch(AbiType), #[error("Expected argument `{0}`, but none was found")] MissingArgument(String), @@ -20,13 +20,19 @@ pub enum InputParserError { impl From for InputParserError 
{ fn from(err: toml::ser::Error) -> Self { - Self::ParseTomlMap(err.to_string()) + Self::ParseInputMap(err.to_string()) } } impl From for InputParserError { fn from(err: toml::de::Error) -> Self { - Self::ParseTomlMap(err.to_string()) + Self::ParseInputMap(err.to_string()) + } +} + +impl From for InputParserError { + fn from(err: serde_json::Error) -> Self { + Self::ParseInputMap(err.to_string()) } } diff --git a/crates/noirc_abi/src/input_parser/json.rs b/crates/noirc_abi/src/input_parser/json.rs new file mode 100644 index 00000000000..d0d53ce38b2 --- /dev/null +++ b/crates/noirc_abi/src/input_parser/json.rs @@ -0,0 +1,155 @@ +use super::{parse_str_to_field, InputValue}; +use crate::{errors::InputParserError, Abi, AbiType, MAIN_RETURN_NAME}; +use acvm::FieldElement; +use iter_extended::{btree_map, try_btree_map, try_vecmap, vecmap}; +use serde::{Deserialize, Serialize}; +use std::collections::BTreeMap; + +pub(crate) fn parse_json( + input_string: &str, + abi: &Abi, +) -> Result, InputParserError> { + // Parse input.json into a BTreeMap. + let data: BTreeMap = serde_json::from_str(input_string)?; + + // Convert arguments to field elements. + let mut parsed_inputs = try_btree_map(abi.to_btree_map(), |(arg_name, abi_type)| { + // Check that json contains a value for each argument in the ABI. + let value = data + .get(&arg_name) + .ok_or_else(|| InputParserError::MissingArgument(arg_name.clone()))?; + + InputValue::try_from_json(value.clone(), &abi_type, &arg_name) + .map(|input_value| (arg_name, input_value)) + })?; + + // If the json file also includes a return value then we parse it as well. + // This isn't required as the prover calculates the return value itself. 
+ if let (Some(return_type), Some(json_return_value)) = + (&abi.return_type, data.get(MAIN_RETURN_NAME)) + { + let return_value = + InputValue::try_from_json(json_return_value.clone(), return_type, MAIN_RETURN_NAME)?; + parsed_inputs.insert(MAIN_RETURN_NAME.to_owned(), return_value); + } + + Ok(parsed_inputs) +} + +pub(crate) fn serialize_to_json( + w_map: &BTreeMap, +) -> Result { + let to_map: BTreeMap<_, _> = + w_map.iter().map(|(key, value)| (key, JsonTypes::from(value.clone()))).collect(); + + let json_string = serde_json::to_string(&to_map)?; + + Ok(json_string) +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +#[serde(untagged)] +enum JsonTypes { + // This is most likely going to be a hex string + // But it is possible to support UTF-8 + String(String), + // Just a regular integer, that can fit in 64 bits. + // + // The JSON spec does not specify any limit on the size of integer number types, + // however we restrict the allowable size. Values which do not fit in a u64 should be passed + // as a string. 
+ Integer(u64), + // Simple boolean flag + Bool(bool), + // Array of regular integers + ArrayNum(Vec), + // Array of hexadecimal integers + ArrayString(Vec), + // Array of booleans + ArrayBool(Vec), + // Struct of JsonTypes + Table(BTreeMap), +} + +impl From for JsonTypes { + fn from(value: InputValue) -> Self { + match value { + InputValue::Field(f) => { + let f_str = format!("0x{}", f.to_hex()); + JsonTypes::String(f_str) + } + InputValue::Vec(v) => { + let array = v.iter().map(|i| format!("0x{}", i.to_hex())).collect(); + JsonTypes::ArrayString(array) + } + InputValue::String(s) => JsonTypes::String(s), + InputValue::Struct(map) => { + let map_with_json_types = + btree_map(map, |(key, value)| (key, JsonTypes::from(value))); + JsonTypes::Table(map_with_json_types) + } + } + } +} + +impl InputValue { + fn try_from_json( + value: JsonTypes, + param_type: &AbiType, + arg_name: &str, + ) -> Result { + let input_value = match value { + JsonTypes::String(string) => match param_type { + AbiType::String { .. } => InputValue::String(string), + AbiType::Field | AbiType::Integer { .. } | AbiType::Boolean => { + InputValue::Field(parse_str_to_field(&string)?) + } + + AbiType::Array { .. } | AbiType::Struct { .. 
} => { + return Err(InputParserError::AbiTypeMismatch(param_type.clone())) + } + }, + JsonTypes::Integer(integer) => { + let new_value = FieldElement::from(i128::from(integer)); + + InputValue::Field(new_value) + } + JsonTypes::Bool(boolean) => InputValue::Field(boolean.into()), + JsonTypes::ArrayNum(arr_num) => { + let array_elements = + vecmap(arr_num, |elem_num| FieldElement::from(i128::from(elem_num))); + + InputValue::Vec(array_elements) + } + JsonTypes::ArrayString(arr_str) => { + let array_elements = try_vecmap(arr_str, |elem_str| parse_str_to_field(&elem_str))?; + + InputValue::Vec(array_elements) + } + JsonTypes::ArrayBool(arr_bool) => { + let array_elements = vecmap(arr_bool, FieldElement::from); + + InputValue::Vec(array_elements) + } + + JsonTypes::Table(table) => match param_type { + AbiType::Struct { fields } => { + let native_table = try_btree_map(fields, |(field_name, abi_type)| { + // Check that json contains a value for each field of the struct. + let field_id = format!("{arg_name}.{field_name}"); + let value = table + .get(field_name) + .ok_or_else(|| InputParserError::MissingArgument(field_id.clone()))?; + InputValue::try_from_json(value.clone(), abi_type, &field_id) + .map(|input_value| (field_name.to_string(), input_value)) + })?; + + InputValue::Struct(native_table) + } + _ => return Err(InputParserError::AbiTypeMismatch(param_type.clone())), + }, + }; + + Ok(input_value) + } +} diff --git a/crates/noirc_abi/src/input_parser/mod.rs b/crates/noirc_abi/src/input_parser/mod.rs index cf9e0909f57..1b54cb84df8 100644 --- a/crates/noirc_abi/src/input_parser/mod.rs +++ b/crates/noirc_abi/src/input_parser/mod.rs @@ -1,3 +1,4 @@ +mod json; mod toml; use std::{collections::BTreeMap, path::Path}; @@ -73,13 +74,16 @@ pub trait InitialWitnessParser { /// The different formats that are supported when parsing /// the initial witness values +#[cfg_attr(test, derive(strum_macros::EnumIter))] pub enum Format { + Json, Toml, } impl Format { pub fn ext(&self) -> 
&'static str { match self { + Format::Json => "json", Format::Toml => "toml", } } @@ -92,6 +96,7 @@ impl Format { abi: &Abi, ) -> Result, InputParserError> { match self { + Format::Json => json::parse_json(input_string, abi), Format::Toml => toml::parse_toml(input_string, abi), } } @@ -101,7 +106,94 @@ impl Format { w_map: &BTreeMap, ) -> Result { match self { + Format::Json => json::serialize_to_json(w_map), Format::Toml => toml::serialize_to_toml(w_map), } } } + +#[cfg(test)] +mod serialization_tests { + use std::collections::BTreeMap; + + use acvm::FieldElement; + use strum::IntoEnumIterator; + + use crate::{ + input_parser::InputValue, Abi, AbiParameter, AbiType, AbiVisibility, Sign, MAIN_RETURN_NAME, + }; + + use super::Format; + + #[test] + fn serialization_round_trip() { + let abi = Abi { + parameters: vec![ + AbiParameter { + name: "foo".into(), + typ: AbiType::Field, + visibility: AbiVisibility::Private, + }, + AbiParameter { + name: "bar".into(), + typ: AbiType::Struct { + fields: BTreeMap::from([ + ("field1".into(), AbiType::Integer { sign: Sign::Unsigned, width: 8 }), + ( + "field2".into(), + AbiType::Array { length: 2, typ: Box::new(AbiType::Boolean) }, + ), + ]), + }, + visibility: AbiVisibility::Private, + }, + ], + return_type: Some(AbiType::String { length: 5 }), + // These two fields are unused when serializing/deserializing to file. 
+ param_witnesses: BTreeMap::new(), + return_witnesses: Vec::new(), + }; + + let input_map: BTreeMap = BTreeMap::from([ + ("foo".into(), InputValue::Field(FieldElement::one())), + ( + "bar".into(), + InputValue::Struct(BTreeMap::from([ + ("field1".into(), InputValue::Field(255u128.into())), + ("field2".into(), InputValue::Vec(vec![true.into(), false.into()])), + ])), + ), + (MAIN_RETURN_NAME.into(), InputValue::String("hello".to_owned())), + ]); + + for format in Format::iter() { + let serialized_inputs = format.serialize(&input_map).unwrap(); + + let reconstructed_input_map = format.parse(&serialized_inputs, &abi).unwrap(); + + assert_eq!(input_map, reconstructed_input_map); + } + } +} + +fn parse_str_to_field(value: &str) -> Result { + if value.starts_with("0x") { + FieldElement::from_hex(value).ok_or_else(|| InputParserError::ParseHexStr(value.to_owned())) + } else { + value + .parse::() + .map_err(|err_msg| InputParserError::ParseStr(err_msg.to_string())) + .map(FieldElement::from) + } +} + +#[cfg(test)] +mod test { + use super::parse_str_to_field; + + #[test] + fn parse_empty_str_fails() { + // Check that this fails appropriately rather than being treated as 0, etc. + assert!(parse_str_to_field("").is_err()); + } +} diff --git a/crates/noirc_abi/src/input_parser/toml.rs b/crates/noirc_abi/src/input_parser/toml.rs index a737f784031..a5a50456e23 100644 --- a/crates/noirc_abi/src/input_parser/toml.rs +++ b/crates/noirc_abi/src/input_parser/toml.rs @@ -1,4 +1,4 @@ -use super::InputValue; +use super::{parse_str_to_field, InputValue}; use crate::{errors::InputParserError, Abi, AbiType, MAIN_RETURN_NAME}; use acvm::FieldElement; use iter_extended::{btree_map, try_btree_map, try_vecmap, vecmap}; @@ -12,18 +12,13 @@ pub(crate) fn parse_toml( // Parse input.toml into a BTreeMap. let data: BTreeMap = toml::from_str(input_string)?; - // The toml map is stored in an ordered BTreeMap. As the keys are strings the map is in alphanumerical order. 
- // When parsing the toml map we recursively go through each field to enable struct inputs. - // To match this map with the correct abi type we reorganize our abi by parameter name in a BTreeMap, while the struct fields - // in the abi are already stored in a BTreeMap. - let abi_map = abi.to_btree_map(); - // Convert arguments to field elements. - let mut parsed_inputs = try_btree_map(abi_map, |(arg_name, abi_type)| { + let mut parsed_inputs = try_btree_map(abi.to_btree_map(), |(arg_name, abi_type)| { // Check that toml contains a value for each argument in the ABI. let value = data .get(&arg_name) .ok_or_else(|| InputParserError::MissingArgument(arg_name.clone()))?; + InputValue::try_from_toml(value.clone(), &abi_type, &arg_name) .map(|input_value| (arg_name, input_value)) })?; @@ -58,7 +53,8 @@ enum TomlTypes { // This is most likely going to be a hex string // But it is possible to support UTF-8 String(String), - // Just a regular integer, that can fit in 128 bits + // Just a regular integer, that can fit in 64 bits + // Note that the toml spec specifies that all numbers are represented as `i64`s. Integer(u64), // Simple boolean flag Bool(bool), @@ -154,25 +150,3 @@ impl InputValue { Ok(input_value) } } - -fn parse_str_to_field(value: &str) -> Result { - if value.starts_with("0x") { - FieldElement::from_hex(value).ok_or_else(|| InputParserError::ParseHexStr(value.to_owned())) - } else { - value - .parse::() - .map_err(|err_msg| InputParserError::ParseStr(err_msg.to_string())) - .map(FieldElement::from) - } -} - -#[cfg(test)] -mod test { - use super::parse_str_to_field; - - #[test] - fn parse_empty_str_fails() { - // Check that this fails appropriately rather than being treated as 0, etc. 
- assert!(parse_str_to_field("").is_err()); - } -} From 8a36611be1c7d2009852367c8e3ef7684f8cddcc Mon Sep 17 00:00:00 2001 From: kevaundray Date: Fri, 12 May 2023 00:17:08 +0100 Subject: [PATCH 59/66] feat: Add ECDSA secp256k1 builtin test (#1294) * Copy ECDSA from commit 92ef9b4 * rework for new syntax * commit since github was down * [NOT MERGEABLE] - Skip all tests except ecdsa * change parameter order * update flake.lock * update test * update flake.lock * update flake.lock * modify test * add message + hashed_message to program * remove underscore * modify input to use low-S * commit s variant 1 * commit variant S2 * modify s value in signature to value that passes in acvm/pwg * update flake.lock * remove if statement to make tests go faster * cleanup test --- .../test_data/ecdsa_secp256k1/Nargo.toml | 6 + .../test_data/ecdsa_secp256k1/Prover.toml | 209 ++++++++++++++++++ .../test_data/ecdsa_secp256k1/src/main.nr | 11 + flake.lock | 6 +- 4 files changed, 229 insertions(+), 3 deletions(-) create mode 100644 crates/nargo_cli/tests/test_data/ecdsa_secp256k1/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/ecdsa_secp256k1/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/ecdsa_secp256k1/src/main.nr diff --git a/crates/nargo_cli/tests/test_data/ecdsa_secp256k1/Nargo.toml b/crates/nargo_cli/tests/test_data/ecdsa_secp256k1/Nargo.toml new file mode 100644 index 00000000000..7199d3305bf --- /dev/null +++ b/crates/nargo_cli/tests/test_data/ecdsa_secp256k1/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "ECDSA secp256k1 verification" +authors = [""] +compiler_version = "0.1" + +[dependencies] diff --git a/crates/nargo_cli/tests/test_data/ecdsa_secp256k1/Prover.toml b/crates/nargo_cli/tests/test_data/ecdsa_secp256k1/Prover.toml new file mode 100644 index 00000000000..412c7b36e4c --- /dev/null +++ b/crates/nargo_cli/tests/test_data/ecdsa_secp256k1/Prover.toml @@ -0,0 +1,209 @@ + +hashed_message = [ + 0x3a, + 0x73, + 0xf4, + 0x12, + 0x3a, + 0x5c, + 
0xd2, + 0x12, + 0x1f, + 0x21, + 0xcd, + 0x7e, + 0x8d, + 0x35, + 0x88, + 0x35, + 0x47, + 0x69, + 0x49, + 0xd0, + 0x35, + 0xd9, + 0xc2, + 0xda, + 0x68, + 0x06, + 0xb4, + 0x63, + 0x3a, + 0xc8, + 0xc1, + 0xe2, +] +message = [ + 0x49, + 0x6e, + 0x73, + 0x74, + 0x72, + 0x75, + 0x63, + 0x74, + 0x69, + 0x6f, + 0x6e, + 0x73, + 0x20, + 0x75, + 0x6e, + 0x63, + 0x6c, + 0x65, + 0x61, + 0x72, + 0x2c, + 0x20, + 0x61, + 0x73, + 0x6b, + 0x20, + 0x61, + 0x67, + 0x61, + 0x69, + 0x6e, + 0x20, + 0x6c, + 0x61, + 0x74, + 0x65, + 0x72, + 0x2e, +] +pub_key_x = [ + 0xa0, + 0x43, + 0x4d, + 0x9e, + 0x47, + 0xf3, + 0xc8, + 0x62, + 0x35, + 0x47, + 0x7c, + 0x7b, + 0x1a, + 0xe6, + 0xae, + 0x5d, + 0x34, + 0x42, + 0xd4, + 0x9b, + 0x19, + 0x43, + 0xc2, + 0xb7, + 0x52, + 0xa6, + 0x8e, + 0x2a, + 0x47, + 0xe2, + 0x47, + 0xc7, +] +pub_key_y = [ + 0x89, + 0x3a, + 0xba, + 0x42, + 0x54, + 0x19, + 0xbc, + 0x27, + 0xa3, + 0xb6, + 0xc7, + 0xe6, + 0x93, + 0xa2, + 0x4c, + 0x69, + 0x6f, + 0x79, + 0x4c, + 0x2e, + 0xd8, + 0x77, + 0xa1, + 0x59, + 0x3c, + 0xbe, + 0xe5, + 0x3b, + 0x03, + 0x73, + 0x68, + 0xd7, +] +signature = [ + 0xe5, + 0x08, + 0x1c, + 0x80, + 0xab, + 0x42, + 0x7d, + 0xc3, + 0x70, + 0x34, + 0x6f, + 0x4a, + 0x0e, + 0x31, + 0xaa, + 0x2b, + 0xad, + 0x8d, + 0x97, + 0x98, + 0xc3, + 0x80, + 0x61, + 0xdb, + 0x9a, + 0xe5, + 0x5a, + 0x4e, + 0x8d, + 0xf4, + 0x54, + 0xfd, + 0x28, + 0x11, + 0x98, + 0x94, + 0x34, + 0x4e, + 0x71, + 0xb7, + 0x87, + 0x70, + 0xcc, + 0x93, + 0x1d, + 0x61, + 0xf4, + 0x80, + 0xec, + 0xbb, + 0x0b, + 0x89, + 0xd6, + 0xeb, + 0x69, + 0x69, + 0x01, + 0x61, + 0xe4, + 0x9a, + 0x71, + 0x5f, + 0xcd, + 0x55, +] diff --git a/crates/nargo_cli/tests/test_data/ecdsa_secp256k1/src/main.nr b/crates/nargo_cli/tests/test_data/ecdsa_secp256k1/src/main.nr new file mode 100644 index 00000000000..1cb40f09cd0 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/ecdsa_secp256k1/src/main.nr @@ -0,0 +1,11 @@ +use dep::std; + + +fn main(message : [u8;38],hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : 
[u8;32], signature : [u8;64]) { + // Hash the message, since secp256k1 expects a hashed_message + let expected= std::hash::sha256(message); + assert(hashed_message == expected); + + let x = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(x == 1); +} \ No newline at end of file diff --git a/flake.lock b/flake.lock index 8d6445d3cf6..62abdb4a42e 100644 --- a/flake.lock +++ b/flake.lock @@ -10,11 +10,11 @@ ] }, "locked": { - "lastModified": 1683314474, - "narHash": "sha256-gfHYpOnVTfS+4fhScBhfkB/e5z+jPFCi8zSy+aEh+8s=", + "lastModified": 1683839119, + "narHash": "sha256-pVoW3C5Ek9/878PBzvXqnt51KpIDrxPt5HVtNwdErnE=", "owner": "AztecProtocol", "repo": "barretenberg", - "rev": "ad615ee7dc931d3dbea041e47c96b9d8dccebf98", + "rev": "223b9dee2542145d67126cc8a5aa0e9b9d82c244", "type": "github" }, "original": { From da473685524fc6e5e17f9c3eb95116378ac41fb8 Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Fri, 12 May 2023 06:23:19 +0100 Subject: [PATCH 60/66] chore!: Update to acvm 0.11.0 (#1322) * chore!: Update to acvm 0.11.0 * chore: Update nargo core to return backend errors * chore: Make CliError generic over a Backend chore!: Split filesystem errors off from CliError chore!: Make all run functions take a backend and pass it from the CLI entry * update to latest commit * chore: replace long `Backend` type parameters with `B` * fix grep problems * remove unneeded import * latest master * chore: update `acvm-backend-barretenberg` to 0.1.0 commit * chore: use `try_vecmap` in old `vecmap` locations * chore: add missing `?` * official release of backend --------- Co-authored-by: Tom French --- Cargo.lock | 24 +++++++------ Cargo.toml | 2 +- crates/nargo/src/ops/codegen_verifier.rs | 10 +++--- crates/nargo/src/ops/preprocess.rs | 35 +++++++++---------- crates/nargo/src/ops/prove.rs | 12 +++---- crates/nargo/src/ops/verify.rs | 12 +++---- crates/nargo_cli/Cargo.toml | 2 +- crates/nargo_cli/src/cli/check_cmd.rs | 27 
+++++++++----- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 17 +++++---- crates/nargo_cli/src/cli/compile_cmd.rs | 32 +++++++++-------- crates/nargo_cli/src/cli/execute_cmd.rs | 27 +++++++------- crates/nargo_cli/src/cli/fs/inputs.rs | 8 ++--- crates/nargo_cli/src/cli/fs/mod.rs | 10 +++--- crates/nargo_cli/src/cli/fs/program.rs | 7 ++-- crates/nargo_cli/src/cli/fs/proof.rs | 4 +-- crates/nargo_cli/src/cli/fs/witness.rs | 4 +-- crates/nargo_cli/src/cli/gates_cmd.rs | 23 +++++++----- crates/nargo_cli/src/cli/mod.rs | 25 +++++++------ crates/nargo_cli/src/cli/new_cmd.rs | 10 ++++-- crates/nargo_cli/src/cli/print_acir_cmd.rs | 18 ++++++---- crates/nargo_cli/src/cli/prove_cmd.rs | 29 +++++++++------ crates/nargo_cli/src/cli/test_cmd.rs | 28 ++++++++------- crates/nargo_cli/src/cli/verify_cmd.rs | 23 +++++++----- crates/nargo_cli/src/errors.rs | 35 ++++++++++++++----- 24 files changed, 244 insertions(+), 180 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 176a8450b3f..c7a30e3adc0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "acir" -version = "0.10.3" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "510b65efd4d20bf266185ce0a5dc7d29bcdd196a6a1835c20908fd88040de76c" +checksum = "084577e67b44c72d1cdfabe286d48adac6f5e0ad441ef134c5c467f4b6eee291" dependencies = [ "acir_field", "flate2", @@ -16,9 +16,9 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.10.3" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f032e710c67fd146caedc8fe1dea6e95f01ab59453e42d59b604a51fef3dfe" +checksum = "a267ef529f4b132293199ecdf8c232ade817f01d916039f2d34562cab39e75e9" dependencies = [ "ark-bn254", "ark-ff", @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "acvm" -version = "0.10.3" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2611266039740ffd1978f23258bd6ce3166c22cf15b8227685c2f3bb20ae2ee0" +checksum = "4e1d6795105b50b13fa0dd1779b5191c4d8e9cd98b357b0b9a0b04a847baacf0" dependencies = [ "acir", "acvm_stdlib", @@ -43,13 +43,15 @@ dependencies = [ "num-bigint", "num-traits", "sha2 0.9.9", + "sha3", "thiserror", ] [[package]] name = "acvm-backend-barretenberg" -version = "0.0.0" -source = "git+https://github.com/noir-lang/aztec_backend?rev=677f10e07011849f8aa0d75fe80390bb3081b1e5#677f10e07011849f8aa0d75fe80390bb3081b1e5" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85bf493c97da8c528c21353452aa10f7972f4870a3aab90919bcc08ba56a8cd8" dependencies = [ "acvm", "barretenberg-sys", @@ -61,16 +63,16 @@ dependencies = [ "pkg-config", "reqwest", "rust-embed", - "sha3", + "thiserror", "tokio", "wasmer", ] [[package]] name = "acvm_stdlib" -version = "0.10.3" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5ec51160c66eba75dc15a028a2391675386fd395b3897478d89a386c64a48dd" +checksum = "3131af53d17ac12340c0ff50f8555d8e040321f8078b8ee3cd8846560b6a44a9" dependencies = [ "acir", ] diff --git a/Cargo.toml b/Cargo.toml index badaab032ce..4169aa9b6eb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,7 +24,7 @@ edition = "2021" rust-version = "1.66" [workspace.dependencies] -acvm = "0.10.3" +acvm = "0.11.0" arena = { path = "crates/arena" } fm = { path = "crates/fm" } iter-extended = { path = "crates/iter-extended" } diff --git a/crates/nargo/src/ops/codegen_verifier.rs b/crates/nargo/src/ops/codegen_verifier.rs index ead125699b4..2a0b54df865 100644 --- a/crates/nargo/src/ops/codegen_verifier.rs +++ b/crates/nargo/src/ops/codegen_verifier.rs @@ -1,10 +1,8 @@ use acvm::SmartContract; -use crate::NargoError; - -pub fn codegen_verifier( - backend: &impl SmartContract, +pub fn codegen_verifier( + backend: &B, verification_key: &[u8], -) -> Result { - Ok(backend.eth_contract_from_vk(verification_key)) +) -> Result { + 
backend.eth_contract_from_vk(verification_key) } diff --git a/crates/nargo/src/ops/preprocess.rs b/crates/nargo/src/ops/preprocess.rs index f8d4eb5a825..3be8151f9c9 100644 --- a/crates/nargo/src/ops/preprocess.rs +++ b/crates/nargo/src/ops/preprocess.rs @@ -1,26 +1,23 @@ use acvm::ProofSystemCompiler; -use iter_extended::vecmap; +use iter_extended::try_vecmap; use noirc_driver::{CompiledContract, CompiledProgram}; -use crate::{ - artifacts::{ - contract::{PreprocessedContract, PreprocessedContractFunction}, - program::PreprocessedProgram, - }, - NargoError, +use crate::artifacts::{ + contract::{PreprocessedContract, PreprocessedContractFunction}, + program::PreprocessedProgram, }; // TODO: pull this from backend. const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; -pub fn preprocess_program( - backend: &impl ProofSystemCompiler, +pub fn preprocess_program( + backend: &B, compiled_program: CompiledProgram, -) -> Result { +) -> Result { // TODO: currently `compiled_program`'s bytecode is already optimized for the backend. // In future we'll need to apply those optimizations here. let optimized_bytecode = compiled_program.circuit; - let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode); + let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode)?; Ok(PreprocessedProgram { backend: String::from(BACKEND_IDENTIFIER), @@ -31,17 +28,17 @@ pub fn preprocess_program( }) } -pub fn preprocess_contract( - backend: &impl ProofSystemCompiler, +pub fn preprocess_contract( + backend: &B, compiled_contract: CompiledContract, -) -> Result { - let preprocessed_contract_functions = vecmap(compiled_contract.functions, |func| { +) -> Result { + let preprocessed_contract_functions = try_vecmap(compiled_contract.functions, |func| { // TODO: currently `func`'s bytecode is already optimized for the backend. // In future we'll need to apply those optimizations here. 
let optimized_bytecode = func.bytecode; - let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode); + let (proving_key, verification_key) = backend.preprocess(&optimized_bytecode)?; - PreprocessedContractFunction { + Ok(PreprocessedContractFunction { name: func.name, function_type: func.function_type, abi: func.abi, @@ -49,8 +46,8 @@ pub fn preprocess_contract( bytecode: optimized_bytecode, proving_key, verification_key, - } - }); + }) + })?; Ok(PreprocessedContract { name: compiled_contract.name, diff --git a/crates/nargo/src/ops/prove.rs b/crates/nargo/src/ops/prove.rs index 376220a8a74..80771bc9cb7 100644 --- a/crates/nargo/src/ops/prove.rs +++ b/crates/nargo/src/ops/prove.rs @@ -2,15 +2,11 @@ use acvm::acir::circuit::Circuit; use acvm::ProofSystemCompiler; use noirc_abi::WitnessMap; -use crate::NargoError; - -pub fn prove_execution( - backend: &impl ProofSystemCompiler, +pub fn prove_execution( + backend: &B, circuit: &Circuit, solved_witness: WitnessMap, proving_key: &[u8], -) -> Result, NargoError> { - let proof = backend.prove_with_pk(circuit, solved_witness, proving_key); - - Ok(proof) +) -> Result, B::Error> { + backend.prove_with_pk(circuit, solved_witness, proving_key) } diff --git a/crates/nargo/src/ops/verify.rs b/crates/nargo/src/ops/verify.rs index 5109d2291db..cd76fbd430e 100644 --- a/crates/nargo/src/ops/verify.rs +++ b/crates/nargo/src/ops/verify.rs @@ -2,16 +2,12 @@ use acvm::acir::circuit::Circuit; use acvm::ProofSystemCompiler; use noirc_abi::WitnessMap; -use crate::NargoError; - -pub fn verify_proof( - backend: &impl ProofSystemCompiler, +pub fn verify_proof( + backend: &B, circuit: &Circuit, proof: &[u8], public_inputs: WitnessMap, verification_key: &[u8], -) -> Result { - let valid_proof = backend.verify_with_vk(proof, public_inputs, circuit, verification_key); - - Ok(valid_proof) +) -> Result { + backend.verify_with_vk(proof, public_inputs, circuit, verification_key) } diff --git a/crates/nargo_cli/Cargo.toml 
b/crates/nargo_cli/Cargo.toml index 6e8801301b1..74db54c8ef7 100644 --- a/crates/nargo_cli/Cargo.toml +++ b/crates/nargo_cli/Cargo.toml @@ -37,7 +37,7 @@ termcolor = "1.1.2" color-eyre = "0.6.2" # Backends -acvm-backend-barretenberg = { git = "https://github.com/noir-lang/aztec_backend", rev = "677f10e07011849f8aa0d75fe80390bb3081b1e5", default-features = false } +acvm-backend-barretenberg = { version = "0.1.2", default-features = false } [dev-dependencies] tempdir = "0.3.7" diff --git a/crates/nargo_cli/src/cli/check_cmd.rs b/crates/nargo_cli/src/cli/check_cmd.rs index 3049c830def..9664930466b 100644 --- a/crates/nargo_cli/src/cli/check_cmd.rs +++ b/crates/nargo_cli/src/cli/check_cmd.rs @@ -1,5 +1,5 @@ use crate::{errors::CliError, resolver::Resolver}; -use acvm::ProofSystemCompiler; +use acvm::Backend; use clap::Args; use iter_extended::btree_map; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; @@ -17,15 +17,21 @@ pub(crate) struct CheckCommand { compile_options: CompileOptions, } -pub(crate) fn run(args: CheckCommand, config: NargoConfig) -> Result<(), CliError> { - check_from_path(config.program_dir, &args.compile_options)?; +pub(crate) fn run( + backend: &B, + args: CheckCommand, + config: NargoConfig, +) -> Result<(), CliError> { + check_from_path(backend, config.program_dir, &args.compile_options)?; println!("Constraint system successfully built!"); Ok(()) } -fn check_from_path>(p: P, compile_options: &CompileOptions) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend::default(); - +fn check_from_path>( + backend: &B, + p: P, + compile_options: &CompileOptions, +) -> Result<(), CliError> { let mut driver = Resolver::resolve_root_manifest(p.as_ref(), backend.np_language())?; driver.check_crate(compile_options).map_err(|_| CliError::CompilationError)?; @@ -148,12 +154,13 @@ d2 = ["", "", ""] let pass_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(format!("{TEST_DATA_DIR}/pass")); + let backend = 
crate::backends::ConcreteBackend::default(); let config = CompileOptions::default(); let paths = std::fs::read_dir(pass_dir).unwrap(); for path in paths.flatten() { let path = path.path(); assert!( - super::check_from_path(path.clone(), &config).is_ok(), + super::check_from_path(&backend, path.clone(), &config).is_ok(), "path: {}", path.display() ); @@ -166,12 +173,13 @@ d2 = ["", "", ""] let fail_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(format!("{TEST_DATA_DIR}/fail")); + let backend = crate::backends::ConcreteBackend::default(); let config = CompileOptions::default(); let paths = std::fs::read_dir(fail_dir).unwrap(); for path in paths.flatten() { let path = path.path(); assert!( - super::check_from_path(path.clone(), &config).is_err(), + super::check_from_path(&backend, path.clone(), &config).is_err(), "path: {}", path.display() ); @@ -183,13 +191,14 @@ d2 = ["", "", ""] let pass_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join(format!("{TEST_DATA_DIR}/pass_dev_mode")); + let backend = crate::backends::ConcreteBackend::default(); let config = CompileOptions { allow_warnings: true, ..Default::default() }; let paths = std::fs::read_dir(pass_dir).unwrap(); for path in paths.flatten() { let path = path.path(); assert!( - super::check_from_path(path.clone(), &config).is_ok(), + super::check_from_path(&backend, path.clone(), &config).is_ok(), "path: {}", path.display() ); diff --git a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs index f23502a15b5..d38433e2d1c 100644 --- a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -4,6 +4,7 @@ use crate::{ cli::compile_cmd::compile_circuit, constants::CONTRACT_DIR, constants::TARGET_DIR, errors::CliError, }; +use acvm::Backend; use clap::Args; use nargo::ops::{codegen_verifier, preprocess_program}; use noirc_driver::CompileOptions; @@ -18,9 +19,11 @@ pub(crate) struct CodegenVerifierCommand { 
compile_options: CompileOptions, } -pub(crate) fn run(args: CodegenVerifierCommand, config: NargoConfig) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend::default(); - +pub(crate) fn run( + backend: &B, + args: CodegenVerifierCommand, + config: NargoConfig, +) -> Result<(), CliError> { // TODO(#1201): Should this be a utility function? let circuit_build_path = args .circuit_name @@ -30,12 +33,14 @@ pub(crate) fn run(args: CodegenVerifierCommand, config: NargoConfig) -> Result<( Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, None => { let compiled_program = - compile_circuit(&backend, config.program_dir.as_ref(), &args.compile_options)?; - preprocess_program(&backend, compiled_program)? + compile_circuit(backend, config.program_dir.as_ref(), &args.compile_options)?; + preprocess_program(backend, compiled_program) + .map_err(CliError::ProofSystemCompilerError)? } }; - let smart_contract_string = codegen_verifier(&backend, &preprocessed_program.verification_key)?; + let smart_contract_string = codegen_verifier(backend, &preprocessed_program.verification_key) + .map_err(CliError::SmartContractError)?; let contract_dir = config.program_dir.join(CONTRACT_DIR); create_named_dir(&contract_dir, "contract"); diff --git a/crates/nargo_cli/src/cli/compile_cmd.rs b/crates/nargo_cli/src/cli/compile_cmd.rs index 78b52003166..531560b87db 100644 --- a/crates/nargo_cli/src/cli/compile_cmd.rs +++ b/crates/nargo_cli/src/cli/compile_cmd.rs @@ -1,4 +1,4 @@ -use acvm::ProofSystemCompiler; +use acvm::Backend; use iter_extended::try_vecmap; use noirc_driver::{CompileOptions, CompiledProgram, Driver}; use std::path::Path; @@ -27,19 +27,22 @@ pub(crate) struct CompileCommand { compile_options: CompileOptions, } -pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliError> { +pub(crate) fn run( + backend: &B, + args: CompileCommand, + config: NargoConfig, +) -> Result<(), CliError> { let circuit_dir = 
config.program_dir.join(TARGET_DIR); - let backend = crate::backends::ConcreteBackend::default(); - // If contracts is set we're compiling every function in a 'contract' rather than just 'main'. if args.contracts { - let mut driver = setup_driver(&backend, &config.program_dir)?; + let mut driver = setup_driver(backend, &config.program_dir)?; let compiled_contracts = driver .compile_contracts(&args.compile_options) .map_err(|_| CliError::CompilationError)?; - let preprocessed_contracts = - try_vecmap(compiled_contracts, |contract| preprocess_contract(&backend, contract))?; + let preprocessed_contracts = try_vecmap(compiled_contracts, |contract| { + preprocess_contract(backend, contract).map_err(CliError::ProofSystemCompilerError) + })?; for contract in preprocessed_contracts { save_contract_to_file( &contract, @@ -48,25 +51,26 @@ pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliEr ); } } else { - let program = compile_circuit(&backend, &config.program_dir, &args.compile_options)?; - let preprocessed_program = preprocess_program(&backend, program)?; + let program = compile_circuit(backend, &config.program_dir, &args.compile_options)?; + let preprocessed_program = + preprocess_program(backend, program).map_err(CliError::ProofSystemCompilerError)?; save_program_to_file(&preprocessed_program, &args.circuit_name, circuit_dir); } Ok(()) } -fn setup_driver( - backend: &impl ProofSystemCompiler, +fn setup_driver( + backend: &B, program_dir: &Path, ) -> Result { Resolver::resolve_root_manifest(program_dir, backend.np_language()) } -pub(crate) fn compile_circuit( - backend: &impl ProofSystemCompiler, +pub(crate) fn compile_circuit( + backend: &B, program_dir: &Path, compile_options: &CompileOptions, -) -> Result { +) -> Result> { let mut driver = setup_driver(backend, program_dir)?; driver.compile_main(compile_options).map_err(|_| CliError::CompilationError) } diff --git a/crates/nargo_cli/src/cli/execute_cmd.rs 
b/crates/nargo_cli/src/cli/execute_cmd.rs index b9b2be3febf..855fea04cb3 100644 --- a/crates/nargo_cli/src/cli/execute_cmd.rs +++ b/crates/nargo_cli/src/cli/execute_cmd.rs @@ -1,7 +1,7 @@ use std::path::Path; use acvm::acir::circuit::Circuit; -use acvm::PartialWitnessGenerator; +use acvm::Backend; use clap::Args; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::{Abi, InputMap, WitnessMap}; @@ -25,9 +25,13 @@ pub(crate) struct ExecuteCommand { compile_options: CompileOptions, } -pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliError> { +pub(crate) fn run( + backend: &B, + args: ExecuteCommand, + config: NargoConfig, +) -> Result<(), CliError> { let (return_value, solved_witness) = - execute_with_path(&config.program_dir, &args.compile_options)?; + execute_with_path(backend, &config.program_dir, &args.compile_options)?; println!("Circuit witness successfully solved"); if let Some(return_value) = return_value { @@ -43,19 +47,18 @@ pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliEr Ok(()) } -fn execute_with_path( +fn execute_with_path( + backend: &B, program_dir: &Path, compile_options: &CompileOptions, -) -> Result<(Option, WitnessMap), CliError> { - let backend = crate::backends::ConcreteBackend::default(); - - let CompiledProgram { abi, circuit } = compile_circuit(&backend, program_dir, compile_options)?; +) -> Result<(Option, WitnessMap), CliError> { + let CompiledProgram { abi, circuit } = compile_circuit(backend, program_dir, compile_options)?; // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(program_dir, PROVER_INPUT_FILE, Format::Toml, &abi)?; - let solved_witness = execute_program(&backend, circuit, &abi, &inputs_map)?; + let solved_witness = execute_program(backend, circuit, &abi, &inputs_map)?; let public_abi = abi.public_abi(); let (_, return_value) = public_abi.decode(&solved_witness)?; @@ -63,12 +66,12 @@ fn execute_with_path( 
Ok((return_value, solved_witness)) } -pub(crate) fn execute_program( - backend: &impl PartialWitnessGenerator, +pub(crate) fn execute_program( + backend: &B, circuit: Circuit, abi: &Abi, inputs_map: &InputMap, -) -> Result { +) -> Result> { let initial_witness = abi.encode(inputs_map, None)?; let solved_witness = nargo::ops::execute_circuit(backend, circuit, initial_witness)?; diff --git a/crates/nargo_cli/src/cli/fs/inputs.rs b/crates/nargo_cli/src/cli/fs/inputs.rs index dd9ce199720..eaf94cc22fd 100644 --- a/crates/nargo_cli/src/cli/fs/inputs.rs +++ b/crates/nargo_cli/src/cli/fs/inputs.rs @@ -4,7 +4,7 @@ use noirc_abi::{ }; use std::{collections::BTreeMap, path::Path}; -use crate::errors::CliError; +use crate::errors::FilesystemError; use super::write_to_file; @@ -20,14 +20,14 @@ pub(crate) fn read_inputs_from_file>( file_name: &str, format: Format, abi: &Abi, -) -> Result<(InputMap, Option), CliError> { +) -> Result<(InputMap, Option), FilesystemError> { if abi.is_empty() { return Ok((BTreeMap::new(), None)); } let file_path = path.as_ref().join(file_name).with_extension(format.ext()); if !file_path.exists() { - return Err(CliError::MissingTomlFile(file_name.to_owned(), file_path)); + return Err(FilesystemError::MissingTomlFile(file_name.to_owned(), file_path)); } let input_string = std::fs::read_to_string(file_path).unwrap(); @@ -43,7 +43,7 @@ pub(crate) fn write_inputs_to_file>( path: P, file_name: &str, format: Format, -) -> Result<(), CliError> { +) -> Result<(), FilesystemError> { let file_path = path.as_ref().join(file_name).with_extension(format.ext()); // We must insert the return value into the `InputMap` in order for it to be written to file. 
diff --git a/crates/nargo_cli/src/cli/fs/mod.rs b/crates/nargo_cli/src/cli/fs/mod.rs index d860f722fd1..4ebce3b3325 100644 --- a/crates/nargo_cli/src/cli/fs/mod.rs +++ b/crates/nargo_cli/src/cli/fs/mod.rs @@ -4,7 +4,7 @@ use std::{ path::{Path, PathBuf}, }; -use crate::errors::CliError; +use crate::errors::FilesystemError; pub(super) mod inputs; pub(super) mod program; @@ -32,11 +32,11 @@ pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String { } } -pub(super) fn load_hex_data>(path: P) -> Result, CliError> { - let hex_data: Vec<_> = - std::fs::read(&path).map_err(|_| CliError::PathNotValid(path.as_ref().to_path_buf()))?; +pub(super) fn load_hex_data>(path: P) -> Result, FilesystemError> { + let hex_data: Vec<_> = std::fs::read(&path) + .map_err(|_| FilesystemError::PathNotValid(path.as_ref().to_path_buf()))?; - let raw_bytes = hex::decode(hex_data).map_err(CliError::HexArtifactNotValid)?; + let raw_bytes = hex::decode(hex_data).map_err(FilesystemError::HexArtifactNotValid)?; Ok(raw_bytes) } diff --git a/crates/nargo_cli/src/cli/fs/program.rs b/crates/nargo_cli/src/cli/fs/program.rs index a3b5f4026bd..871a6023837 100644 --- a/crates/nargo_cli/src/cli/fs/program.rs +++ b/crates/nargo_cli/src/cli/fs/program.rs @@ -2,7 +2,7 @@ use std::path::{Path, PathBuf}; use nargo::artifacts::{contract::PreprocessedContract, program::PreprocessedProgram}; -use crate::errors::CliError; +use crate::errors::FilesystemError; use super::{create_named_dir, write_to_file}; @@ -35,10 +35,11 @@ fn save_build_artifact_to_file, T: ?Sized + serde::Serialize>( pub(crate) fn read_program_from_file>( circuit_path: P, -) -> Result { +) -> Result { let file_path = circuit_path.as_ref().with_extension("json"); - let input_string = std::fs::read(&file_path).map_err(|_| CliError::PathNotValid(file_path))?; + let input_string = + std::fs::read(&file_path).map_err(|_| FilesystemError::PathNotValid(file_path))?; let program = serde_json::from_slice(&input_string).expect("could not deserialize 
program"); diff --git a/crates/nargo_cli/src/cli/fs/proof.rs b/crates/nargo_cli/src/cli/fs/proof.rs index 4a77595a54b..3a54aa908f8 100644 --- a/crates/nargo_cli/src/cli/fs/proof.rs +++ b/crates/nargo_cli/src/cli/fs/proof.rs @@ -1,6 +1,6 @@ use std::path::{Path, PathBuf}; -use crate::{constants::PROOF_EXT, errors::CliError}; +use crate::{constants::PROOF_EXT, errors::FilesystemError}; use super::{create_named_dir, write_to_file}; @@ -8,7 +8,7 @@ pub(crate) fn save_proof_to_dir>( proof: &[u8], proof_name: &str, proof_dir: P, -) -> Result { +) -> Result { create_named_dir(proof_dir.as_ref(), "proof"); let proof_path = proof_dir.as_ref().join(proof_name).with_extension(PROOF_EXT); diff --git a/crates/nargo_cli/src/cli/fs/witness.rs b/crates/nargo_cli/src/cli/fs/witness.rs index f3a5d3ea469..d41123e74fa 100644 --- a/crates/nargo_cli/src/cli/fs/witness.rs +++ b/crates/nargo_cli/src/cli/fs/witness.rs @@ -4,13 +4,13 @@ use acvm::acir::native_types::Witness; use noirc_abi::WitnessMap; use super::{create_named_dir, write_to_file}; -use crate::{constants::WITNESS_EXT, errors::CliError}; +use crate::{constants::WITNESS_EXT, errors::FilesystemError}; pub(crate) fn save_witness_to_dir>( witness: WitnessMap, witness_name: &str, witness_dir: P, -) -> Result { +) -> Result { create_named_dir(witness_dir.as_ref(), "witness"); let witness_path = witness_dir.as_ref().join(witness_name).with_extension(WITNESS_EXT); diff --git a/crates/nargo_cli/src/cli/gates_cmd.rs b/crates/nargo_cli/src/cli/gates_cmd.rs index 9fe9f5c7a53..88e11c683eb 100644 --- a/crates/nargo_cli/src/cli/gates_cmd.rs +++ b/crates/nargo_cli/src/cli/gates_cmd.rs @@ -1,4 +1,4 @@ -use acvm::ProofSystemCompiler; +use acvm::Backend; use clap::Args; use noirc_driver::CompileOptions; use std::path::Path; @@ -15,17 +15,20 @@ pub(crate) struct GatesCommand { compile_options: CompileOptions, } -pub(crate) fn run(args: GatesCommand, config: NargoConfig) -> Result<(), CliError> { - count_gates_with_path(config.program_dir, 
&args.compile_options) +pub(crate) fn run( + backend: &B, + args: GatesCommand, + config: NargoConfig, +) -> Result<(), CliError> { + count_gates_with_path(backend, config.program_dir, &args.compile_options) } -fn count_gates_with_path>( +fn count_gates_with_path>( + backend: &B, program_dir: P, compile_options: &CompileOptions, -) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend::default(); - - let compiled_program = compile_circuit(&backend, program_dir.as_ref(), compile_options)?; +) -> Result<(), CliError> { + let compiled_program = compile_circuit(backend, program_dir.as_ref(), compile_options)?; let num_opcodes = compiled_program.circuit.opcodes.len(); println!( @@ -34,7 +37,9 @@ fn count_gates_with_path>( num_opcodes ); - let exact_circuit_size = backend.get_exact_circuit_size(&compiled_program.circuit); + let exact_circuit_size = backend + .get_exact_circuit_size(&compiled_program.circuit) + .map_err(CliError::ProofSystemCompilerError)?; println!("Backend circuit size: {exact_circuit_size}"); Ok(()) diff --git a/crates/nargo_cli/src/cli/mod.rs b/crates/nargo_cli/src/cli/mod.rs index 5450bb39660..d41dc1a815a 100644 --- a/crates/nargo_cli/src/cli/mod.rs +++ b/crates/nargo_cli/src/cli/mod.rs @@ -67,17 +67,19 @@ pub fn start_cli() -> eyre::Result<()> { config.program_dir = find_package_root(&config.program_dir)?; } + let backend = crate::backends::ConcreteBackend::default(); + match command { - NargoCommand::New(args) => new_cmd::run(args, config), - NargoCommand::Check(args) => check_cmd::run(args, config), - NargoCommand::Compile(args) => compile_cmd::run(args, config), - NargoCommand::Execute(args) => execute_cmd::run(args, config), - NargoCommand::Prove(args) => prove_cmd::run(args, config), - NargoCommand::Verify(args) => verify_cmd::run(args, config), - NargoCommand::Test(args) => test_cmd::run(args, config), - NargoCommand::Gates(args) => gates_cmd::run(args, config), - NargoCommand::CodegenVerifier(args) => 
codegen_verifier_cmd::run(args, config), - NargoCommand::PrintAcir(args) => print_acir_cmd::run(args, config), + NargoCommand::New(args) => new_cmd::run(&backend, args, config), + NargoCommand::Check(args) => check_cmd::run(&backend, args, config), + NargoCommand::Compile(args) => compile_cmd::run(&backend, args, config), + NargoCommand::Execute(args) => execute_cmd::run(&backend, args, config), + NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), + NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), + NargoCommand::Test(args) => test_cmd::run(&backend, args, config), + NargoCommand::Gates(args) => gates_cmd::run(&backend, args, config), + NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), + NargoCommand::PrintAcir(args) => print_acir_cmd::run(&backend, args, config), }?; Ok(()) @@ -85,6 +87,8 @@ pub fn start_cli() -> eyre::Result<()> { // helper function which tests noir programs by trying to generate a proof and verify it pub fn prove_and_verify(proof_name: &str, program_dir: &Path, show_ssa: bool) -> bool { + let backend = crate::backends::ConcreteBackend::default(); + let compile_options = CompileOptions { show_ssa, allow_warnings: false, @@ -94,6 +98,7 @@ pub fn prove_and_verify(proof_name: &str, program_dir: &Path, show_ssa: bool) -> let proof_dir = program_dir.join(PROOFS_DIR); match prove_cmd::prove_with_path( + &backend, Some(proof_name.to_owned()), program_dir, &proof_dir, diff --git a/crates/nargo_cli/src/cli/new_cmd.rs b/crates/nargo_cli/src/cli/new_cmd.rs index 36146028454..5868c1e820e 100644 --- a/crates/nargo_cli/src/cli/new_cmd.rs +++ b/crates/nargo_cli/src/cli/new_cmd.rs @@ -5,6 +5,7 @@ use crate::{ use super::fs::{create_named_dir, write_to_file}; use super::{NargoConfig, CARGO_PKG_VERSION}; +use acvm::Backend; use clap::Args; use const_format::formatcp; use std::path::{Path, PathBuf}; @@ -33,13 +34,18 @@ const EXAMPLE: &str = r#"fn main(x : Field, y : pub Field) { #[test] 
fn test_main() { main(1, 2); - + // Uncomment to make test fail // main(1, 1); } "#; -pub(crate) fn run(args: NewCommand, config: NargoConfig) -> Result<(), CliError> { +pub(crate) fn run( + // Backend is currently unused, but we might want to use it to inform the "new" template in the future + _backend: &B, + args: NewCommand, + config: NargoConfig, +) -> Result<(), CliError> { let package_dir = config.program_dir.join(args.package_name); if package_dir.exists() { diff --git a/crates/nargo_cli/src/cli/print_acir_cmd.rs b/crates/nargo_cli/src/cli/print_acir_cmd.rs index 38b841121bc..420c57c6a08 100644 --- a/crates/nargo_cli/src/cli/print_acir_cmd.rs +++ b/crates/nargo_cli/src/cli/print_acir_cmd.rs @@ -1,3 +1,4 @@ +use acvm::Backend; use clap::Args; use noirc_driver::CompileOptions; use std::path::Path; @@ -14,17 +15,20 @@ pub(crate) struct PrintAcirCommand { compile_options: CompileOptions, } -pub(crate) fn run(args: PrintAcirCommand, config: NargoConfig) -> Result<(), CliError> { - print_acir_with_path(config.program_dir, &args.compile_options) +pub(crate) fn run( + backend: &B, + args: PrintAcirCommand, + config: NargoConfig, +) -> Result<(), CliError> { + print_acir_with_path(backend, config.program_dir, &args.compile_options) } -fn print_acir_with_path>( +fn print_acir_with_path>( + backend: &B, program_dir: P, compile_options: &CompileOptions, -) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend::default(); - - let compiled_program = compile_circuit(&backend, program_dir.as_ref(), compile_options)?; +) -> Result<(), CliError> { + let compiled_program = compile_circuit(backend, program_dir.as_ref(), compile_options)?; println!("{}", compiled_program.circuit); Ok(()) diff --git a/crates/nargo_cli/src/cli/prove_cmd.rs b/crates/nargo_cli/src/cli/prove_cmd.rs index 2b9ed4b067a..d2adc8d4af1 100644 --- a/crates/nargo_cli/src/cli/prove_cmd.rs +++ b/crates/nargo_cli/src/cli/prove_cmd.rs @@ -1,5 +1,6 @@ use std::path::{Path, PathBuf}; +use 
acvm::Backend; use clap::Args; use nargo::artifacts::program::PreprocessedProgram; use nargo::ops::{preprocess_program, prove_execution, verify_proof}; @@ -38,7 +39,11 @@ pub(crate) struct ProveCommand { compile_options: CompileOptions, } -pub(crate) fn run(args: ProveCommand, config: NargoConfig) -> Result<(), CliError> { +pub(crate) fn run( + backend: &B, + args: ProveCommand, + config: NargoConfig, +) -> Result<(), CliError> { let proof_dir = config.program_dir.join(PROOFS_DIR); let circuit_build_path = args @@ -46,6 +51,7 @@ pub(crate) fn run(args: ProveCommand, config: NargoConfig) -> Result<(), CliErro .map(|circuit_name| config.program_dir.join(TARGET_DIR).join(circuit_name)); prove_with_path( + backend, args.proof_name, config.program_dir, proof_dir, @@ -57,22 +63,21 @@ pub(crate) fn run(args: ProveCommand, config: NargoConfig) -> Result<(), CliErro Ok(()) } -pub(crate) fn prove_with_path>( +pub(crate) fn prove_with_path>( + backend: &B, proof_name: Option, program_dir: P, proof_dir: P, circuit_build_path: Option, check_proof: bool, compile_options: &CompileOptions, -) -> Result, CliError> { - let backend = crate::backends::ConcreteBackend::default(); - +) -> Result, CliError> { let preprocessed_program = match circuit_build_path { Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, None => { - let compiled_program = - compile_circuit(&backend, program_dir.as_ref(), compile_options)?; - preprocess_program(&backend, compiled_program)? + let compiled_program = compile_circuit(backend, program_dir.as_ref(), compile_options)?; + preprocess_program(backend, compiled_program) + .map_err(CliError::ProofSystemCompilerError)? 
} }; @@ -83,7 +88,7 @@ pub(crate) fn prove_with_path>( let (inputs_map, _) = read_inputs_from_file(&program_dir, PROVER_INPUT_FILE, Format::Toml, &abi)?; - let solved_witness = execute_program(&backend, bytecode.clone(), &abi, &inputs_map)?; + let solved_witness = execute_program(backend, bytecode.clone(), &abi, &inputs_map)?; // Write public inputs into Verifier.toml let public_abi = abi.public_abi(); @@ -97,12 +102,14 @@ pub(crate) fn prove_with_path>( Format::Toml, )?; - let proof = prove_execution(&backend, &bytecode, solved_witness, &proving_key)?; + let proof = prove_execution(backend, &bytecode, solved_witness, &proving_key) + .map_err(CliError::ProofSystemCompilerError)?; if check_proof { let public_inputs = public_abi.encode(&public_inputs, return_value)?; let valid_proof = - verify_proof(&backend, &bytecode, &proof, public_inputs, &verification_key)?; + verify_proof(backend, &bytecode, &proof, public_inputs, &verification_key) + .map_err(CliError::ProofSystemCompilerError)?; if !valid_proof { return Err(CliError::InvalidProof("".into())); diff --git a/crates/nargo_cli/src/cli/test_cmd.rs b/crates/nargo_cli/src/cli/test_cmd.rs index 65f8265a862..139d33b6c3d 100644 --- a/crates/nargo_cli/src/cli/test_cmd.rs +++ b/crates/nargo_cli/src/cli/test_cmd.rs @@ -1,6 +1,6 @@ use std::{collections::BTreeMap, io::Write, path::Path}; -use acvm::ProofSystemCompiler; +use acvm::Backend; use clap::Args; use nargo::ops::execute_circuit; use noirc_driver::{CompileOptions, Driver}; @@ -21,19 +21,22 @@ pub(crate) struct TestCommand { compile_options: CompileOptions, } -pub(crate) fn run(args: TestCommand, config: NargoConfig) -> Result<(), CliError> { +pub(crate) fn run( + backend: &B, + args: TestCommand, + config: NargoConfig, +) -> Result<(), CliError> { let test_name: String = args.test_name.unwrap_or_else(|| "".to_owned()); - run_tests(&config.program_dir, &test_name, &args.compile_options) + run_tests(backend, &config.program_dir, &test_name, &args.compile_options) } -fn 
run_tests( +fn run_tests( + backend: &B, program_dir: &Path, test_name: &str, compile_options: &CompileOptions, -) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend::default(); - +) -> Result<(), CliError> { let mut driver = Resolver::resolve_root_manifest(program_dir, backend.np_language())?; driver.check_crate(compile_options).map_err(|_| CliError::CompilationError)?; @@ -50,7 +53,7 @@ fn run_tests( writeln!(writer, "Testing {test_name}...").expect("Failed to write to stdout"); writer.flush().ok(); - match run_test(test_name, test_function, &driver, compile_options) { + match run_test(backend, test_name, test_function, &driver, compile_options) { Ok(_) => { writer.set_color(ColorSpec::new().set_fg(Some(Color::Green))).ok(); writeln!(writer, "ok").ok(); @@ -73,21 +76,20 @@ fn run_tests( Ok(()) } -fn run_test( +fn run_test( + backend: &B, test_name: &str, main: FuncId, driver: &Driver, config: &CompileOptions, -) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend::default(); - +) -> Result<(), CliError> { let program = driver .compile_no_check(config, main) .map_err(|_| CliError::Generic(format!("Test '{test_name}' failed to compile")))?; // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen, // otherwise constraints involving these expressions will not error. 
- match execute_circuit(&backend, program.circuit, BTreeMap::new()) { + match execute_circuit(backend, program.circuit, BTreeMap::new()) { Ok(_) => Ok(()), Err(error) => { let writer = StandardStream::stderr(ColorChoice::Always); diff --git a/crates/nargo_cli/src/cli/verify_cmd.rs b/crates/nargo_cli/src/cli/verify_cmd.rs index 84a6416d44e..07ba12d3899 100644 --- a/crates/nargo_cli/src/cli/verify_cmd.rs +++ b/crates/nargo_cli/src/cli/verify_cmd.rs @@ -6,6 +6,7 @@ use crate::{ errors::CliError, }; +use acvm::Backend; use clap::Args; use nargo::artifacts::program::PreprocessedProgram; use nargo::ops::preprocess_program; @@ -26,7 +27,11 @@ pub(crate) struct VerifyCommand { compile_options: CompileOptions, } -pub(crate) fn run(args: VerifyCommand, config: NargoConfig) -> Result<(), CliError> { +pub(crate) fn run( + backend: &B, + args: VerifyCommand, + config: NargoConfig, +) -> Result<(), CliError> { let proof_path = config.program_dir.join(PROOFS_DIR).join(&args.proof).with_extension(PROOF_EXT); @@ -35,6 +40,7 @@ pub(crate) fn run(args: VerifyCommand, config: NargoConfig) -> Result<(), CliErr .map(|circuit_name| config.program_dir.join(TARGET_DIR).join(circuit_name)); verify_with_path( + backend, &config.program_dir, proof_path, circuit_build_path.as_ref(), @@ -42,20 +48,20 @@ pub(crate) fn run(args: VerifyCommand, config: NargoConfig) -> Result<(), CliErr ) } -fn verify_with_path>( +fn verify_with_path>( + backend: &B, program_dir: P, proof_path: PathBuf, circuit_build_path: Option

, compile_options: CompileOptions, -) -> Result<(), CliError> { - let backend = crate::backends::ConcreteBackend::default(); - +) -> Result<(), CliError> { let preprocessed_program = match circuit_build_path { Some(circuit_build_path) => read_program_from_file(circuit_build_path)?, None => { let compiled_program = - compile_circuit(&backend, program_dir.as_ref(), &compile_options)?; - preprocess_program(&backend, compiled_program)? + compile_circuit(backend, program_dir.as_ref(), &compile_options)?; + preprocess_program(backend, compiled_program) + .map_err(CliError::ProofSystemCompilerError)? } }; @@ -70,7 +76,8 @@ fn verify_with_path>( let proof = load_hex_data(&proof_path)?; let valid_proof = - nargo::ops::verify_proof(&backend, &bytecode, &proof, public_inputs, &verification_key)?; + nargo::ops::verify_proof(backend, &bytecode, &proof, public_inputs, &verification_key) + .map_err(CliError::ProofSystemCompilerError)?; if valid_proof { Ok(()) diff --git a/crates/nargo_cli/src/errors.rs b/crates/nargo_cli/src/errors.rs index f6537b550ea..15d1917a5d6 100644 --- a/crates/nargo_cli/src/errors.rs +++ b/crates/nargo_cli/src/errors.rs @@ -1,3 +1,4 @@ +use acvm::{Backend, ProofSystemCompiler, SmartContract}; use hex::FromHexError; use nargo::NargoError; use noirc_abi::errors::{AbiError, InputParserError}; @@ -7,11 +8,7 @@ use thiserror::Error; use crate::resolver::DependencyResolutionError; #[derive(Debug, Error)] -pub(crate) enum CliError { - #[error("{0}")] - Generic(String), - #[error("Error: destination {} already exists", .0.display())] - DestinationAlreadyExists(PathBuf), +pub(crate) enum FilesystemError { #[error("Error: {} is not a valid path\nRun either `nargo compile` to generate missing build artifacts or `nargo prove` to construct a proof", .0.display())] PathNotValid(PathBuf), #[error("Error: could not parse hex build artifact (proof, proving and/or verification keys, ACIR checksum) ({0})")] @@ -21,6 +18,18 @@ pub(crate) enum CliError { )] 
MissingTomlFile(String, PathBuf), + /// Input parsing error + #[error(transparent)] + InputParserError(#[from] InputParserError), +} + +#[derive(Debug, Error)] +pub(crate) enum CliError { + #[error("{0}")] + Generic(String), + #[error("Error: destination {} already exists", .0.display())] + DestinationAlreadyExists(PathBuf), + #[error("Failed to verify proof {}", .0.display())] InvalidProof(PathBuf), @@ -31,15 +40,23 @@ pub(crate) enum CliError { #[error("Failed to compile circuit")] CompilationError, - /// Input parsing error - #[error(transparent)] - InputParserError(#[from] InputParserError), - /// ABI encoding/decoding error #[error(transparent)] AbiError(#[from] AbiError), + /// Filesystem errors + #[error(transparent)] + FilesystemError(#[from] FilesystemError), + /// Error from Nargo #[error(transparent)] NargoError(#[from] NargoError), + + /// Backend error caused by a function on the SmartContract trait + #[error(transparent)] + SmartContractError(::Error), // Unfortunately, Rust won't let us `impl From` over an Associated Type on a generic + + /// Backend error caused by a function on the ProofSystemCompiler trait + #[error(transparent)] + ProofSystemCompilerError(::Error), // Unfortunately, Rust won't let us `impl From` over an Associated Type on a generic } From 7df3bb1b09d5da500bff5490ab2723a7802e8e9c Mon Sep 17 00:00:00 2001 From: jfecher Date: Fri, 12 May 2023 11:14:48 -0400 Subject: [PATCH 61/66] chore(ssa refactor): Simplify inlining pass and fix inlining failure (#1337) * Fix bug in inlining pass * Work on functions with multiple returns too * Forgot to translate_block --- .../src/ssa_refactor/opt/inlining.rs | 154 +++++++++++++++--- 1 file changed, 127 insertions(+), 27 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs b/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs index 6848f84bb7b..50c97b765bb 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs +++ 
b/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs @@ -79,10 +79,6 @@ struct PerFunctionContext<'function> { /// Maps InstructionIds from the function being inlined to the function being inlined into. instructions: HashMap, - /// The TerminatorInstruction::Return in the source_function will be mapped to a jmp to - /// this block in the destination function instead. - return_destination: BasicBlockId, - /// True if we're currently working on the main function. inlining_main: bool, } @@ -124,7 +120,12 @@ impl InlineContext { /// Inlines a function into the current function and returns the translated return values /// of the inlined function. - fn inline_function(&mut self, ssa: &Ssa, id: FunctionId, arguments: &[ValueId]) -> &[ValueId] { + fn inline_function( + &mut self, + ssa: &Ssa, + id: FunctionId, + arguments: &[ValueId], + ) -> Vec { self.recursion_level += 1; if self.recursion_level > RECURSION_LIMIT { @@ -143,9 +144,7 @@ impl InlineContext { let current_block = context.context.builder.current_block(); context.blocks.insert(source_function.entry_block(), current_block); - context.inline_blocks(ssa); - let return_destination = context.return_destination; - self.builder.block_parameters(return_destination) + context.inline_blocks(ssa) } /// Finish inlining and return the new Ssa struct with the inlined version of main. @@ -175,10 +174,7 @@ impl<'function> PerFunctionContext<'function> { /// for containing the mapping between parameters in the source_function and /// the arguments of the destination function. fn new(context: &'function mut InlineContext, source_function: &'function Function) -> Self { - // Create the block to return to but don't insert its parameters until we - // have the types of the actual return values later. 
Self { - return_destination: context.builder.insert_block(), context, source_function, blocks: HashMap::new(), @@ -265,20 +261,60 @@ impl<'function> PerFunctionContext<'function> { } /// Inline all reachable blocks within the source_function into the destination function. - fn inline_blocks(&mut self, ssa: &Ssa) { + fn inline_blocks(&mut self, ssa: &Ssa) -> Vec { let mut seen_blocks = HashSet::new(); let mut block_queue = vec![self.source_function.entry_block()]; + // This Vec will contain each block with a Return instruction along with the + // returned values of that block. + let mut function_returns = vec![]; + while let Some(source_block_id) = block_queue.pop() { let translated_block_id = self.translate_block(source_block_id, &mut block_queue); self.context.builder.switch_to_block(translated_block_id); seen_blocks.insert(source_block_id); self.inline_block(ssa, source_block_id); - self.handle_terminator_instruction(source_block_id, &mut block_queue); + + if let Some((block, values)) = + self.handle_terminator_instruction(source_block_id, &mut block_queue) + { + function_returns.push((block, values)); + } } - self.context.builder.switch_to_block(self.return_destination); + self.handle_function_returns(function_returns) + } + + /// Handle inlining a function's possibly multiple return instructions. + /// If there is only 1 return we can just continue inserting into that block. + /// If there are multiple, we'll need to create a join block to jump to with each value. + fn handle_function_returns( + &mut self, + mut returns: Vec<(BasicBlockId, Vec)>, + ) -> Vec { + // Clippy complains if this were written as an if statement + match returns.len() { + 1 => { + let (return_block, return_values) = returns.remove(0); + self.context.builder.switch_to_block(return_block); + return_values + } + n if n > 1 => { + // If there is more than 1 return instruction we'll need to create a single block we + // can return to and continue inserting in afterwards. 
+ let return_block = self.context.builder.insert_block(); + + for (block, return_values) in returns { + self.context.builder.switch_to_block(block); + self.context.builder.terminate_with_jmp(return_block, return_values); + } + + self.context.builder.switch_to_block(return_block); + self.context.builder.block_parameters(return_block).to_vec() + } + _ => unreachable!("Inlined function had no return values"), + } } /// Inline each instruction in the given block into the function being inlined into. @@ -307,7 +343,7 @@ impl<'function> PerFunctionContext<'function> { let old_results = self.source_function.dfg.instruction_results(call_id); let arguments = vecmap(arguments, |arg| self.translate_value(*arg)); let new_results = self.context.inline_function(ssa, function, &arguments); - Self::insert_new_instruction_results(&mut self.values, old_results, new_results); + Self::insert_new_instruction_results(&mut self.values, old_results, &new_results); } /// Push the given instruction from the source_function into the current block of the @@ -340,16 +376,20 @@ impl<'function> PerFunctionContext<'function> { /// Handle the given terminator instruction from the given source function block. /// This will push any new blocks to the destination function as needed, add them /// to the block queue, and set the terminator instruction for the current block. + /// + /// If the terminator instruction was a Return, this will return the block this instruction + /// was in as well as the values that were returned. 
fn handle_terminator_instruction( &mut self, block_id: BasicBlockId, block_queue: &mut Vec, - ) { + ) -> Option<(BasicBlockId, Vec)> { match self.source_function.dfg[block_id].terminator() { Some(TerminatorInstruction::Jmp { destination, arguments }) => { let destination = self.translate_block(*destination, block_queue); let arguments = vecmap(arguments, |arg| self.translate_value(*arg)); self.context.builder.terminate_with_jmp(destination, arguments); + None } Some(TerminatorInstruction::JmpIf { condition, @@ -360,21 +400,15 @@ impl<'function> PerFunctionContext<'function> { let then_block = self.translate_block(*then_destination, block_queue); let else_block = self.translate_block(*else_destination, block_queue); self.context.builder.terminate_with_jmpif(condition, then_block, else_block); + None } Some(TerminatorInstruction::Return { return_values }) => { let return_values = vecmap(return_values, |value| self.translate_value(*value)); - if self.inlining_main { - self.context.builder.terminate_with_return(return_values); - } else { - for value in &return_values { - // Add the block parameters for the return block here since we don't do - // it when inserting the block in PerFunctionContext::new - let typ = self.context.builder.current_function.dfg.type_of_value(*value); - self.context.builder.add_block_parameter(self.return_destination, typ); - } - self.context.builder.terminate_with_jmp(self.return_destination, return_values); + self.context.builder.terminate_with_return(return_values.clone()); } + let block_id = self.translate_block(block_id, block_queue); + Some((block_id, return_values)) } None => unreachable!("Block has no terminator instruction"), } @@ -384,7 +418,7 @@ impl<'function> PerFunctionContext<'function> { #[cfg(test)] mod test { use crate::ssa_refactor::{ - ir::{map::Id, types::Type}, + ir::{instruction::BinaryOp, map::Id, types::Type}, ssa_builder::FunctionBuilder, }; @@ -418,4 +452,70 @@ mod test { let inlined = ssa.inline_functions(); 
assert_eq!(inlined.functions.len(), 1); } + + #[test] + fn complex_inlining() { + // This SSA is from issue #1327 which previously failed to inline properly + // + // fn main f0 { + // b0(v0: Field): + // v7 = call f2(f1) + // v13 = call f3(v7) + // v16 = call v13(v0) + // return v16 + // } + // fn square f1 { + // b0(v0: Field): + // v2 = mul v0, v0 + // return v2 + // } + // fn id1 f2 { + // b0(v0: function): + // return v0 + // } + // fn id2 f3 { + // b0(v0: function): + // return v0 + // } + let main_id = Id::test_new(0); + let square_id = Id::test_new(1); + let id1_id = Id::test_new(2); + let id2_id = Id::test_new(3); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id); + let main_v0 = builder.add_parameter(Type::field()); + + let main_f1 = builder.import_function(square_id); + let main_f2 = builder.import_function(id1_id); + let main_f3 = builder.import_function(id2_id); + + let main_v7 = builder.insert_call(main_f2, vec![main_f1], vec![Type::Function])[0]; + let main_v13 = builder.insert_call(main_f3, vec![main_v7], vec![Type::Function])[0]; + let main_v16 = builder.insert_call(main_v13, vec![main_v0], vec![Type::field()])[0]; + builder.terminate_with_return(vec![main_v16]); + + // Compiling square f1 + builder.new_function("square".into(), square_id); + let square_v0 = builder.add_parameter(Type::field()); + let square_v2 = builder.insert_binary(square_v0, BinaryOp::Mul, square_v0); + builder.terminate_with_return(vec![square_v2]); + + // Compiling id1 f2 + builder.new_function("id1".into(), id1_id); + let id1_v0 = builder.add_parameter(Type::Function); + builder.terminate_with_return(vec![id1_v0]); + + // Compiling id2 f3 + builder.new_function("id2".into(), id2_id); + let id2_v0 = builder.add_parameter(Type::Function); + builder.terminate_with_return(vec![id2_v0]); + + // Done, now we test that we can successfully inline all functions. 
+ let ssa = builder.finish(); + assert_eq!(ssa.functions.len(), 4); + + let inlined = ssa.inline_functions(); + assert_eq!(inlined.functions.len(), 1); + } } From c3bdec294234e92a73f39720ec7202fbb17ddc79 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Mon, 15 May 2023 19:54:24 +0200 Subject: [PATCH 62/66] feat: enable to_radix for any field element (#1343) * Enable to_radix for any field element * add integration test * use proper bound during modulo (and small optimisation) * update integration test --- .../to_bytes_integration/Prover.toml | 1 + .../to_bytes_integration/src/main.nr | 15 ++- .../src/ssa/acir_gen/constraints.rs | 99 ++++++++++++++++--- 3 files changed, 100 insertions(+), 15 deletions(-) diff --git a/crates/nargo_cli/tests/test_data/to_bytes_integration/Prover.toml b/crates/nargo_cli/tests/test_data/to_bytes_integration/Prover.toml index 07fe857ac7c..23f7acea449 100644 --- a/crates/nargo_cli/tests/test_data/to_bytes_integration/Prover.toml +++ b/crates/nargo_cli/tests/test_data/to_bytes_integration/Prover.toml @@ -1 +1,2 @@ x = "2040124" +_y = "0x2000000000000000000000000000000000000000000000000000000000000000" diff --git a/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr b/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr index 1932b7556a8..36e6d430e2e 100644 --- a/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr +++ b/crates/nargo_cli/tests/test_data/to_bytes_integration/src/main.nr @@ -1,6 +1,6 @@ use dep::std; -fn main(x : Field) { +fn main(x : Field, _y: Field) { // The result of this byte array will be big-endian let y: Field = 2040124; let be_byte_array = y.to_be_bytes(31); @@ -11,4 +11,17 @@ fn main(x : Field) { assert(le_byte_array[0] == be_byte_array[30]); assert(le_byte_array[1] == be_byte_array[29]); assert(le_byte_array[2] == be_byte_array[28]); + + let z = 0 - 1; + let p_bytes = std::field::modulus_le_bytes(); + let z_bytes = 
z.to_le_bytes(32); + assert(p_bytes[10] == z_bytes[10]); + assert(p_bytes[0] == z_bytes[0] as u8 + 1 as u8); + + let p_bits = std::field::modulus_le_bits(); + let z_bits = z.to_le_bits(std::field::modulus_num_bits() as u32); + assert(z_bits[0] == 0); + assert(p_bits[100] == z_bits[100]); + + _y.to_le_bits(std::field::modulus_num_bits() as u32); } \ No newline at end of file diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs b/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs index 11371dc54a6..0f8ad42620d 100644 --- a/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs +++ b/crates/noirc_evaluator/src/ssa/acir_gen/constraints.rs @@ -405,25 +405,43 @@ pub(crate) fn to_radix_base( evaluator: &mut Evaluator, ) -> Vec { // ensure there is no overflow - let mut max = BigUint::from(radix); - max = max.pow(limb_size) - BigUint::one(); - assert!(max < FieldElement::modulus()); + let rad = BigUint::from(radix); + let max = rad.pow(limb_size) - BigUint::one(); - let (mut result, bytes) = to_radix_little(radix, limb_size, evaluator); + if max < FieldElement::modulus() { + let (mut result, bytes) = to_radix_little(radix, limb_size, evaluator); - evaluator.push_opcode(AcirOpcode::Directive(Directive::ToLeRadix { - a: lhs.clone(), - b: result.clone(), - radix, - })); + evaluator.push_opcode(AcirOpcode::Directive(Directive::ToLeRadix { + a: lhs.clone(), + b: result.clone(), + radix, + })); - if endianness == Endian::Big { - result.reverse(); - } + if endianness == Endian::Big { + result.reverse(); + } - evaluator.push_opcode(AcirOpcode::Arithmetic(subtract(lhs, FieldElement::one(), &bytes))); + evaluator.push_opcode(AcirOpcode::Arithmetic(subtract(lhs, FieldElement::one(), &bytes))); + result + } else { + let min = rad.pow(limb_size - 1) - BigUint::one(); + assert!(min < FieldElement::modulus()); + + let max_bits = max.bits() as u32; + let a = evaluate_constant_modulo(lhs, radix, max_bits, evaluator) + .to_witness() + .expect("Constant expressions should 
already be simplified"); + let y = subtract(lhs, FieldElement::one(), &Expression::from(a)); + let radix_f = FieldElement::from(radix as i128); + let y = Expression::default().add_mul(FieldElement::one() / radix_f, &y); + let mut b = to_radix_base(&y, radix, limb_size - 1, endianness, evaluator); + match endianness { + Endian::Little => b.insert(0, a), + Endian::Big => b.push(a), + } - result + b + } } //Decomposition into b-base: \sum ai b^i, where 0<=ai Expression { + let modulus = FieldElement::from(rhs as i128); + let modulus_exp = Expression::from_field(modulus); + assert_ne!(rhs, 0); + let modulus_bits = bit_size_u128((rhs - 1) as u128); + assert!(max_bits >= rhs, "max_bits = {max_bits}, rhs = {rhs}"); + //0. Check for constant expression. This can happen through arithmetic simplifications + if let Some(a_c) = lhs.to_const() { + let mut a_big = BigUint::from_bytes_be(&a_c.to_be_bytes()); + a_big %= BigUint::from_bytes_be(&modulus.to_be_bytes()); + return Expression::from(FieldElement::from_be_bytes_reduce(&a_big.to_bytes_be())); + } + + //1. Generate witnesses b,c + let b_witness = evaluator.add_witness_to_cs(); + let c_witness = evaluator.add_witness_to_cs(); + evaluator.push_opcode(AcirOpcode::Directive(Directive::Quotient { + a: lhs.clone(), + b: modulus_exp.clone(), + q: c_witness, + r: b_witness, + predicate: None, + })); + bound_constraint_with_offset( + &Expression::from(b_witness), + &modulus_exp, + &Expression::one(), + modulus_bits, + evaluator, + ); + //if rhs is a power of 2, then we avoid this range check as it is redundant with the previous one. + if rhs & (rhs - 1) != 0 { + try_range_constraint(b_witness, modulus_bits, evaluator); + } + let c_bound = FieldElement::modulus() / BigUint::from(rhs) - BigUint::one(); + try_range_constraint(c_witness, c_bound.bits() as u32, evaluator); + + //2. 
Add the constraint lhs = b+q*rhs + let b_arith = b_witness.into(); + let c_arith = c_witness.into(); + let res = add(&b_arith, modulus, &c_arith); + let my_constraint = add(&res, -FieldElement::one(), lhs); + evaluator.push_opcode(AcirOpcode::Arithmetic(my_constraint)); + + Expression::from(b_witness) +} + pub(crate) fn evaluate_udiv( lhs: &Expression, rhs: &Expression, From 0181813203a9e3e46c6d8c3169ad5d25971d4282 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Tue, 16 May 2023 11:01:39 +0100 Subject: [PATCH 63/66] chore(noir): Release 0.6.0 (#1279) * chore(noir): Release 0.6.0 * chore: Update lockfile --- CHANGELOG.md | 33 +++++++++++++++++++++++++++++++++ Cargo.lock | 22 +++++++++++----------- Cargo.toml | 2 +- flake.nix | 2 +- 4 files changed, 46 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c554330a470..1a8724fe497 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,38 @@ # Changelog +## [0.6.0](https://github.com/noir-lang/noir/compare/v0.5.1...v0.6.0) (2023-05-16) + + +### ⚠ BREAKING CHANGES + +* Update to acvm 0.11.0 ([#1322](https://github.com/noir-lang/noir/issues/1322)) +* **parser:** deprecate `constrain` keyword for `assert` ([#1286](https://github.com/noir-lang/noir/issues/1286)) + +### Features + +* Enable `to_radix` for any field element ([#1343](https://github.com/noir-lang/noir/issues/1343)) ([c3bdec2](https://github.com/noir-lang/noir/commit/c3bdec294234e92a73f39720ec7202fbb17ddc79)) +* Enable dynamic arrays ([#1271](https://github.com/noir-lang/noir/issues/1271)) ([9f43450](https://github.com/noir-lang/noir/commit/9f434507fa431a9dbf4130374b866a5de6176d76)) +* Issue an error when attempting to use a `return` expression ([#1330](https://github.com/noir-lang/noir/issues/1330)) ([a6de557](https://github.com/noir-lang/noir/commit/a6de557e83eb6318d091e40553bb3e2b3823fdc5)) +* **nargo:** Remove usage of `CompiledProgram` in CLI code and use separate ABI/bytecode 
([#1269](https://github.com/noir-lang/noir/issues/1269)) ([f144391](https://github.com/noir-lang/noir/commit/f144391b4295b127f3f422e862a087a90dac1dbf)) +* **ssa refactor:** experimental-ssa compiler flag ([#1289](https://github.com/noir-lang/noir/issues/1289)) ([afa6749](https://github.com/noir-lang/noir/commit/afa67494c564b68b667535f2d8ef234fbc4bec12)) +* **ssa refactor:** Implement dominator tree ([#1278](https://github.com/noir-lang/noir/issues/1278)) ([144ebf5](https://github.com/noir-lang/noir/commit/144ebf51522fb19847be28de5595247051fcd92e)) +* **ssa:** add block opcode ([#1291](https://github.com/noir-lang/noir/issues/1291)) ([951ad71](https://github.com/noir-lang/noir/commit/951ad71e0f8bc9a6e95ae21197854396ed7f6e78)) +* **stdlib:** add keccak256 foreign function ([#1249](https://github.com/noir-lang/noir/issues/1249)) ([260d87d](https://github.com/noir-lang/noir/commit/260d87d1ef86069a1fcf0f9b4969589273e381d1)) + + +### Bug Fixes + +* Fix issue with parsing nested generics ([#1319](https://github.com/noir-lang/noir/issues/1319)) ([36f5b8e](https://github.com/noir-lang/noir/commit/36f5b8e88fe8048ece1a54755789d56c8803b3ab)) +* Fix parser error preventing assignments to tuple fields ([#1318](https://github.com/noir-lang/noir/issues/1318)) ([460568e](https://github.com/noir-lang/noir/commit/460568e50a810f90db6559195492547095ab8c32)) +* Fix struct or tuple field assignment failing with generics ([#1317](https://github.com/noir-lang/noir/issues/1317)) ([d872890](https://github.com/noir-lang/noir/commit/d872890e408ada056e9aab84a7774dcaa2049324)), closes [#1315](https://github.com/noir-lang/noir/issues/1315) +* **stdlib:** support use of `to_bits` and `to_radix` for values >128 bits ([#1312](https://github.com/noir-lang/noir/issues/1312)) ([12f3e7e](https://github.com/noir-lang/noir/commit/12f3e7e5917fdcb6b8648032772a7541eaef4751)) + + +### Miscellaneous Chores + +* **parser:** deprecate `constrain` keyword for `assert` 
([#1286](https://github.com/noir-lang/noir/issues/1286)) ([9740f54](https://github.com/noir-lang/noir/commit/9740f54c28f30ea9367897fa986d8aea1aba79f2)) +* Update to acvm 0.11.0 ([#1322](https://github.com/noir-lang/noir/issues/1322)) ([da47368](https://github.com/noir-lang/noir/commit/da473685524fc6e5e17f9c3eb95116378ac41fb8)) + ## [0.5.1](https://github.com/noir-lang/noir/compare/v0.5.0...v0.5.1) (2023-05-01) diff --git a/Cargo.lock b/Cargo.lock index c7a30e3adc0..36e2539b098 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -140,7 +140,7 @@ checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" [[package]] name = "arena" -version = "0.5.1" +version = "0.6.0" dependencies = [ "generational-arena", ] @@ -1171,7 +1171,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.5.1" +version = "0.6.0" dependencies = [ "cfg-if 1.0.0", "codespan-reporting 0.9.5", @@ -1648,7 +1648,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.5.1" +version = "0.6.0" [[package]] name = "itertools" @@ -1857,7 +1857,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.5.1" +version = "0.6.0" dependencies = [ "acvm", "iter-extended", @@ -1871,7 +1871,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.5.1" +version = "0.6.0" dependencies = [ "acvm", "acvm-backend-barretenberg", @@ -1902,7 +1902,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.5.1" +version = "0.6.0" dependencies = [ "acvm", "build-data", @@ -1918,7 +1918,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.5.1" +version = "0.6.0" dependencies = [ "acvm", "iter-extended", @@ -1932,7 +1932,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.5.1" +version = "0.6.0" dependencies = [ "acvm", "clap", @@ -1947,7 +1947,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.5.1" +version = "0.6.0" dependencies = [ "chumsky", "codespan", @@ 
-1958,7 +1958,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.5.1" +version = "0.6.0" dependencies = [ "acvm", "arena", @@ -1974,7 +1974,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.5.1" +version = "0.6.0" dependencies = [ "acvm", "arena", diff --git a/Cargo.toml b/Cargo.toml index 4169aa9b6eb..1b9b9d61f90 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ default-members = ["crates/nargo_cli"] [workspace.package] # x-release-please-start-version -version = "0.5.1" +version = "0.6.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" diff --git a/flake.nix b/flake.nix index 7109e266e0f..c15a1e51061 100644 --- a/flake.nix +++ b/flake.nix @@ -106,7 +106,7 @@ commonArgs = environment // { pname = "noir"; # x-release-please-start-version - version = "0.5.1"; + version = "0.6.0"; # x-release-please-end # Use our custom stdenv to build and test our Rust project From 63d84a30fcbc117443cd3b404e872cb3c2f26111 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 16 May 2023 07:12:14 -0400 Subject: [PATCH 64/66] chore(ssa refactor): Add basic instruction simplification (#1329) * Add basic instruction simplification * Cargo fmt * Add comments --- .../src/ssa_refactor/ir/dfg.rs | 70 +++++- .../src/ssa_refactor/ir/instruction.rs | 202 +++++++++++++++++- .../src/ssa_refactor/opt/inlining.rs | 19 +- .../src/ssa_refactor/ssa_builder/mod.rs | 19 +- 4 files changed, 285 insertions(+), 25 deletions(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index 3ab345f06b9..fc15f3e2168 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -100,15 +100,21 @@ impl DataFlowGraph { id } - /// Replace an instruction id with another. 
- /// - /// This function should generally be avoided if possible in favor of inserting new - /// instructions since it does not check whether the instruction results of the removed - /// instruction are still in use. Users of this function thus need to ensure the old - /// instruction's results are no longer in use or are otherwise compatible with the - /// new instruction's result count and types. - pub(crate) fn replace_instruction(&mut self, id: Id, instruction: Instruction) { - self.instructions[id] = instruction; + /// Inserts a new instruction at the end of the given block and returns its results + pub(crate) fn insert_instruction( + &mut self, + instruction: Instruction, + block: BasicBlockId, + ctrl_typevars: Option>, + ) -> InsertInstructionResult { + match instruction.simplify(self) { + Some(simplification) => InsertInstructionResult::SimplifiedTo(simplification), + None => { + let id = self.make_instruction(instruction, ctrl_typevars); + self.insert_instruction_in_block(block, id); + InsertInstructionResult::Results(self.instruction_results(id)) + } + } } /// Insert a value into the dfg's storage and return an id to reference it. @@ -300,6 +306,52 @@ impl std::ops::IndexMut for DataFlowGraph { } } +// The result of calling DataFlowGraph::insert_instruction can +// be a list of results or a single ValueId if the instruction was simplified +// to an existing value. +pub(crate) enum InsertInstructionResult<'dfg> { + Results(&'dfg [ValueId]), + SimplifiedTo(ValueId), + InstructionRemoved, +} + +impl<'dfg> InsertInstructionResult<'dfg> { + /// Retrieve the first (and expected to be the only) result. + pub(crate) fn first(&self) -> ValueId { + match self { + InsertInstructionResult::SimplifiedTo(value) => *value, + InsertInstructionResult::Results(results) => results[0], + InsertInstructionResult::InstructionRemoved => { + panic!("Instruction was removed, no results") + } + } + } + + /// Return all the results contained in the internal results array. 
+ /// This is used for instructions returning multiple results that were + /// not simplified - like function calls. + pub(crate) fn results(&self) -> &'dfg [ValueId] { + match self { + InsertInstructionResult::Results(results) => results, + InsertInstructionResult::SimplifiedTo(_) => { + panic!("InsertInstructionResult::results called on a simplified instruction") + } + InsertInstructionResult::InstructionRemoved => { + panic!("InsertInstructionResult::results called on a removed instruction") + } + } + } + + /// Returns the amount of ValueIds contained + pub(crate) fn len(&self) -> usize { + match self { + InsertInstructionResult::SimplifiedTo(_) => 1, + InsertInstructionResult::Results(results) => results.len(), + InsertInstructionResult::InstructionRemoved => 0, + } + } +} + #[cfg(test)] mod tests { use super::DataFlowGraph; diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 812d12b23a3..42968568dee 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,7 +1,13 @@ -use acvm::acir::BlackBoxFunc; +use acvm::{acir::BlackBoxFunc, FieldElement}; use iter_extended::vecmap; -use super::{basic_block::BasicBlockId, map::Id, types::Type, value::ValueId}; +use super::{ + basic_block::BasicBlockId, + dfg::DataFlowGraph, + map::Id, + types::Type, + value::{Value, ValueId}, +}; /// Reference to an instruction /// @@ -151,6 +157,48 @@ impl Instruction { } } } + + /// Try to simplify this instruction. If the instruction can be simplified to a known value, + /// that value is returned. Otherwise None is returned. 
+ pub(crate) fn simplify(&self, dfg: &mut DataFlowGraph) -> Option { + match self { + Instruction::Binary(binary) => binary.simplify(dfg), + Instruction::Cast(value, typ) => (*typ == dfg.type_of_value(*value)).then_some(*value), + Instruction::Not(value) => { + match &dfg[*value] { + // Limit optimizing ! on constants to only booleans. If we tried it on fields, + // there is no Not on FieldElement, so we'd need to convert between u128. This + // would be incorrect however since the extra bits on the field would not be flipped. + Value::NumericConstant { constant, typ } if *typ == Type::bool() => { + let value = dfg[*constant].value().is_zero() as u128; + Some(dfg.make_constant(value.into(), Type::bool())) + } + Value::Instruction { instruction, .. } => { + // !!v => v + match &dfg[*instruction] { + Instruction::Not(value) => Some(*value), + _ => None, + } + } + _ => None, + } + } + Instruction::Constrain(value) => { + if let Some(constant) = dfg.get_numeric_constant(*value) { + if constant.is_one() { + // "simplify" to a unit literal that will just be thrown away anyway + return Some(dfg.make_constant(0u128.into(), Type::Unit)); + } + } + None + } + Instruction::Truncate { .. } => None, + Instruction::Call { .. } => None, + Instruction::Allocate { .. } => None, + Instruction::Load { .. } => None, + Instruction::Store { .. } => None, + } + } } /// The possible return values for Instruction::return_types @@ -219,6 +267,156 @@ impl Binary { _ => InstructionResultType::Operand(self.lhs), } } + + /// Try to simplify this binary instruction, returning the new value if possible. 
+ fn simplify(&self, dfg: &mut DataFlowGraph) -> Option { + let lhs = dfg.get_numeric_constant(self.lhs); + let rhs = dfg.get_numeric_constant(self.rhs); + let operand_type = dfg.type_of_value(self.lhs); + + if let (Some(lhs), Some(rhs)) = (lhs, rhs) { + return self.eval_constants(dfg, lhs, rhs, operand_type); + } + + let lhs_is_zero = lhs.map_or(false, |lhs| lhs.is_zero()); + let rhs_is_zero = rhs.map_or(false, |rhs| rhs.is_zero()); + + let lhs_is_one = lhs.map_or(false, |lhs| lhs.is_one()); + let rhs_is_one = rhs.map_or(false, |rhs| rhs.is_one()); + + match self.operator { + BinaryOp::Add => { + if lhs_is_zero { + return Some(self.rhs); + } + if rhs_is_zero { + return Some(self.lhs); + } + } + BinaryOp::Sub => { + if rhs_is_zero { + return Some(self.lhs); + } + } + BinaryOp::Mul => { + if lhs_is_one { + return Some(self.rhs); + } + if rhs_is_one { + return Some(self.lhs); + } + } + BinaryOp::Div => { + if rhs_is_one { + return Some(self.lhs); + } + } + BinaryOp::Mod => { + if rhs_is_one { + return Some(self.lhs); + } + } + BinaryOp::Eq => { + if self.lhs == self.rhs { + return Some(dfg.make_constant(FieldElement::one(), Type::bool())); + } + } + BinaryOp::Lt => { + if self.lhs == self.rhs { + return Some(dfg.make_constant(FieldElement::zero(), Type::bool())); + } + } + BinaryOp::And => { + if lhs_is_zero || rhs_is_zero { + return Some(dfg.make_constant(FieldElement::zero(), operand_type)); + } + } + BinaryOp::Or => { + if lhs_is_zero { + return Some(self.rhs); + } + if rhs_is_zero { + return Some(self.lhs); + } + } + BinaryOp::Xor => (), + BinaryOp::Shl => { + if rhs_is_zero { + return Some(self.lhs); + } + } + BinaryOp::Shr => { + if rhs_is_zero { + return Some(self.lhs); + } + } + } + None + } + + /// Evaluate the two constants with the operation specified by self.operator. + /// Pushes the resulting value to the given DataFlowGraph's constants and returns it. 
+ fn eval_constants( + &self, + dfg: &mut DataFlowGraph, + lhs: FieldElement, + rhs: FieldElement, + operand_type: Type, + ) -> Option> { + let value = match self.operator { + BinaryOp::Add => lhs + rhs, + BinaryOp::Sub => lhs - rhs, + BinaryOp::Mul => lhs * rhs, + BinaryOp::Div => lhs / rhs, + BinaryOp::Eq => (lhs == rhs).into(), + BinaryOp::Lt => (lhs < rhs).into(), + + // The rest of the operators we must try to convert to u128 first + BinaryOp::Mod => self.eval_constant_u128_operations(lhs, rhs)?, + BinaryOp::And => self.eval_constant_u128_operations(lhs, rhs)?, + BinaryOp::Or => self.eval_constant_u128_operations(lhs, rhs)?, + BinaryOp::Xor => self.eval_constant_u128_operations(lhs, rhs)?, + BinaryOp::Shl => self.eval_constant_u128_operations(lhs, rhs)?, + BinaryOp::Shr => self.eval_constant_u128_operations(lhs, rhs)?, + }; + // TODO: Keep original type of constant + Some(dfg.make_constant(value, operand_type)) + } + + /// Try to evaluate the given operands as u128s for operators that are only valid on u128s, + /// like the bitwise operators and modulus. + fn eval_constant_u128_operations( + &self, + lhs: FieldElement, + rhs: FieldElement, + ) -> Option { + let lhs = lhs.try_into_u128()?; + let rhs = rhs.try_into_u128()?; + match self.operator { + BinaryOp::Mod => Some((lhs % rhs).into()), + BinaryOp::And => Some((lhs & rhs).into()), + BinaryOp::Or => Some((lhs | rhs).into()), + BinaryOp::Shr => Some((lhs >> rhs).into()), + // Check for overflow and return None if anything does overflow + BinaryOp::Shl => { + let rhs = rhs.try_into().ok()?; + lhs.checked_shl(rhs).map(Into::into) + } + + // Converting a field xor to a u128 xor would be incorrect since we wouldn't have the + // extra bits of the field. So we don't optimize it here. 
+ BinaryOp::Xor => None, + + op @ (BinaryOp::Add + | BinaryOp::Sub + | BinaryOp::Mul + | BinaryOp::Div + | BinaryOp::Eq + | BinaryOp::Lt) => panic!( + "eval_constant_u128_operations invalid for {op:?} use eval_constants instead" + ), + } + } } /// Binary Operations allowed in the IR. diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs b/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs index 50c97b765bb..c63cac520bf 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs @@ -9,6 +9,7 @@ use iter_extended::vecmap; use crate::ssa_refactor::{ ir::{ basic_block::BasicBlockId, + dfg::InsertInstructionResult, function::{Function, FunctionId}, instruction::{Instruction, InstructionId, TerminatorInstruction}, value::{Value, ValueId}, @@ -343,7 +344,8 @@ impl<'function> PerFunctionContext<'function> { let old_results = self.source_function.dfg.instruction_results(call_id); let arguments = vecmap(arguments, |arg| self.translate_value(*arg)); let new_results = self.context.inline_function(ssa, function, &arguments); - Self::insert_new_instruction_results(&mut self.values, old_results, &new_results); + let new_results = InsertInstructionResult::Results(&new_results); + Self::insert_new_instruction_results(&mut self.values, old_results, new_results); } /// Push the given instruction from the source_function into the current block of the @@ -365,11 +367,20 @@ impl<'function> PerFunctionContext<'function> { fn insert_new_instruction_results( values: &mut HashMap, old_results: &[ValueId], - new_results: &[ValueId], + new_results: InsertInstructionResult, ) { assert_eq!(old_results.len(), new_results.len()); - for (old_result, new_result) in old_results.iter().zip(new_results) { - values.insert(*old_result, *new_result); + + match new_results { + InsertInstructionResult::SimplifiedTo(new_result) => { + values.insert(old_results[0], new_result); + } + 
InsertInstructionResult::Results(new_results) => { + for (old_result, new_result) in old_results.iter().zip(new_results) { + values.insert(*old_result, *new_result); + } + } + InsertInstructionResult::InstructionRemoved => (), } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs index f621503e59a..60379097523 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs @@ -11,6 +11,7 @@ use crate::ssa_refactor::ir::{ use super::{ ir::{ basic_block::BasicBlock, + dfg::InsertInstructionResult, instruction::{InstructionId, Intrinsic}, }, ssa_gen::Ssa, @@ -108,10 +109,8 @@ impl FunctionBuilder { &mut self, instruction: Instruction, ctrl_typevars: Option>, - ) -> &[ValueId] { - let id = self.current_function.dfg.make_instruction(instruction, ctrl_typevars); - self.current_function.dfg.insert_instruction_in_block(self.current_block, id); - self.current_function.dfg.instruction_results(id) + ) -> InsertInstructionResult { + self.current_function.dfg.insert_instruction(instruction, self.current_block, ctrl_typevars) } /// Switch to inserting instructions in the given block. @@ -130,7 +129,7 @@ impl FunctionBuilder { /// given amount of field elements. Returns the result of the allocate instruction, /// which is always a Reference to the allocated data. 
pub(crate) fn insert_allocate(&mut self, size_to_allocate: u32) -> ValueId { - self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None)[0] + self.insert_instruction(Instruction::Allocate { size: size_to_allocate }, None).first() } /// Insert a Load instruction at the end of the current block, loading from the given offset @@ -147,7 +146,7 @@ impl FunctionBuilder { type_to_load: Type, ) -> ValueId { address = self.insert_binary(address, BinaryOp::Add, offset); - self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load]))[0] + self.insert_instruction(Instruction::Load { address }, Some(vec![type_to_load])).first() } /// Insert a Store instruction at the end of the current block, storing the given element @@ -166,19 +165,19 @@ impl FunctionBuilder { rhs: ValueId, ) -> ValueId { let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); - self.insert_instruction(instruction, None)[0] + self.insert_instruction(instruction, None).first() } /// Insert a not instruction at the end of the current block. /// Returns the result of the instruction. pub(crate) fn insert_not(&mut self, rhs: ValueId) -> ValueId { - self.insert_instruction(Instruction::Not(rhs), None)[0] + self.insert_instruction(Instruction::Not(rhs), None).first() } /// Insert a cast instruction at the end of the current block. /// Returns the result of the cast instruction. pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { - self.insert_instruction(Instruction::Cast(value, typ), None)[0] + self.insert_instruction(Instruction::Cast(value, typ), None).first() } /// Insert a constrain instruction at the end of the current block. 
@@ -194,7 +193,7 @@ impl FunctionBuilder { arguments: Vec, result_types: Vec, ) -> &[ValueId] { - self.insert_instruction(Instruction::Call { func, arguments }, Some(result_types)) + self.insert_instruction(Instruction::Call { func, arguments }, Some(result_types)).results() } /// Terminates the current block with the given terminator instruction From a830ce5c3b247a7a60d7d4de4f8470e3227b8a47 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Tue, 16 May 2023 15:41:57 +0200 Subject: [PATCH 65/66] chore(ssa): enable cse for assert (#1350) enable cse for assert --- crates/noirc_evaluator/src/ssa/node.rs | 30 +++++++++++++++++++ .../noirc_evaluator/src/ssa/optimizations.rs | 16 ++++++++++ 2 files changed, 46 insertions(+) diff --git a/crates/noirc_evaluator/src/ssa/node.rs b/crates/noirc_evaluator/src/ssa/node.rs index bec3c923a6d..4566d974813 100644 --- a/crates/noirc_evaluator/src/ssa/node.rs +++ b/crates/noirc_evaluator/src/ssa/node.rs @@ -506,6 +506,36 @@ impl Instruction { } } } + + pub(crate) fn get_location(&self) -> Option { + match &self.operation { + Operation::Binary(bin) => match bin.operator { + BinaryOp::Udiv(location) + | BinaryOp::Sdiv(location) + | BinaryOp::Urem(location) + | BinaryOp::Srem(location) + | BinaryOp::Div(location) + | BinaryOp::Shr(location) => Some(location), + _ => None, + }, + Operation::Call { location, .. } => Some(*location), + Operation::Load { location, .. } + | Operation::Store { location, .. } + | Operation::Constrain(_, location) => *location, + Operation::Cast(_) + | Operation::Truncate { .. } + | Operation::Not(_) + | Operation::Jne(_, _) + | Operation::Jeq(_, _) + | Operation::Jmp(_) + | Operation::Phi { .. } + | Operation::Return(_) + | Operation::Result { .. } + | Operation::Cond { .. 
} + | Operation::Intrinsic(_, _) + | Operation::Nop => None, + } + } } //adapted from LLVM IR diff --git a/crates/noirc_evaluator/src/ssa/optimizations.rs b/crates/noirc_evaluator/src/ssa/optimizations.rs index f1cfca9c243..d238ae7b0fe 100644 --- a/crates/noirc_evaluator/src/ssa/optimizations.rs +++ b/crates/noirc_evaluator/src/ssa/optimizations.rs @@ -507,6 +507,22 @@ fn cse_block_with_anchor( new_list.push(*ins_id); } } + Operation::Constrain(condition, location) => { + if let Some(similar) = anchor.find_similar_instruction(&operator) { + assert_ne!(similar, ins.id); + *modified = true; + let similar_ins = ctx + .try_get_mut_instruction(similar) + .expect("Similar instructions are instructions"); + if location.is_some() && similar_ins.get_location().is_none() { + similar_ins.operation = Operation::Constrain(*condition, *location); + } + new_mark = Mark::ReplaceWith(similar); + } else { + new_list.push(*ins_id); + anchor.push_front(&ins.operation, *ins_id); + } + } _ => { //TODO: checks we do not need to propagate res arguments new_list.push(*ins_id); From dffa3c50337ec0f71a62377d985ebdc8eefe490e Mon Sep 17 00:00:00 2001 From: joss-aztec <94053499+joss-aztec@users.noreply.github.com> Date: Tue, 16 May 2023 15:59:12 +0100 Subject: [PATCH 66/66] feat(nargo)!: retire print-acir in favour of flag (#1328) feat!(nargo): retire print-acir in favour of flag --- crates/nargo_cli/src/cli/mod.rs | 8 ++--- crates/nargo_cli/src/cli/print_acir_cmd.rs | 35 ---------------------- crates/nargo_cli/tests/prove_and_verify.rs | 2 +- crates/noirc_driver/src/lib.rs | 34 +++++++++++++++++++-- 4 files changed, 35 insertions(+), 44 deletions(-) delete mode 100644 crates/nargo_cli/src/cli/print_acir_cmd.rs diff --git a/crates/nargo_cli/src/cli/mod.rs b/crates/nargo_cli/src/cli/mod.rs index d41dc1a815a..bdb9a926991 100644 --- a/crates/nargo_cli/src/cli/mod.rs +++ b/crates/nargo_cli/src/cli/mod.rs @@ -15,7 +15,6 @@ mod compile_cmd; mod execute_cmd; mod gates_cmd; mod new_cmd; -mod 
print_acir_cmd; mod prove_cmd; mod test_cmd; mod verify_cmd; @@ -56,7 +55,6 @@ enum NargoCommand { Verify(verify_cmd::VerifyCommand), Test(test_cmd::TestCommand), Gates(gates_cmd::GatesCommand), - PrintAcir(print_acir_cmd::PrintAcirCommand), } pub fn start_cli() -> eyre::Result<()> { @@ -79,18 +77,18 @@ pub fn start_cli() -> eyre::Result<()> { NargoCommand::Test(args) => test_cmd::run(&backend, args, config), NargoCommand::Gates(args) => gates_cmd::run(&backend, args, config), NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), - NargoCommand::PrintAcir(args) => print_acir_cmd::run(&backend, args, config), }?; Ok(()) } // helper function which tests noir programs by trying to generate a proof and verify it -pub fn prove_and_verify(proof_name: &str, program_dir: &Path, show_ssa: bool) -> bool { +pub fn prove_and_verify(proof_name: &str, program_dir: &Path) -> bool { let backend = crate::backends::ConcreteBackend::default(); let compile_options = CompileOptions { - show_ssa, + show_ssa: false, + print_acir: false, allow_warnings: false, show_output: false, experimental_ssa: false, diff --git a/crates/nargo_cli/src/cli/print_acir_cmd.rs b/crates/nargo_cli/src/cli/print_acir_cmd.rs deleted file mode 100644 index 420c57c6a08..00000000000 --- a/crates/nargo_cli/src/cli/print_acir_cmd.rs +++ /dev/null @@ -1,35 +0,0 @@ -use acvm::Backend; -use clap::Args; -use noirc_driver::CompileOptions; -use std::path::Path; - -use crate::cli::compile_cmd::compile_circuit; -use crate::errors::CliError; - -use super::NargoConfig; - -/// Prints out the ACIR for a compiled circuit -#[derive(Debug, Clone, Args)] -pub(crate) struct PrintAcirCommand { - #[clap(flatten)] - compile_options: CompileOptions, -} - -pub(crate) fn run( - backend: &B, - args: PrintAcirCommand, - config: NargoConfig, -) -> Result<(), CliError> { - print_acir_with_path(backend, config.program_dir, &args.compile_options) -} - -fn print_acir_with_path>( - backend: &B, - program_dir: 
P, - compile_options: &CompileOptions, -) -> Result<(), CliError> { - let compiled_program = compile_circuit(backend, program_dir.as_ref(), compile_options)?; - println!("{}", compiled_program.circuit); - - Ok(()) -} diff --git a/crates/nargo_cli/tests/prove_and_verify.rs b/crates/nargo_cli/tests/prove_and_verify.rs index 070db6d8ce8..288073e6c1e 100644 --- a/crates/nargo_cli/tests/prove_and_verify.rs +++ b/crates/nargo_cli/tests/prove_and_verify.rs @@ -80,7 +80,7 @@ mod tests { println!("Running test {test_name}"); let verified = std::panic::catch_unwind(|| { - nargo_cli::cli::prove_and_verify("pp", test_program_dir, false) + nargo_cli::cli::prove_and_verify("pp", test_program_dir) }); let r = match verified { diff --git a/crates/noirc_driver/src/lib.rs b/crates/noirc_driver/src/lib.rs index a2fbed21885..c88a1a02b2b 100644 --- a/crates/noirc_driver/src/lib.rs +++ b/crates/noirc_driver/src/lib.rs @@ -36,6 +36,10 @@ pub struct CompileOptions { #[arg(short, long)] pub show_ssa: bool, + /// Display the ACIR for compiled circuit + #[arg(short, long)] + pub print_acir: bool, + /// Issue a warning for each unused variable instead of an error #[arg(short, long)] pub allow_warnings: bool, @@ -51,7 +55,13 @@ pub struct CompileOptions { impl Default for CompileOptions { fn default() -> Self { - Self { show_ssa: false, allow_warnings: false, show_output: true, experimental_ssa: false } + Self { + show_ssa: false, + print_acir: false, + allow_warnings: false, + show_output: true, + experimental_ssa: false, + } } } @@ -188,7 +198,12 @@ impl Driver { return Err(e); } }; - self.compile_no_check(options, main) + let compiled_program = self.compile_no_check(options, main)?; + if options.print_acir { + println!("Compiled ACIR for main:"); + println!("{}", compiled_program.circuit); + } + Ok(compiled_program) } /// Run the frontend to check the crate for errors then compile all contracts if there were none @@ -198,7 +213,20 @@ impl Driver { ) -> Result, ReportedError> { 
self.check_crate(options)?; let contracts = self.get_all_contracts(); - try_vecmap(contracts, |contract| self.compile_contract(contract, options)) + let compiled_contracts = + try_vecmap(contracts, |contract| self.compile_contract(contract, options))?; + if options.print_acir { + for compiled_contract in &compiled_contracts { + for contract_function in &compiled_contract.functions { + println!( + "Compiled ACIR for {}::{}:", + compiled_contract.name, contract_function.name + ); + println!("{}", contract_function.bytecode); + } + } + } + Ok(compiled_contracts) } /// Compile all of the functions associated with a Noir contract.