fix: prevent comptime println from crashing LSP (noir-lang/noir#5918)
chore: remove equality operation on boolean constraints against constants (noir-lang/noir#5919)
feat(perf): Remove last store in return block if last load is before that store (noir-lang/noir#5910)
chore: Add pass to normalize Ids in SSA (noir-lang/noir#5909)
feat: Sync from aztec-packages (noir-lang/noir#5917)
chore: bump some dependencies (noir-lang/noir#5893)
chore: make nested slice error more clear for `[[T]; N]` case (noir-lang/noir#5906)
feat: better println for Quoted (noir-lang/noir#5896)
feat: LSP diagnostics for all package files (noir-lang/noir#5895)
feat: LSP code action "Fill struct fields" (noir-lang/noir#5885)
chore: Cleanup str_as_bytes (noir-lang/noir#5900)
chore: update git user for release PRs (noir-lang/noir#5894)
AztecBot committed Sep 4, 2024
2 parents e318760 + fdaeff9 commit 94071ca
Showing 13 changed files with 303 additions and 34 deletions.
2 changes: 1 addition & 1 deletion .noir-sync-commit
@@ -1 +1 @@
33bd102d6021912b56fe880efab65346c3ea9228
44cf9a2140bc06b550d4b46966f1637598ac11a7
@@ -270,16 +270,40 @@ impl<'block> BrilligBlock<'block> {
self.convert_ssa_binary(binary, dfg, result_var);
}
Instruction::Constrain(lhs, rhs, assert_message) => {
let condition = SingleAddrVariable {
address: self.brillig_context.allocate_register(),
bit_size: 1,
let (condition, deallocate) = match (
dfg.get_numeric_constant_with_type(*lhs),
dfg.get_numeric_constant_with_type(*rhs),
) {
// If the constraint is of the form `x == u1 1` then we can simply constrain `x` directly
(
Some((constant, Type::Numeric(NumericType::Unsigned { bit_size: 1 }))),
None,
) if constant == FieldElement::one() => {
(self.convert_ssa_single_addr_value(*rhs, dfg), false)
}
(
None,
Some((constant, Type::Numeric(NumericType::Unsigned { bit_size: 1 }))),
) if constant == FieldElement::one() => {
(self.convert_ssa_single_addr_value(*lhs, dfg), false)
}

// Otherwise we need to perform the equality explicitly.
_ => {
let condition = SingleAddrVariable {
address: self.brillig_context.allocate_register(),
bit_size: 1,
};
self.convert_ssa_binary(
&Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq },
dfg,
condition,
);

(condition, true)
}
};

self.convert_ssa_binary(
&Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq },
dfg,
condition,
);
match assert_message {
Some(ConstrainError::Dynamic(selector, values)) => {
let payload_values =
@@ -302,7 +326,9 @@
self.brillig_context.codegen_constrain(condition, None);
}
}
self.brillig_context.deallocate_single_addr(condition);
if deallocate {
self.brillig_context.deallocate_single_addr(condition);
}
}
Instruction::Allocate => {
let result_value = dfg.instruction_results(instruction_id)[0];
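This change to the Brillig `Constrain` lowering skips the explicit equality when one side of the constraint is the boolean constant 1 (`u1`): the other operand is already a single-address boolean and can be constrained directly, so no scratch register needs to be allocated and later deallocated. A standalone sketch of that case split, using illustrative types rather than the real SSA/Brillig ones:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Value {
    BoolConst(bool), // a known u1 constant
    Var(u32),        // hypothetical id of a runtime boolean value
}

// Returns the operand to constrain directly when the constraint has the shape
// `x == true` (in either order); `None` means an explicit Eq must still be
// emitted into a freshly allocated register, mirroring the fallback arm above.
fn simplify_constraint(lhs: Value, rhs: Value) -> Option<Value> {
    match (lhs, rhs) {
        (Value::BoolConst(true), var @ Value::Var(_))
        | (var @ Value::Var(_), Value::BoolConst(true)) => Some(var),
        _ => None,
    }
}

fn main() {
    assert_eq!(simplify_constraint(Value::Var(3), Value::BoolConst(true)), Some(Value::Var(3)));
    assert_eq!(simplify_constraint(Value::Var(3), Value::Var(4)), None);
}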
4 changes: 3 additions & 1 deletion noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs
@@ -82,6 +82,7 @@ pub(crate) fn optimize_into_acir(
) -> Result<ArtifactsAndWarnings, RuntimeError> {
let ssa_gen_span = span!(Level::TRACE, "ssa_generation");
let ssa_gen_span_guard = ssa_gen_span.enter();

let mut ssa = SsaBuilder::new(
program,
options.enable_ssa_logging,
@@ -418,8 +419,9 @@ impl SsaBuilder {
Ok(self.print(msg))
}

fn print(self, msg: &str) -> Self {
fn print(mut self, msg: &str) -> Self {
if self.print_ssa_passes {
self.ssa.normalize_ids();
println!("{msg}\n{}", self.ssa);
}
self
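`print` now takes `mut self` and normalizes value ids before dumping the SSA, in line with the new normalization pass added in noir-lang/noir#5909, so the SSA printed between passes uses stable numbering. A minimal standalone sketch of the renumbering idea, with plain u32 ids instead of the real Id<T> type:

use std::collections::HashMap;

// Remap each id to a fresh counter in order of first appearance, so two
// structurally identical listings print identically.
fn normalize(ids: &[u32]) -> Vec<u32> {
    let mut mapping: HashMap<u32, u32> = HashMap::new();
    let mut normalized = Vec::with_capacity(ids.len());
    for id in ids {
        let next = mapping.len() as u32;
        normalized.push(*mapping.entry(*id).or_insert(next));
    }
    normalized
}

fn main() {
    // v7, v3, v7, v9 become v0, v1, v0, v2 regardless of the original numbering.
    assert_eq!(normalize(&[7, 3, 7, 9]), vec![0, 1, 0, 2]);
}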
10 changes: 6 additions & 4 deletions noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
@@ -770,10 +770,12 @@ impl<'a> Context<'a> {
.map(|result_id| dfg.type_of_value(*result_id).flattened_size())
.sum();

let acir_function_id = ssa
.entry_point_to_generated_index
.get(id)
.expect("ICE: should have an associated final index");
let Some(acir_function_id) =
ssa.entry_point_to_generated_index.get(id)
else {
unreachable!("Expected an associated final index for call to acir function {id} with args {arguments:?}");
};

let output_vars = self.acir_context.call_acir_function(
AcirFunctionId(*acir_function_id),
inputs,
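The bare `.expect(...)` here becomes a `let ... else` so the panic message can include the calling context (the function id and its arguments). The same pattern in a self-contained form, with a hypothetical index map standing in for `entry_point_to_generated_index`:

use std::collections::HashMap;

// Look up the generated index for a function id, panicking with context
// (rather than a bare expect message) if it is missing.
fn generated_index(map: &HashMap<u32, u32>, id: u32, args: &[u32]) -> u32 {
    let Some(index) = map.get(&id) else {
        unreachable!("Expected an associated final index for call to acir function {id} with args {args:?}");
    };
    *index
}

fn main() {
    let map = HashMap::from([(1u32, 0u32)]);
    assert_eq!(generated_index(&map, 1, &[5, 6]), 0);
}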
@@ -244,7 +244,7 @@ impl Context {
}
},
Value::ForeignFunction(..) => {
panic!("Should not be able to reach foreign function from non-brillig functions");
panic!("Should not be able to reach foreign function from non-brillig functions, {func_id} in function {}", function.name());
}
Value::Array { .. }
| Value::Instruction { .. }
@@ -72,6 +72,13 @@ impl Function {
Self { name: another.name.clone(), id, entry_block, dfg, runtime: another.runtime }
}

/// Takes the signature (function name & runtime) from a function but does not copy the body.
pub(crate) fn clone_signature(id: FunctionId, another: &Function) -> Self {
let mut new_function = Function::new(another.name.clone(), id);
new_function.runtime = another.runtime;
new_function
}

/// The name of the function.
/// Used exclusively for debugging purposes.
pub(crate) fn name(&self) -> &str {
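`clone_signature` copies only a function's name and runtime and leaves the body empty, which lets a pass rebuild a function's contents from scratch while keeping its identity. A self-contained sketch of the same idea with simplified stand-in types (not the real ssa::ir ones):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Runtime { Acir, Brillig }

struct Function {
    id: u32,
    name: String,
    runtime: Runtime,
    instructions: Vec<String>, // stands in for the real dfg/body
}

impl Function {
    fn new(name: String, id: u32) -> Self {
        Self { id, name, runtime: Runtime::Acir, instructions: Vec::new() }
    }

    // Take the signature (name & runtime) from `another` without copying its body.
    fn clone_signature(id: u32, another: &Function) -> Self {
        let mut new_function = Function::new(another.name.clone(), id);
        new_function.runtime = another.runtime;
        new_function
    }
}

fn main() {
    let mut original = Function::new("main".into(), 0);
    original.runtime = Runtime::Brillig;
    original.instructions.push("v0 = add v1, v2".into());

    let copy = Function::clone_signature(7, &original);
    assert_eq!(copy.id, 7);
    assert_eq!(copy.name, "main");
    assert_eq!(copy.runtime, Runtime::Brillig);
    assert!(copy.instructions.is_empty()); // the body is intentionally not copied
}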
10 changes: 8 additions & 2 deletions noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/map.rs
@@ -1,6 +1,7 @@
use fxhash::FxHashMap as HashMap;
use serde::{Deserialize, Serialize};
use std::{
collections::BTreeMap,
hash::Hash,
str::FromStr,
sync::atomic::{AtomicUsize, Ordering},
@@ -240,7 +241,7 @@ impl<T> std::ops::IndexMut<Id<T>> for DenseMap<T> {
/// call to .remove().
#[derive(Debug)]
pub(crate) struct SparseMap<T> {
storage: HashMap<Id<T>, T>,
storage: BTreeMap<Id<T>, T>,
}

impl<T> SparseMap<T> {
@@ -271,11 +272,16 @@ impl<T> SparseMap<T> {
pub(crate) fn remove(&mut self, id: Id<T>) -> Option<T> {
self.storage.remove(&id)
}

/// Unwraps the inner storage of this map
pub(crate) fn into_btree(self) -> BTreeMap<Id<T>, T> {
self.storage
}
}

impl<T> Default for SparseMap<T> {
fn default() -> Self {
Self { storage: HashMap::default() }
Self { storage: Default::default() }
}
}

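Switching `SparseMap`'s storage from `FxHashMap` to `BTreeMap` makes iteration order deterministic (keys come back sorted), and `into_btree` hands that ordered storage to callers; deterministic traversal is presumably what the new id-normalization pass relies on for reproducible output. A minimal illustration of the ordering guarantee:

use std::collections::BTreeMap;

fn main() {
    let mut storage: BTreeMap<u32, &str> = BTreeMap::new();
    storage.insert(9, "v9");
    storage.insert(1, "v1");
    storage.insert(4, "v4");

    // Keys always come back sorted, regardless of insertion order.
    let keys: Vec<u32> = storage.keys().copied().collect();
    assert_eq!(keys, vec![1, 4, 9]);
}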
13 changes: 7 additions & 6 deletions noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs
@@ -2,7 +2,7 @@
//! The purpose of this pass is to inline the instructions of each function call
//! within the function caller. If all function calls are known, there will only
//! be a single function remaining when the pass finishes.
use std::collections::{BTreeSet, HashSet};
use std::collections::{BTreeSet, HashSet, VecDeque};

use acvm::acir::AcirField;
use iter_extended::{btree_map, vecmap};
@@ -372,14 +372,14 @@ impl<'function> PerFunctionContext<'function> {
fn translate_block(
&mut self,
source_block: BasicBlockId,
block_queue: &mut Vec<BasicBlockId>,
block_queue: &mut VecDeque<BasicBlockId>,
) -> BasicBlockId {
if let Some(block) = self.blocks.get(&source_block) {
return *block;
}

// The block is not yet inlined, queue it
block_queue.push(source_block);
block_queue.push_back(source_block);

// The block is not already present in the function being inlined into so we must create it.
// The block's instructions are not copied over as they will be copied later in inlining.
@@ -415,13 +415,14 @@ impl<'function> PerFunctionContext<'function> {
/// Inline all reachable blocks within the source_function into the destination function.
fn inline_blocks(&mut self, ssa: &Ssa) -> Vec<ValueId> {
let mut seen_blocks = HashSet::new();
let mut block_queue = vec![self.source_function.entry_block()];
let mut block_queue = VecDeque::new();
block_queue.push_back(self.source_function.entry_block());

// This Vec will contain each block with a Return instruction along with the
// returned values of that block.
let mut function_returns = vec![];

while let Some(source_block_id) = block_queue.pop() {
while let Some(source_block_id) = block_queue.pop_front() {
if seen_blocks.contains(&source_block_id) {
continue;
}
@@ -609,7 +610,7 @@ impl<'function> PerFunctionContext<'function> {
fn handle_terminator_instruction(
&mut self,
block_id: BasicBlockId,
block_queue: &mut Vec<BasicBlockId>,
block_queue: &mut VecDeque<BasicBlockId>,
) -> Option<(BasicBlockId, Vec<ValueId>)> {
match self.source_function.dfg[block_id].unwrap_terminator() {
TerminatorInstruction::Jmp { destination, arguments, call_stack } => {
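The inliner's block work list changes from `Vec` (popped from the back, LIFO) to `VecDeque` (popped from the front, FIFO), so reachable blocks are now inlined in the order they are discovered rather than most-recently-queued first. A small standalone comparison of the two traversal orders:

use std::collections::VecDeque;

fn main() {
    let discovered = [1u32, 2, 3];

    // LIFO: the most recently queued block is processed first.
    let mut stack: Vec<u32> = discovered.to_vec();
    let mut lifo_order = Vec::new();
    while let Some(block) = stack.pop() {
        lifo_order.push(block);
    }
    assert_eq!(lifo_order, vec![3, 2, 1]);

    // FIFO: blocks are processed in the order they were queued.
    let mut queue: VecDeque<u32> = discovered.iter().copied().collect();
    let mut fifo_order = Vec::new();
    while let Some(block) = queue.pop_front() {
        fifo_order.push(block);
    }
    assert_eq!(fifo_order, vec![1, 2, 3]);
}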
32 changes: 27 additions & 5 deletions noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs
@@ -116,7 +116,7 @@ struct PerFunctionContext<'f> {

/// Track a value's last load across all blocks.
/// If a value is not used in any more loads we can remove the last store to that value.
last_loads: HashMap<ValueId, InstructionId>,
last_loads: HashMap<ValueId, (InstructionId, BasicBlockId)>,
}

impl<'f> PerFunctionContext<'f> {
@@ -152,9 +152,31 @@ impl<'f> PerFunctionContext<'f> {
// This rule does not apply to reference parameters, which we must also check for before removing these stores.
for (block_id, block) in self.blocks.iter() {
let block_params = self.inserter.function.dfg.block_parameters(*block_id);
for (value, store_instruction) in block.last_stores.iter() {
let is_reference_param = block_params.contains(value);
if self.last_loads.get(value).is_none() && !is_reference_param {
for (store_address, store_instruction) in block.last_stores.iter() {
let is_reference_param = block_params.contains(store_address);
let terminator = self.inserter.function.dfg[*block_id].unwrap_terminator();

let is_return = matches!(terminator, TerminatorInstruction::Return { .. });
let remove_load = if is_return {
// Determine whether the last store is used in the return value
let mut is_return_value = false;
terminator.for_each_value(|return_value| {
is_return_value = return_value == *store_address || is_return_value;
});

// If the last load of a store is not part of the block with a return terminator,
// we can safely remove this store.
let last_load_not_in_return = self
.last_loads
.get(store_address)
.map(|(_, last_load_block)| *last_load_block != *block_id)
.unwrap_or(true);
!is_return_value && last_load_not_in_return
} else {
self.last_loads.get(store_address).is_none()
};

if remove_load && !is_reference_param {
self.instructions_to_remove.insert(*store_instruction);
}
}
@@ -259,7 +281,7 @@ impl<'f> PerFunctionContext<'f> {
} else {
references.mark_value_used(address, self.inserter.function);

self.last_loads.insert(address, instruction);
self.last_loads.insert(address, (instruction, block_id));
}
}
Instruction::Store { address, value } => {
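The mem2reg change records, for each address, the block containing its last load, and in blocks that end in a return it drops the final store when the stored address is neither part of the returned values nor loaded later in that same block (noir-lang/noir#5910 in the commit message above). A simplified standalone version of that decision, with plain integers in place of ValueId/BasicBlockId and the separate reference-parameter check left out:

// Whether the last store to `store_address` in `current_block` can be removed.
fn can_remove_store(
    is_return_block: bool,
    store_address: u32,
    return_values: &[u32],
    last_load_block: Option<u32>, // block of the last load of this address, if any
    current_block: u32,
) -> bool {
    if is_return_block {
        let is_return_value = return_values.contains(&store_address);
        let last_load_not_in_return =
            last_load_block.map(|block| block != current_block).unwrap_or(true);
        !is_return_value && last_load_not_in_return
    } else {
        // Outside return blocks the old rule still applies: remove the store
        // only if the address is never loaded at all.
        last_load_block.is_none()
    }
}

fn main() {
    // Store to v5 in return block 2; its last load was in block 1 and v5 is not
    // returned, so the store is removable.
    assert!(can_remove_store(true, 5, &[7], Some(1), 2));
    // Same store, but v5 is part of the return values: keep it.
    assert!(!can_remove_store(true, 5, &[5], Some(1), 2));
}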
1 change: 1 addition & 0 deletions noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs
@@ -13,6 +13,7 @@ mod die;
pub(crate) mod flatten_cfg;
mod inlining;
mod mem2reg;
mod normalize_value_ids;
mod rc;
mod remove_bit_shifts;
mod remove_enable_side_effects;