From df73fe2f345422516bfa01462c0c76d3b924b772 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?=
Date: Fri, 5 Jul 2024 15:22:12 +0200
Subject: [PATCH 1/5] fix: Complete call stacks with no_predicates (#5418)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

# Description

## Problem\*

Consider the following code:

```rust
fn inner(input: Field) {
    assert_eq(input, 27);
}

#[no_predicates]
fn no_predicates(input: Field) {
    inner(input)
}

fn outer_pass_through(input: Field) {
    no_predicates(input)
}

fn main(input: Field) {
    outer_pass_through(input)
}
```

This is the call stack with `#[no_predicates]`:

```
   ┌─ /mnt/user-data/alvaro/constructor/src/main.nr:23:5
   │
23 │     assert_eq(input, 27);
   │     --------------------
   │
   = Call stack:
     1. /mnt/user-data/alvaro/constructor/src/main.nr:32:5
     2. /mnt/user-data/alvaro/constructor/src/main.nr:28:5
     3. /mnt/user-data/alvaro/constructor/src/main.nr:23:5
```

This is the call stack without `#[no_predicates]`:

```
error: Failed constraint
   ┌─ /mnt/user-data/alvaro/constructor/src/main.nr:23:5
   │
23 │     assert_eq(input, 27);
   │     --------------------
   │
   = Call stack:
     1. /mnt/user-data/alvaro/constructor/src/main.nr:35:5
     2. /mnt/user-data/alvaro/constructor/src/main.nr:31:5
     3. /mnt/user-data/alvaro/constructor/src/main.nr:27:5
     4. /mnt/user-data/alvaro/constructor/src/main.nr:23:5
```

Inlining with `#[no_predicates]` was dropping the `outer_pass_through` frame from the call stack.

## Summary\*

The inliner no longer assumes that there is at most one location in the source call stack.

## Additional Context

## Documentation\*

Check one:

- [x] No documentation needed.
- [ ] Documentation included in this PR.
- [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR.

# PR Checklist\*

- [x] I have tested the changes locally.
- [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
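Concretely, the inliner now pushes every location attached to the call site before inlining and pops the same number afterwards, instead of pushing and popping at most one. The snippet below is a condensed view of the change (the full diff follows), not a standalone function:

```rust
// Condensed from the diff below: push the call's entire source call stack,
// inline the callee, then drop exactly the locations that were pushed.
let call_stack = self.source_function.dfg.get_call_stack(call_id);
let call_stack_len = call_stack.len();
self.context.call_stack.append(call_stack);

let new_results = self.context.inline_function(ssa, function, &arguments);

// Restore the caller's call stack to its previous length.
self.context.call_stack.truncate(self.context.call_stack.len() - call_stack_len);
```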
--- compiler/noirc_evaluator/src/ssa/opt/inlining.rs | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index e2a7f51d0a0..7dda0ac7787 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -517,19 +517,13 @@ impl<'function> PerFunctionContext<'function> { let old_results = self.source_function.dfg.instruction_results(call_id); let arguments = vecmap(arguments, |arg| self.translate_value(*arg)); - let mut call_stack = self.source_function.dfg.get_call_stack(call_id); - let has_location = !call_stack.is_empty(); - - // Function calls created by the defunctionalization pass will not have source locations - if let Some(location) = call_stack.pop_back() { - self.context.call_stack.push_back(location); - } + let call_stack = self.source_function.dfg.get_call_stack(call_id); + let call_stack_len = call_stack.len(); + self.context.call_stack.append(call_stack); let new_results = self.context.inline_function(ssa, function, &arguments); - if has_location { - self.context.call_stack.pop_back(); - } + self.context.call_stack.truncate(self.context.call_stack.len() - call_stack_len); let new_results = InsertInstructionResult::Results(call_id, &new_results); Self::insert_new_instruction_results(&mut self.values, old_results, new_results); From 44abe7fd99bd728929370026940e0e69e6ce876f Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Fri, 5 Jul 2024 16:46:19 +0100 Subject: [PATCH 2/5] chore: add test and benchmarks for poseidon2 (#5386) # Description ## Problem\* Resolves ## Summary\* This PR adds benchmarks for poseidon2 ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
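For reference, the hash call exercised by these benchmarks matches the new `poseidon2` execution test added in this PR (condensed from the diff below), which the docs now pull in via `#include_code`:

```rust
use std::hash::poseidon2;

fn main(inputs: [Field; 4], expected_hash: Field) {
    // Poseidon2 hashes the input array; the second argument is the message size.
    let hash = poseidon2::Poseidon2::hash(inputs, inputs.len());
    assert_eq(hash, expected_hash);
}
```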
--- .../cryptographic_primitives/hashes.mdx | 4 +- .../bench_poseidon2_hash/Nargo.toml | 7 ++ .../bench_poseidon2_hash/Prover.toml | 1 + .../bench_poseidon2_hash/src/main.nr | 5 + .../bench_poseidon2_hash_100/Nargo.toml | 7 ++ .../bench_poseidon2_hash_100/Prover.toml | 102 ++++++++++++++++++ .../bench_poseidon2_hash_100/src/main.nr | 12 +++ .../bench_poseidon2_hash_30/Nargo.toml | 7 ++ .../bench_poseidon2_hash_30/Prover.toml | 32 ++++++ .../bench_poseidon2_hash_30/src/main.nr | 12 +++ .../bench_poseidon_hash_100/Nargo.toml | 7 ++ .../bench_poseidon_hash_100/Prover.toml | 102 ++++++++++++++++++ .../bench_poseidon_hash_100/src/main.nr | 12 +++ .../bench_poseidon_hash_30/Nargo.toml | 7 ++ .../bench_poseidon_hash_30/Prover.toml | 32 ++++++ .../bench_poseidon_hash_30/src/main.nr | 12 +++ .../execution_success/poseidon2/Nargo.toml | 6 ++ .../execution_success/poseidon2/Prover.toml | 5 + .../execution_success/poseidon2/src/main.nr | 8 ++ .../poseidon_bn254_hash/Prover.toml | 5 - .../poseidon_bn254_hash/src/main.nr | 6 +- 21 files changed, 380 insertions(+), 11 deletions(-) create mode 100644 test_programs/benchmarks/bench_poseidon2_hash/Nargo.toml create mode 100644 test_programs/benchmarks/bench_poseidon2_hash/Prover.toml create mode 100644 test_programs/benchmarks/bench_poseidon2_hash/src/main.nr create mode 100644 test_programs/benchmarks/bench_poseidon2_hash_100/Nargo.toml create mode 100644 test_programs/benchmarks/bench_poseidon2_hash_100/Prover.toml create mode 100644 test_programs/benchmarks/bench_poseidon2_hash_100/src/main.nr create mode 100644 test_programs/benchmarks/bench_poseidon2_hash_30/Nargo.toml create mode 100644 test_programs/benchmarks/bench_poseidon2_hash_30/Prover.toml create mode 100644 test_programs/benchmarks/bench_poseidon2_hash_30/src/main.nr create mode 100644 test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/Nargo.toml create mode 100644 test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/Prover.toml create mode 100644 test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/src/main.nr create mode 100644 test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/Nargo.toml create mode 100644 test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/Prover.toml create mode 100644 test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/src/main.nr create mode 100644 test_programs/execution_success/poseidon2/Nargo.toml create mode 100644 test_programs/execution_success/poseidon2/Prover.toml create mode 100644 test_programs/execution_success/poseidon2/src/main.nr diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx index 05a2bb983a1..0abd7a12a22 100644 --- a/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx +++ b/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -127,7 +127,9 @@ function, there is only one hash and you can specify a message_size to hash only Poseidon2::hash(input, 3); ``` -The above example for Poseidon also includes Poseidon2. 
+example: + +#include_code poseidon2 test_programs/execution_success/poseidon2/src/main.nr rust ## mimc_bn254 and mimc diff --git a/test_programs/benchmarks/bench_poseidon2_hash/Nargo.toml b/test_programs/benchmarks/bench_poseidon2_hash/Nargo.toml new file mode 100644 index 00000000000..47bf6af3c27 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon2_hash/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "bench_poseidon2_hash" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/benchmarks/bench_poseidon2_hash/Prover.toml b/test_programs/benchmarks/bench_poseidon2_hash/Prover.toml new file mode 100644 index 00000000000..66779dea9d7 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon2_hash/Prover.toml @@ -0,0 +1 @@ +input = [1,2] diff --git a/test_programs/benchmarks/bench_poseidon2_hash/src/main.nr b/test_programs/benchmarks/bench_poseidon2_hash/src/main.nr new file mode 100644 index 00000000000..a3528253f5d --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon2_hash/src/main.nr @@ -0,0 +1,5 @@ +use std::hash::poseidon2; + +fn main(input: [Field; 2]) -> pub Field { + poseidon2::Poseidon2::hash(input, input.len()) +} diff --git a/test_programs/benchmarks/bench_poseidon2_hash_100/Nargo.toml b/test_programs/benchmarks/bench_poseidon2_hash_100/Nargo.toml new file mode 100644 index 00000000000..4a8bd608939 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon2_hash_100/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "bench_poseidon2_hash_100" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/benchmarks/bench_poseidon2_hash_100/Prover.toml b/test_programs/benchmarks/bench_poseidon2_hash_100/Prover.toml new file mode 100644 index 00000000000..542b4a08dd6 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon2_hash_100/Prover.toml @@ -0,0 +1,102 @@ +input = [ + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], +] \ No newline at end of file diff --git a/test_programs/benchmarks/bench_poseidon2_hash_100/src/main.nr b/test_programs/benchmarks/bench_poseidon2_hash_100/src/main.nr new file mode 100644 index 00000000000..39c714e524f --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon2_hash_100/src/main.nr @@ -0,0 +1,12 @@ +use std::hash::poseidon2; + +global SIZE = 100; + +fn main(input: [[Field; 2]; SIZE]) -> pub [Field; SIZE] { + let mut results: [Field; SIZE] = [0; SIZE]; + for i in 0..SIZE { + results[i] = poseidon2::Poseidon2::hash(input[i], 2); + } + + results +} diff --git a/test_programs/benchmarks/bench_poseidon2_hash_30/Nargo.toml b/test_programs/benchmarks/bench_poseidon2_hash_30/Nargo.toml new file mode 100644 index 00000000000..0a245bb572e 
--- /dev/null +++ b/test_programs/benchmarks/bench_poseidon2_hash_30/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "bench_poseidon2_hash_30" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/benchmarks/bench_poseidon2_hash_30/Prover.toml b/test_programs/benchmarks/bench_poseidon2_hash_30/Prover.toml new file mode 100644 index 00000000000..7792a9ab8e3 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon2_hash_30/Prover.toml @@ -0,0 +1,32 @@ +input = [ + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], +] \ No newline at end of file diff --git a/test_programs/benchmarks/bench_poseidon2_hash_30/src/main.nr b/test_programs/benchmarks/bench_poseidon2_hash_30/src/main.nr new file mode 100644 index 00000000000..d1251a4c853 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon2_hash_30/src/main.nr @@ -0,0 +1,12 @@ +use std::hash::poseidon2; + +global SIZE = 30; + +fn main(input: [[Field; 2]; SIZE]) -> pub [Field; SIZE] { + let mut results: [Field; SIZE] = [0; SIZE]; + for i in 0..SIZE { + results[i] = poseidon2::Poseidon2::hash(input[i], 2); + } + + results +} diff --git a/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/Nargo.toml b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/Nargo.toml new file mode 100644 index 00000000000..b23716b2a20 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "bench_poseidon_hash_100" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/Prover.toml b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/Prover.toml new file mode 100644 index 00000000000..542b4a08dd6 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/Prover.toml @@ -0,0 +1,102 @@ +input = [ + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], +] \ No newline at end of file diff --git a/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/src/main.nr b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/src/main.nr new file mode 100644 index 00000000000..1c9bbfe61bf --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_100/src/main.nr @@ -0,0 +1,12 @@ +use std::hash; + +global SIZE = 100; + +fn main(input: [[Field; 2]; SIZE]) -> pub [Field; SIZE] { + let mut results: 
[Field; SIZE] = [0; SIZE]; + for i in 0..SIZE { + results[i] = hash::poseidon::bn254::hash_2(input[i]); + } + + results +} diff --git a/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/Nargo.toml b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/Nargo.toml new file mode 100644 index 00000000000..dbcbc07b1ba --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "bench_poseidon_hash_30" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/Prover.toml b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/Prover.toml new file mode 100644 index 00000000000..7792a9ab8e3 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/Prover.toml @@ -0,0 +1,32 @@ +input = [ + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], + [1,2], +] \ No newline at end of file diff --git a/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/src/main.nr b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/src/main.nr new file mode 100644 index 00000000000..3edb47e9f72 --- /dev/null +++ b/test_programs/benchmarks/bench_poseidon_hash/bench_poseidon_hash_30/src/main.nr @@ -0,0 +1,12 @@ +use std::hash; + +global SIZE = 30; + +fn main(input: [[Field; 2]; SIZE]) -> pub [Field; SIZE] { + let mut results: [Field; SIZE] = [0; SIZE]; + for i in 0..SIZE { + results[i] = hash::poseidon::bn254::hash_2(input[i]); + } + + results +} diff --git a/test_programs/execution_success/poseidon2/Nargo.toml b/test_programs/execution_success/poseidon2/Nargo.toml new file mode 100644 index 00000000000..ad812b40398 --- /dev/null +++ b/test_programs/execution_success/poseidon2/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "poseidon2" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/poseidon2/Prover.toml b/test_programs/execution_success/poseidon2/Prover.toml new file mode 100644 index 00000000000..b795ec36904 --- /dev/null +++ b/test_programs/execution_success/poseidon2/Prover.toml @@ -0,0 +1,5 @@ +inputs = ["4218458030232820015255714794613421442512497197372123294583664908262453897094", + "4218458030232820015255714794613421442512497197372123294583664908262453897094", + "4218458030232820015255714794613421442512497197372123294583664908262453897094", + "4218458030232820015255714794613421442512497197372123294583664908262453897094"] +expected_hash = "0x2f43a0f83b51a6f5fc839dea0ecec74947637802a579fa9841930a25a0bcec11" diff --git a/test_programs/execution_success/poseidon2/src/main.nr b/test_programs/execution_success/poseidon2/src/main.nr new file mode 100644 index 00000000000..3186617bfc8 --- /dev/null +++ b/test_programs/execution_success/poseidon2/src/main.nr @@ -0,0 +1,8 @@ +// docs:start:poseidon2 +use std::hash::poseidon2; + +fn main(inputs: [Field; 4], expected_hash: Field) { + let hash = poseidon2::Poseidon2::hash(inputs, inputs.len()); + assert_eq(hash, expected_hash); +} +// docs:end:poseidon2 diff --git a/test_programs/execution_success/poseidon_bn254_hash/Prover.toml b/test_programs/execution_success/poseidon_bn254_hash/Prover.toml index fa6fd05b0a3..8eecf9a3db2 100644 --- 
a/test_programs/execution_success/poseidon_bn254_hash/Prover.toml +++ b/test_programs/execution_success/poseidon_bn254_hash/Prover.toml @@ -2,8 +2,3 @@ x1 = [1,2] y1 = "0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a" x2 = [1,2,3,4] y2 = "0x299c867db6c1fdd79dcefa40e4510b9837e60ebb1ce0663dbaa525df65250465" -x3 = ["4218458030232820015255714794613421442512497197372123294583664908262453897094", - "4218458030232820015255714794613421442512497197372123294583664908262453897094", - "4218458030232820015255714794613421442512497197372123294583664908262453897094", - "4218458030232820015255714794613421442512497197372123294583664908262453897094"] - y3 = "0x2f43a0f83b51a6f5fc839dea0ecec74947637802a579fa9841930a25a0bcec11" diff --git a/test_programs/execution_success/poseidon_bn254_hash/src/main.nr b/test_programs/execution_success/poseidon_bn254_hash/src/main.nr index cf1c190e5c9..5ea02d53e96 100644 --- a/test_programs/execution_success/poseidon_bn254_hash/src/main.nr +++ b/test_programs/execution_success/poseidon_bn254_hash/src/main.nr @@ -1,15 +1,11 @@ // docs:start:poseidon use std::hash::poseidon; -use std::hash::poseidon2; -fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field, x3: [Field; 4], y3: Field) { +fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { let hash1 = poseidon::bn254::hash_2(x1); assert(hash1 == y1); let hash2 = poseidon::bn254::hash_4(x2); assert(hash2 == y2); - - let hash3 = poseidon2::Poseidon2::hash(x3, x3.len()); - assert(hash3 == y3); } // docs:end:poseidon From 688448f656a14f1160f83ea201db69805e80d053 Mon Sep 17 00:00:00 2001 From: jfecher Date: Fri, 5 Jul 2024 13:14:15 -0500 Subject: [PATCH 3/5] chore: Add remaining slice methods to the interpreter (#5422) # Description ## Problem\* ## Summary\* Adds the remaining builtin slice methods to the interpreter. Also adds `is_unconstrained` which always returns true. This way `comptime` can also take advantage of any `if is_unconstrained()` optimizations (like `break`) used in unconstrained code in called functions. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
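For example, a `comptime` block like the following (condensed from the `comptime_slice_methods` test added in this PR) now evaluates in the interpreter:

```rust
fn main() {
    comptime {
        // Comptime evaluation is treated as unconstrained.
        assert(std::runtime::is_unconstrained());

        // The slice builtins are now implemented in the interpreter.
        let slice = &[2];
        let slice = slice.push_back(3);
        let slice = slice.push_front(1);
        assert_eq(slice, &[1, 2, 3]);

        let (one, slice) = slice.pop_front();
        assert_eq(one, 1);
        assert_eq(slice, &[2, 3]);
    }
}
```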
--- .../src/hir/comptime/interpreter/builtin.rs | 137 ++++++++++++++++-- .../noir/standard_library/is_unconstrained.md | 12 +- .../comptime_slice_methods/Nargo.toml | 7 + .../comptime_slice_methods/src/main.nr | 27 ++++ tooling/nargo_cli/build.rs | 3 +- 5 files changed, 170 insertions(+), 16 deletions(-) create mode 100644 test_programs/compile_success_empty/comptime_slice_methods/Nargo.toml create mode 100644 test_programs/compile_success_empty/comptime_slice_methods/src/main.nr diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 89c0b1d438e..399d9905269 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -3,8 +3,9 @@ use std::rc::Rc; use noirc_errors::Location; use crate::{ + ast::IntegerBitSize, hir::comptime::{errors::IResult, InterpreterError, Value}, - macros_api::NodeInterner, + macros_api::{NodeInterner, Signedness}, token::{SpannedToken, Token, Tokens}, QuotedType, Type, }; @@ -18,10 +19,16 @@ pub(super) fn call_builtin( match name { "array_len" => array_len(interner, arguments, location), "as_slice" => as_slice(interner, arguments, location), + "is_unconstrained" => Ok(Value::Bool(true)), + "slice_insert" => slice_insert(interner, arguments, location), + "slice_pop_back" => slice_pop_back(interner, arguments, location), + "slice_pop_front" => slice_pop_front(interner, arguments, location), "slice_push_back" => slice_push_back(interner, arguments, location), + "slice_push_front" => slice_push_front(interner, arguments, location), + "slice_remove" => slice_remove(interner, arguments, location), "struct_def_as_type" => struct_def_as_type(interner, arguments, location), - "struct_def_generics" => struct_def_generics(interner, arguments, location), "struct_def_fields" => struct_def_fields(interner, arguments, location), + "struct_def_generics" => struct_def_generics(interner, arguments, location), _ => { let item = format!("Comptime evaluation for builtin function {name}"); Err(InterpreterError::Unimplemented { item, location }) @@ -42,6 +49,36 @@ fn check_argument_count( } } +fn failing_constraint(message: impl Into, location: Location) -> IResult { + let message = Some(Value::String(Rc::new(message.into()))); + Err(InterpreterError::FailingConstraint { message, location }) +} + +fn get_slice( + interner: &NodeInterner, + value: Value, + location: Location, +) -> IResult<(im::Vector, Type)> { + match value { + Value::Slice(values, typ) => Ok((values, typ)), + value => { + let type_var = Box::new(interner.next_type_variable()); + let expected = Type::Slice(type_var); + Err(InterpreterError::TypeMismatch { expected, value, location }) + } + } +} + +fn get_u32(value: Value, location: Location) -> IResult { + match value { + Value::U32(value) => Ok(value), + value => { + let expected = Type::Integer(Signedness::Unsigned, IntegerBitSize::ThirtyTwo); + Err(InterpreterError::TypeMismatch { expected, value, location }) + } + } +} + fn array_len( interner: &NodeInterner, mut arguments: Vec<(Value, Location)>, @@ -85,18 +122,9 @@ fn slice_push_back( check_argument_count(2, &arguments, location)?; let (element, _) = arguments.pop().unwrap(); - let (slice, _) = arguments.pop().unwrap(); - match slice { - Value::Slice(mut values, typ) => { - values.push_back(element); - Ok(Value::Slice(values, typ)) - } - value => { - let type_var = Box::new(interner.next_type_variable()); - let expected = 
Type::Slice(type_var); - Err(InterpreterError::TypeMismatch { expected, value, location }) - } - } + let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + values.push_back(element); + Ok(Value::Slice(values, typ)) } /// fn as_type(self) -> Quoted @@ -198,3 +226,84 @@ fn struct_def_fields( ]))); Ok(Value::Slice(fields, typ)) } + +fn slice_remove( + interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> Result { + check_argument_count(2, &arguments, location)?; + + let index = get_u32(arguments.pop().unwrap().0, location)? as usize; + let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + + if values.is_empty() { + return failing_constraint("slice_remove called on empty slice", location); + } + + if index >= values.len() { + let message = format!( + "slice_remove: index {index} is out of bounds for a slice of length {}", + values.len() + ); + return failing_constraint(message, location); + } + + let element = values.remove(index); + Ok(Value::Tuple(vec![Value::Slice(values, typ), element])) +} + +fn slice_push_front( + interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> Result { + check_argument_count(2, &arguments, location)?; + + let (element, _) = arguments.pop().unwrap(); + let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + values.push_front(element); + Ok(Value::Slice(values, typ)) +} + +fn slice_pop_front( + interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> Result { + check_argument_count(1, &arguments, location)?; + + let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + match values.pop_front() { + Some(element) => Ok(Value::Tuple(vec![element, Value::Slice(values, typ)])), + None => failing_constraint("slice_pop_front called on empty slice", location), + } +} + +fn slice_pop_back( + interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> Result { + check_argument_count(1, &arguments, location)?; + + let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + match values.pop_back() { + Some(element) => Ok(Value::Tuple(vec![Value::Slice(values, typ), element])), + None => failing_constraint("slice_pop_back called on empty slice", location), + } +} + +fn slice_insert( + interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> Result { + check_argument_count(3, &arguments, location)?; + + let (element, _) = arguments.pop().unwrap(); + let index = get_u32(arguments.pop().unwrap().0, location)?; + let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + values.insert(index as usize, element); + Ok(Value::Slice(values, typ)) +} diff --git a/docs/docs/noir/standard_library/is_unconstrained.md b/docs/docs/noir/standard_library/is_unconstrained.md index bb157e719dc..51bb1bda8f1 100644 --- a/docs/docs/noir/standard_library/is_unconstrained.md +++ b/docs/docs/noir/standard_library/is_unconstrained.md @@ -56,4 +56,14 @@ pub fn external_interface(){ ``` -The is_unconstrained result is resolved at compile time, so in unconstrained contexts the compiler removes the else branch, and in constrained contexts the compiler removes the if branch, reducing the amount of compute necessary to run external_interface. 
\ No newline at end of file +The is_unconstrained result is resolved at compile time, so in unconstrained contexts the compiler removes the else branch, and in constrained contexts the compiler removes the if branch, reducing the amount of compute necessary to run external_interface. + +Note that using `is_unconstrained` in a `comptime` context will also return `true`: + +``` +fn main() { + comptime { + assert(is_unconstrained()); + } +} +``` diff --git a/test_programs/compile_success_empty/comptime_slice_methods/Nargo.toml b/test_programs/compile_success_empty/comptime_slice_methods/Nargo.toml new file mode 100644 index 00000000000..8ff281cf9e3 --- /dev/null +++ b/test_programs/compile_success_empty/comptime_slice_methods/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "comptime_slice_methods" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/comptime_slice_methods/src/main.nr b/test_programs/compile_success_empty/comptime_slice_methods/src/main.nr new file mode 100644 index 00000000000..b76e8d70b83 --- /dev/null +++ b/test_programs/compile_success_empty/comptime_slice_methods/src/main.nr @@ -0,0 +1,27 @@ +fn main() { + comptime + { + // Comptime scopes are counted as unconstrained + assert(std::runtime::is_unconstrained()); + + let slice = &[2]; + let slice = slice.push_back(3); + let slice = slice.push_front(1); + assert_eq(slice, &[1, 2, 3]); + + let slice = slice.insert(1, 10); + assert_eq(slice, &[1, 10, 2, 3]); + + let (slice, ten) = slice.remove(1); + assert_eq(slice, &[1, 2, 3]); + assert_eq(ten, 10); + + let (one, slice) = slice.pop_front(); + assert_eq(slice, &[2, 3]); + assert_eq(one, 1); + + let (slice, three) = slice.pop_back(); + assert_eq(slice, &[2]); + assert_eq(three, 3); + } +} diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 0fbdaaba0b4..faac228cb63 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -61,13 +61,14 @@ const IGNORED_BRILLIG_TESTS: [&str; 11] = [ /// Certain features are only available in the elaborator. /// We skip these tests for non-elaborator code since they are not /// expected to work there. This can be removed once the old code is removed. -const IGNORED_NEW_FEATURE_TESTS: [&str; 6] = [ +const IGNORED_NEW_FEATURE_TESTS: [&str; 7] = [ "macros", "wildcard_type", "type_definition_annotation", "numeric_generics_explicit", "derive_impl", "comptime_traits", + "comptime_slice_methods", ]; fn read_test_cases( From 7ea83a9de4d3096d27e79faf5d8081b9e9108c4a Mon Sep 17 00:00:00 2001 From: Innokentii Sennovskii Date: Fri, 5 Jul 2024 19:31:37 +0100 Subject: [PATCH 4/5] feat: Detect subgraphs that are completely independent from inputs or outputs (#5402) # Description ## Problem\* Currently a developer can create a circuit that would use unconstrained values in such a way, that a part of the circuit becomes completely disconnected from the circuit inputs or outputs, making this part completely useless as it can be taken out of any proof. This PR introduces an Ssa pass that informs the user of this issue. ## Summary\* An Ssa pass that detects some underconstraining issues ![image](https://github.com/noir-lang/noir/assets/4798775/cb5849d2-715b-4a6c-9947-575c2938f37a) ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. 
- [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_errors/src/reporter.rs | 26 +- compiler/noirc_evaluator/src/errors.rs | 20 + compiler/noirc_evaluator/src/ssa.rs | 20 +- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 2 + .../opt/check_for_underconstrained_values.rs | 411 ++++++++++++++++++ compiler/noirc_evaluator/src/ssa/opt/mod.rs | 1 + tooling/lsp/src/notifications/mod.rs | 1 + 7 files changed, 475 insertions(+), 6 deletions(-) create mode 100644 compiler/noirc_evaluator/src/ssa/opt/check_for_underconstrained_values.rs diff --git a/compiler/noirc_errors/src/reporter.rs b/compiler/noirc_errors/src/reporter.rs index d817b48691f..ea7c97b1f25 100644 --- a/compiler/noirc_errors/src/reporter.rs +++ b/compiler/noirc_errors/src/reporter.rs @@ -17,6 +17,7 @@ pub struct CustomDiagnostic { #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum DiagnosticKind { Error, + Bug, Warning, } @@ -62,6 +63,19 @@ impl CustomDiagnostic { } } + pub fn simple_bug( + primary_message: String, + secondary_message: String, + secondary_span: Span, + ) -> CustomDiagnostic { + CustomDiagnostic { + message: primary_message, + secondaries: vec![CustomLabel::new(secondary_message, secondary_span)], + notes: Vec::new(), + kind: DiagnosticKind::Bug, + } + } + pub fn in_file(self, file_id: fm::FileId) -> FileDiagnostic { FileDiagnostic::new(file_id, self) } @@ -81,6 +95,10 @@ impl CustomDiagnostic { pub fn is_warning(&self) -> bool { matches!(self.kind, DiagnosticKind::Warning) } + + pub fn is_bug(&self) -> bool { + matches!(self.kind, DiagnosticKind::Bug) + } } impl std::fmt::Display for CustomDiagnostic { @@ -120,10 +138,13 @@ pub fn report_all<'files>( silence_warnings: bool, ) -> ReportedErrors { // Report warnings before any errors - let (warnings, mut errors): (Vec<_>, _) = - diagnostics.iter().partition(|item| item.diagnostic.is_warning()); + let (warnings_and_bugs, mut errors): (Vec<_>, _) = + diagnostics.iter().partition(|item| !item.diagnostic.is_error()); + let (warnings, mut bugs): (Vec<_>, _) = + warnings_and_bugs.iter().partition(|item| item.diagnostic.is_warning()); let mut diagnostics = if silence_warnings { Vec::new() } else { warnings }; + diagnostics.append(&mut bugs); diagnostics.append(&mut errors); let error_count = @@ -170,6 +191,7 @@ fn convert_diagnostic( ) -> Diagnostic { let diagnostic = match (cd.kind, deny_warnings) { (DiagnosticKind::Warning, false) => Diagnostic::warning(), + (DiagnosticKind::Bug, ..) => Diagnostic::bug(), _ => Diagnostic::error(), }; diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index 0879056ad36..2725abb1f63 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -56,6 +56,7 @@ pub enum RuntimeError { #[derive(Debug, Clone, Serialize, Deserialize)] pub enum SsaReport { Warning(InternalWarning), + Bug(InternalBug), } impl From for FileDiagnostic { @@ -78,6 +79,19 @@ impl From for FileDiagnostic { Diagnostic::simple_warning(message, secondary_message, location.span); diagnostic.in_file(file_id).with_call_stack(call_stack) } + SsaReport::Bug(bug) => { + let message = bug.to_string(); + let (secondary_message, call_stack) = match bug { + InternalBug::IndependentSubgraph { call_stack } => { + ("There is no path from the output of this brillig call to either return values or inputs of the circuit, which creates an independent subgraph. 
This is quite likely a soundness vulnerability".to_string(),call_stack) + } + }; + let call_stack = vecmap(call_stack, |location| location); + let file_id = call_stack.last().map(|location| location.file).unwrap_or_default(); + let location = call_stack.last().expect("Expected RuntimeError to have a location"); + let diagnostic = Diagnostic::simple_bug(message, secondary_message, location.span); + diagnostic.in_file(file_id).with_call_stack(call_stack) + } } } } @@ -90,6 +104,12 @@ pub enum InternalWarning { VerifyProof { call_stack: CallStack }, } +#[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize)] +pub enum InternalBug { + #[error("Input to brillig function is in a separate subgraph to output")] + IndependentSubgraph { call_stack: CallStack }, +} + #[derive(Debug, PartialEq, Eq, Clone, Error)] pub enum InternalError { #[error("ICE: Both expressions should have degree<=1")] diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index 0e0c3f1e631..e1182f17bcd 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -41,6 +41,8 @@ pub mod ir; mod opt; pub mod ssa_gen; +pub(crate) struct ArtifactsAndWarnings(Artifacts, Vec); + /// Optimize the given program by converting it into SSA /// form and performing optimizations there. When finished, /// convert the final SSA into an ACIR program and return it. @@ -49,10 +51,10 @@ pub mod ssa_gen; pub(crate) fn optimize_into_acir( program: Program, options: &SsaEvaluatorOptions, -) -> Result { +) -> Result { let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); let ssa_gen_span_guard = ssa_gen_span.enter(); - let ssa = SsaBuilder::new( + let mut ssa = SsaBuilder::new( program, options.enable_ssa_logging, options.force_brillig_output, @@ -89,13 +91,15 @@ pub(crate) fn optimize_into_acir( .run_pass(Ssa::array_set_optimization, "After Array Set Optimizations:") .finish(); + let ssa_level_warnings = ssa.check_for_underconstrained_values(); let brillig = time("SSA to Brillig", options.print_codegen_timings, || { ssa.to_brillig(options.enable_brillig_logging) }); drop(ssa_gen_span_guard); - time("SSA to ACIR", options.print_codegen_timings, || ssa.into_acir(&brillig)) + let artifacts = time("SSA to ACIR", options.print_codegen_timings, || ssa.into_acir(&brillig))?; + Ok(ArtifactsAndWarnings(artifacts, ssa_level_warnings)) } // Helper to time SSA passes @@ -149,6 +153,10 @@ impl SsaProgramArtifact { } self.names.push(circuit_artifact.name); } + + fn add_warnings(&mut self, mut warnings: Vec) { + self.warnings.append(&mut warnings); + } } pub struct SsaEvaluatorOptions { @@ -179,7 +187,8 @@ pub fn create_program( let func_sigs = program.function_signatures.clone(); let recursive = program.recursive; - let (generated_acirs, generated_brillig, error_types) = optimize_into_acir(program, options)?; + let ArtifactsAndWarnings((generated_acirs, generated_brillig, error_types), ssa_level_warnings) = + optimize_into_acir(program, options)?; assert_eq!( generated_acirs.len(), func_sigs.len(), @@ -187,6 +196,9 @@ pub fn create_program( ); let mut program_artifact = SsaProgramArtifact::new(generated_brillig, error_types); + + // Add warnings collected at the Ssa stage + program_artifact.add_warnings(ssa_level_warnings); // For setting up the ABI we need separately specify main's input and return witnesses let mut is_main = true; for (acir, func_sig) in generated_acirs.into_iter().zip(func_sigs) { diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs 
b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index aaa483b91c9..3b7d2c1025f 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -431,6 +431,8 @@ impl<'a> Context<'a> { } warnings.extend(return_warnings); + + // Add the warnings from the alter Ssa passes Ok(self.acir_context.finish(input_witness, return_witnesses, warnings)) } diff --git a/compiler/noirc_evaluator/src/ssa/opt/check_for_underconstrained_values.rs b/compiler/noirc_evaluator/src/ssa/opt/check_for_underconstrained_values.rs new file mode 100644 index 00000000000..6dac99d7a09 --- /dev/null +++ b/compiler/noirc_evaluator/src/ssa/opt/check_for_underconstrained_values.rs @@ -0,0 +1,411 @@ +//! This module defines an SSA pass that detects if the final function has any subgraphs independent from inputs and outputs. +//! If this is the case, then part of the final circuit can be completely replaced by any other passing circuit, since there are no constraints ensuring connections. +//! So the compiler informs the developer of this as a bug +use im::HashMap; + +use crate::errors::{InternalBug, SsaReport}; +use crate::ssa::ir::basic_block::BasicBlockId; +use crate::ssa::ir::function::RuntimeType; +use crate::ssa::ir::function::{Function, FunctionId}; +use crate::ssa::ir::instruction::{Instruction, InstructionId, Intrinsic}; +use crate::ssa::ir::value::{Value, ValueId}; +use crate::ssa::ssa_gen::Ssa; +use std::collections::{BTreeMap, HashSet}; + +impl Ssa { + /// Go through each top-level non-brillig function and detect if it has independent subgraphs + #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn check_for_underconstrained_values(&mut self) -> Vec { + let mut warnings: Vec = Vec::new(); + for function in self.functions.values() { + match function.runtime() { + RuntimeType::Acir { .. 
} => { + warnings.extend(check_for_underconstrained_values_within_function( + function, + &self.functions, + )); + } + RuntimeType::Brillig => (), + } + } + warnings + } +} + +/// Detect independent subgraphs (not connected to function inputs or outputs) and return a vector of bug reports if some are found +fn check_for_underconstrained_values_within_function( + function: &Function, + all_functions: &BTreeMap, +) -> Vec { + let mut context = Context::default(); + let mut warnings: Vec = Vec::new(); + + context.compute_sets_of_connected_value_ids(function, all_functions); + + let all_brillig_generated_values: HashSet = + context.brillig_return_to_argument.keys().copied().collect(); + + let connected_sets_indices = + context.find_sets_connected_to_function_inputs_or_outputs(function); + + // Go through each disconnected set, find brillig calls that caused it and form warnings + for set_index in + HashSet::from_iter(0..(context.value_sets.len())).difference(&connected_sets_indices) + { + let current_set = &context.value_sets[*set_index]; + warnings.append(&mut context.find_disconnecting_brillig_calls_with_results_in_set( + current_set, + &all_brillig_generated_values, + function, + )); + } + warnings +} +#[derive(Default)] +struct Context { + visited_blocks: HashSet, + block_queue: Vec, + value_sets: Vec>, + brillig_return_to_argument: HashMap>, + brillig_return_to_instruction_id: HashMap, +} + +impl Context { + /// Compute sets of variable ValueIds that are connected with constraints + /// + /// Additionally, store information about brillig calls in the context + fn compute_sets_of_connected_value_ids( + &mut self, + function: &Function, + all_functions: &BTreeMap, + ) { + // Go through each block in the function and create a list of sets of ValueIds connected by instructions + self.block_queue.push(function.entry_block()); + while let Some(block) = self.block_queue.pop() { + if self.visited_blocks.contains(&block) { + continue; + } + self.visited_blocks.insert(block); + self.connect_value_ids_in_block(function, block, all_functions); + } + + // Merge ValueIds into sets, where each original small set of ValueIds is merged with another set if they intersect + self.merge_sets(); + } + + /// Find sets that contain input or output value of the function + /// + /// Goes through each set of connected ValueIds and see if function arguments or return values are in the set + fn find_sets_connected_to_function_inputs_or_outputs( + &mut self, + function: &Function, + ) -> HashSet { + let variable_parameters_and_return_values = function + .parameters() + .iter() + .chain(function.returns()) + .filter(|id| function.dfg.get_numeric_constant(**id).is_none()) + .copied(); + + let mut connected_sets_indices: HashSet = HashSet::new(); + + // Go through each parameter and each set and check if the set contains the parameter + // If it's the case, then that set doesn't present an issue + for parameter_or_return_value in variable_parameters_and_return_values { + for (set_index, final_set) in self.value_sets.iter().enumerate() { + if final_set.contains(¶meter_or_return_value) { + connected_sets_indices.insert(set_index); + } + } + } + connected_sets_indices + } + + /// Find which brillig calls separate this set from others and return bug warnings about them + fn find_disconnecting_brillig_calls_with_results_in_set( + &self, + current_set: &HashSet, + all_brillig_generated_values: &HashSet, + function: &Function, + ) -> Vec { + let mut warnings = Vec::new(); + // Find brillig-generated values in the set + let 
intersection = all_brillig_generated_values.intersection(current_set).copied(); + + // Go through all brillig outputs in the set + for brillig_output_in_set in intersection { + // Get the inputs that correspond to the output + let inputs: HashSet = + self.brillig_return_to_argument[&brillig_output_in_set].iter().copied().collect(); + + // Check if any of them are not in the set + let unused_inputs = inputs.difference(current_set).next().is_some(); + + // There is a value not in the set, which means that the inputs/outputs of this call have not been properly constrained + if unused_inputs { + warnings.push(SsaReport::Bug(InternalBug::IndependentSubgraph { + call_stack: function.dfg.get_call_stack( + self.brillig_return_to_instruction_id[&brillig_output_in_set], + ), + })); + } + } + warnings + } + /// Go through each instruction in the block and add a set of ValueIds connected through that instruction + /// + /// Additionally, this function adds mappings of brillig return values to call arguments and instruction ids from calls to brillig functions in the block + fn connect_value_ids_in_block( + &mut self, + function: &Function, + block: BasicBlockId, + all_functions: &BTreeMap, + ) { + let instructions = function.dfg[block].instructions(); + + for instruction in instructions.iter() { + let mut instruction_arguments_and_results = HashSet::new(); + + // Insert non-constant instruction arguments + function.dfg[*instruction].for_each_value(|value_id| { + if function.dfg.get_numeric_constant(value_id).is_none() { + instruction_arguments_and_results.insert(value_id); + } + }); + // And non-constant results + for value_id in function.dfg.instruction_results(*instruction).iter() { + if function.dfg.get_numeric_constant(*value_id).is_none() { + instruction_arguments_and_results.insert(*value_id); + } + } + + // For most instructions we just connect inputs and outputs + match &function.dfg[*instruction] { + Instruction::ArrayGet { .. } + | Instruction::ArraySet { .. } + | Instruction::Binary(..) + | Instruction::Cast(..) + | Instruction::Constrain(..) + | Instruction::IfElse { .. } + | Instruction::Load { .. } + | Instruction::Not(..) + | Instruction::Store { .. } + | Instruction::Truncate { .. } => { + self.value_sets.push(instruction_arguments_and_results); + } + + Instruction::Call { func: func_id, arguments: argument_ids } => { + match &function.dfg[*func_id] { + Value::Intrinsic(intrinsic) => match intrinsic { + Intrinsic::ApplyRangeConstraint + | Intrinsic::AssertConstant + | Intrinsic::AsWitness + | Intrinsic::IsUnconstrained => {} + Intrinsic::ArrayLen + | Intrinsic::AsField + | Intrinsic::AsSlice + | Intrinsic::BlackBox(..) + | Intrinsic::DerivePedersenGenerators + | Intrinsic::FromField + | Intrinsic::SlicePushBack + | Intrinsic::SlicePushFront + | Intrinsic::SlicePopBack + | Intrinsic::SlicePopFront + | Intrinsic::SliceInsert + | Intrinsic::SliceRemove + | Intrinsic::StaticAssert + | Intrinsic::StrAsBytes + | Intrinsic::ToBits(..) + | Intrinsic::ToRadix(..) 
=> { + self.value_sets.push(instruction_arguments_and_results); + } + }, + Value::Function(callee) => match all_functions[&callee].runtime() { + RuntimeType::Brillig => { + // For calls to brillig functions we memorize the mapping of results to argument ValueId's and InstructionId's + // The latter are needed to produce the callstack later + for result in + function.dfg.instruction_results(*instruction).iter().filter( + |value_id| { + function.dfg.get_numeric_constant(**value_id).is_none() + }, + ) + { + self.brillig_return_to_argument + .insert(*result, argument_ids.clone()); + self.brillig_return_to_instruction_id + .insert(*result, *instruction); + } + } + RuntimeType::Acir(..) => { + self.value_sets.push(instruction_arguments_and_results); + } + }, + Value::ForeignFunction(..) => { + panic!("Should not be able to reach foreign function from non-brillig functions"); + } + Value::Array { .. } + | Value::Instruction { .. } + | Value::NumericConstant { .. } + | Value::Param { .. } => { + panic!("At the point we are running disconnect there shouldn't be any other values as arguments") + } + } + } + Instruction::Allocate { .. } + | Instruction::DecrementRc { .. } + | Instruction::EnableSideEffects { .. } + | Instruction::IncrementRc { .. } + | Instruction::RangeCheck { .. } => {} + } + } + + self.block_queue.extend(function.dfg[block].successors()); + } + + /// Merge all small sets into larger ones based on whether the sets intersect or not + /// + /// If two small sets have a common ValueId, we merge them into one + fn merge_sets(&mut self) { + let mut new_set_id: usize = 0; + let mut updated_sets: HashMap> = HashMap::new(); + let mut value_dictionary: HashMap = HashMap::new(); + let mut parsed_value_set: HashSet = HashSet::new(); + + // Go through each set + for set in self.value_sets.iter() { + // Check if the set has any of the ValueIds we've encountered at previous iterations + let intersection: HashSet = + set.intersection(&parsed_value_set).copied().collect(); + parsed_value_set.extend(set.iter()); + + // If there is no intersection, add the new set to updated sets + if intersection.is_empty() { + updated_sets.insert(new_set_id, set.clone()); + + // Add each entry to a dictionary for quick lookups of which ValueId is in which updated set + for entry in set.iter() { + value_dictionary.insert(*entry, new_set_id); + } + new_set_id += 1; + continue; + } + + // If there is an intersection, we have to join the sets + let mut joining_sets_ids: HashSet = + intersection.iter().map(|x| value_dictionary[x]).collect(); + let mut largest_set_size = usize::MIN; + let mut largest_set_index = usize::MAX; + + // We need to find the largest set to move as few elements as possible + for set_id in joining_sets_ids.iter() { + if updated_sets[set_id].len() > largest_set_size { + (largest_set_index, largest_set_size) = (*set_id, updated_sets[set_id].len()); + } + } + joining_sets_ids.remove(&largest_set_index); + + let mut largest_set = + updated_sets.extract(&largest_set_index).expect("Set should be in the hashmap").0; + + // For each of other sets that need to be joined + for set_id in joining_sets_ids.iter() { + // Map each element to the largest set and insert it + for element in updated_sets[set_id].iter() { + value_dictionary[element] = largest_set_index; + largest_set.insert(*element); + } + // Remove the old set + updated_sets.remove(set_id); + } + + // Join the new set with the largest sets + for element in set.iter() { + value_dictionary.insert(*element, largest_set_index); + 
largest_set.insert(*element); + } + updated_sets.insert(largest_set_index, largest_set); + } + self.value_sets = updated_sets.values().cloned().collect(); + } +} +#[cfg(test)] +mod test { + use crate::ssa::{ + function_builder::FunctionBuilder, + ir::{instruction::BinaryOp, map::Id, types::Type}, + }; + + #[test] + /// Test that a connected function raises no warnings + fn test_simple_connected_function() { + // fn main { + // b0(v0: Field, v1: Field): + // v2 = add v0, 1 + // v3 = mul v1, 2 + // v4 = eq v2, v3 + // return v2 + // } + let main_id = Id::test_new(0); + let mut builder = FunctionBuilder::new("main".into(), main_id); + let v0 = builder.add_parameter(Type::field()); + let v1 = builder.add_parameter(Type::field()); + + let one = builder.field_constant(1u128); + let two = builder.field_constant(2u128); + + let v2 = builder.insert_binary(v0, BinaryOp::Add, one); + let v3 = builder.insert_binary(v1, BinaryOp::Mul, two); + let _v4 = builder.insert_binary(v2, BinaryOp::Eq, v3); + builder.terminate_with_return(vec![v2]); + + let mut ssa = builder.finish(); + let ssa_level_warnings = ssa.check_for_underconstrained_values(); + assert_eq!(ssa_level_warnings.len(), 0); + } + + #[test] + /// Test where the results of a call to a brillig function are not connected to main function inputs or outputs + /// This should be detected. + fn test_simple_function_with_disconnected_part() { + // unconstrained fn br(v0: Field, v1: Field){ + // v2 = add v0, v1 + // return v2 + // } + // + // fn main { + // b0(v0: Field, v1: Field): + // v2 = add v0, 1 + // v3 = mul v1, 2 + // v4 = call br(v2, v3) + // v5 = add v4, 2 + // return + // } + let main_id = Id::test_new(0); + let mut builder = FunctionBuilder::new("main".into(), main_id); + let v0 = builder.add_parameter(Type::field()); + let v1 = builder.add_parameter(Type::field()); + + let one = builder.field_constant(1u128); + let two = builder.field_constant(2u128); + + let v2 = builder.insert_binary(v0, BinaryOp::Add, one); + let v3 = builder.insert_binary(v1, BinaryOp::Mul, two); + + let br_function_id = Id::test_new(1); + let br_function = builder.import_function(br_function_id); + let v4 = builder.insert_call(br_function, vec![v2, v3], vec![Type::field()])[0]; + let v5 = builder.insert_binary(v4, BinaryOp::Add, two); + builder.insert_constrain(v5, one, None); + builder.terminate_with_return(vec![]); + + builder.new_brillig_function("br".into(), br_function_id); + let v0 = builder.add_parameter(Type::field()); + let v1 = builder.add_parameter(Type::field()); + let v2 = builder.insert_binary(v0, BinaryOp::Add, v1); + builder.terminate_with_return(vec![v2]); + let mut ssa = builder.finish(); + let ssa_level_warnings = ssa.check_for_underconstrained_values(); + assert_eq!(ssa_level_warnings.len(), 1); + } +} diff --git a/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 4e5fa262696..56484ced290 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -7,6 +7,7 @@ mod array_set; mod as_slice_length; mod assert_constant; mod bubble_up_constrains; +mod check_for_underconstrained_values; mod constant_folding; mod defunctionalize; mod die; diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs index da3b95ce8e0..ed0ac13fae5 100644 --- a/tooling/lsp/src/notifications/mod.rs +++ b/tooling/lsp/src/notifications/mod.rs @@ -190,6 +190,7 @@ fn process_noir_document( let severity = match diagnostic.kind { DiagnosticKind::Error => 
DiagnosticSeverity::ERROR, DiagnosticKind::Warning => DiagnosticSeverity::WARNING, + DiagnosticKind::Bug => DiagnosticSeverity::WARNING, }; Some(Diagnostic { range, From 30c50f52a6d58163e39006b73f4eb5003afc239b Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Fri, 5 Jul 2024 16:34:28 -0300 Subject: [PATCH 5/5] fix: correct range for overlfowing/underflowing integer assignment (#5416) # Description ## Problem Resolves #5419 ## Summary This is similar to #5372, except that here the compiler does an early check with explicitly annotated types, and in the related issue the check is done later on, once unknown types are resolved. Also in this PR: - a test was moved from `test_programs` to `noirc_frontend/src/tests.rs` because the error produced here can be caught there - the error message produced in both places is now exactly the same (showing the possible range of values, which helps to know why you exceeded the min/max) ## Additional Context None ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_evaluator/src/errors.rs | 9 ++- compiler/noirc_evaluator/src/ssa/ir/types.rs | 56 +++++++++----- .../src/ssa/ssa_gen/context.rs | 5 +- .../noirc_frontend/src/elaborator/lints.rs | 48 +++++++----- .../src/elaborator/statements.rs | 4 +- .../src/hir/type_check/errors.rs | 2 +- .../noirc_frontend/src/hir/type_check/stmt.rs | 2 +- compiler/noirc_frontend/src/hir_def/types.rs | 4 + compiler/noirc_frontend/src/tests.rs | 76 +++++++++++++++++++ cspell.json | 1 + .../overflowing_assignment/Nargo.toml | 5 -- .../overflowing_assignment/src/main.nr | 5 -- 12 files changed, 160 insertions(+), 57 deletions(-) delete mode 100644 test_programs/compile_failure/overflowing_assignment/Nargo.toml delete mode 100644 test_programs/compile_failure/overflowing_assignment/src/main.nr diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index 2725abb1f63..2b328898257 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -23,8 +23,13 @@ pub enum RuntimeError { IndexOutOfBounds { index: usize, array_size: usize, call_stack: CallStack }, #[error("Range constraint of {num_bits} bits is too large for the Field size")] InvalidRangeConstraint { num_bits: u32, call_stack: CallStack }, - #[error("{value} does not fit within the type bounds for {typ}")] - IntegerOutOfBounds { value: FieldElement, typ: NumericType, call_stack: CallStack }, + #[error("The value `{value:?}` cannot fit into `{typ}` which has range `{range}`")] + IntegerOutOfBounds { + value: FieldElement, + typ: NumericType, + range: String, + call_stack: CallStack, + }, #[error("Expected array index to fit into a u64")] TypeConversion { from: String, into: String, call_stack: CallStack }, #[error("{name:?} is not initialized")] diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs index 0b0ccb7e1a4..56729a5cba9 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -29,23 +29,37 @@ impl NumericType { } } - /// Returns true if the given Field value is within the numeric limits - /// for the current NumericType. 
- pub(crate) fn value_is_within_limits(self, field: FieldElement, negative: bool) -> bool { + /// Returns None if the given Field value is within the numeric limits + /// for the current NumericType. Otherwise returns a string describing + /// the limits, as a range. + pub(crate) fn value_is_outside_limits( + self, + field: FieldElement, + negative: bool, + ) -> Option { match self { NumericType::Unsigned { bit_size } => { + let max = 2u128.pow(bit_size) - 1; if negative { - return false; + return Some(format!("0..={}", max)); + } + if field <= max.into() { + None + } else { + Some(format!("0..={}", max)) } - let max = 2u128.pow(bit_size) - 1; - field <= max.into() } NumericType::Signed { bit_size } => { - let max = - if negative { 2u128.pow(bit_size - 1) } else { 2u128.pow(bit_size - 1) - 1 }; - field <= max.into() + let min = 2u128.pow(bit_size - 1); + let max = 2u128.pow(bit_size - 1) - 1; + let target_max = if negative { min } else { max }; + if field <= target_max.into() { + None + } else { + Some(format!("-{}..={}", min, max)) + } } - NumericType::NativeField => true, + NumericType::NativeField => None, } } } @@ -224,21 +238,21 @@ mod tests { use super::*; #[test] - fn test_u8_is_within_limits() { + fn test_u8_value_is_outside_limits() { let u8 = NumericType::Unsigned { bit_size: 8 }; - assert!(!u8.value_is_within_limits(FieldElement::from(1_i128), true)); - assert!(u8.value_is_within_limits(FieldElement::from(0_i128), false)); - assert!(u8.value_is_within_limits(FieldElement::from(255_i128), false)); - assert!(!u8.value_is_within_limits(FieldElement::from(256_i128), false)); + assert!(u8.value_is_outside_limits(FieldElement::from(1_i128), true).is_some()); + assert!(u8.value_is_outside_limits(FieldElement::from(0_i128), false).is_none()); + assert!(u8.value_is_outside_limits(FieldElement::from(255_i128), false).is_none()); + assert!(u8.value_is_outside_limits(FieldElement::from(256_i128), false).is_some()); } #[test] - fn test_i8_is_within_limits() { + fn test_i8_value_is_outside_limits() { let i8 = NumericType::Signed { bit_size: 8 }; - assert!(!i8.value_is_within_limits(FieldElement::from(129_i128), true)); - assert!(i8.value_is_within_limits(FieldElement::from(128_i128), true)); - assert!(i8.value_is_within_limits(FieldElement::from(0_i128), false)); - assert!(i8.value_is_within_limits(FieldElement::from(127_i128), false)); - assert!(!i8.value_is_within_limits(FieldElement::from(128_i128), false)); + assert!(i8.value_is_outside_limits(FieldElement::from(129_i128), true).is_some()); + assert!(i8.value_is_outside_limits(FieldElement::from(128_i128), true).is_none()); + assert!(i8.value_is_outside_limits(FieldElement::from(0_i128), false).is_none()); + assert!(i8.value_is_outside_limits(FieldElement::from(127_i128), false).is_none()); + assert!(i8.value_is_outside_limits(FieldElement::from(128_i128), false).is_some()); } } diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index fb79266768c..e013546f14a 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -272,11 +272,12 @@ impl<'a> FunctionContext<'a> { let value = value.into(); if let Type::Numeric(numeric_type) = typ { - if !numeric_type.value_is_within_limits(value, negative) { + if let Some(range) = numeric_type.value_is_outside_limits(value, negative) { let call_stack = self.builder.get_call_stack(); return Err(RuntimeError::IntegerOutOfBounds { - value, + value: if negative { -value } 
                     typ: numeric_type,
+                    range,
                     call_stack,
                 });
             }
diff --git a/compiler/noirc_frontend/src/elaborator/lints.rs b/compiler/noirc_frontend/src/elaborator/lints.rs
index af6f4cdb42f..a4140043ac1 100644
--- a/compiler/noirc_frontend/src/elaborator/lints.rs
+++ b/compiler/noirc_frontend/src/elaborator/lints.rs
@@ -7,13 +7,12 @@ use crate::{
     },
     hir_def::expr::HirIdent,
     macros_api::{
-        HirExpression, HirLiteral, NodeInterner, NoirFunction, UnaryOp, UnresolvedTypeData,
-        Visibility,
+        HirExpression, HirLiteral, NodeInterner, NoirFunction, Signedness, UnaryOp,
+        UnresolvedTypeData, Visibility,
     },
     node_interner::{DefinitionKind, ExprId, FuncId},
     Type,
 };
-use acvm::AcirField;
 
 use noirc_errors::Span;
 
@@ -196,8 +195,8 @@ pub(super) fn unnecessary_pub_argument(
 }
 
 /// Check if an assignment is overflowing with respect to `annotated_type`
-/// in a declaration statement where `annotated_type` is an unsigned integer
-pub(crate) fn overflowing_uint(
+/// in a declaration statement where `annotated_type` is a signed or unsigned integer
+pub(crate) fn overflowing_int(
     interner: &NodeInterner,
     rhs_expr: &ExprId,
     annotated_type: &Type,
@@ -207,23 +206,36 @@ pub(crate) fn overflowing_uint(
     let mut errors = Vec::with_capacity(2);
 
     match expr {
-        HirExpression::Literal(HirLiteral::Integer(value, false)) => {
-            let v = value.to_u128();
-            if let Type::Integer(_, bit_count) = annotated_type {
+        HirExpression::Literal(HirLiteral::Integer(value, negative)) => match annotated_type {
+            Type::Integer(Signedness::Unsigned, bit_count) => {
                 let bit_count: u32 = (*bit_count).into();
-                let max = 1 << bit_count;
-                if v >= max {
+                let max = 2u128.pow(bit_count) - 1;
+                if value > max.into() || negative {
                     errors.push(TypeCheckError::OverflowingAssignment {
-                        expr: value,
+                        expr: if negative { -value } else { value },
                         ty: annotated_type.clone(),
-                        range: format!("0..={}", max - 1),
+                        range: format!("0..={}", max),
                         span,
                     });
-                };
-            };
-        }
+                }
+            }
+            Type::Integer(Signedness::Signed, bit_count) => {
+                let bit_count: u32 = (*bit_count).into();
+                let min = 2u128.pow(bit_count - 1);
+                let max = 2u128.pow(bit_count - 1) - 1;
+                if (negative && value > min.into()) || (!negative && value > max.into()) {
+                    errors.push(TypeCheckError::OverflowingAssignment {
+                        expr: if negative { -value } else { value },
+                        ty: annotated_type.clone(),
+                        range: format!("-{}..={}", min, max),
+                        span,
+                    });
+                }
+            }
+            _ => (),
+        },
         HirExpression::Prefix(expr) => {
-            overflowing_uint(interner, &expr.rhs, annotated_type);
+            overflowing_int(interner, &expr.rhs, annotated_type);
             if expr.operator == UnaryOp::Minus {
                 errors.push(TypeCheckError::InvalidUnaryOp {
                     kind: "annotated_type".to_string(),
@@ -232,8 +244,8 @@ pub(crate) fn overflowing_uint(
             }
         }
         HirExpression::Infix(expr) => {
-            errors.extend(overflowing_uint(interner, &expr.lhs, annotated_type));
-            errors.extend(overflowing_uint(interner, &expr.rhs, annotated_type));
+            errors.extend(overflowing_int(interner, &expr.lhs, annotated_type));
+            errors.extend(overflowing_int(interner, &expr.rhs, annotated_type));
         }
         _ => {}
     }
diff --git a/compiler/noirc_frontend/src/elaborator/statements.rs b/compiler/noirc_frontend/src/elaborator/statements.rs
index 8d97bd1a25d..6aed0ab196d 100644
--- a/compiler/noirc_frontend/src/elaborator/statements.rs
+++ b/compiler/noirc_frontend/src/elaborator/statements.rs
@@ -86,8 +86,8 @@ impl<'context> Elaborator<'context> {
                 expr_span,
             }
         });
-        if annotated_type.is_unsigned() {
-            let errors = lints::overflowing_uint(self.interner, &expression, &annotated_type);
+        if annotated_type.is_integer() {
+            let errors = lints::overflowing_int(self.interner, &expression, &annotated_type);
             for error in errors {
                 self.push_err(error);
             }
diff --git a/compiler/noirc_frontend/src/hir/type_check/errors.rs b/compiler/noirc_frontend/src/hir/type_check/errors.rs
index 9c6a39d1940..e5c52213056 100644
--- a/compiler/noirc_frontend/src/hir/type_check/errors.rs
+++ b/compiler/noirc_frontend/src/hir/type_check/errors.rs
@@ -35,7 +35,7 @@ pub enum Source {
 pub enum TypeCheckError {
     #[error("Operator {op:?} cannot be used in a {place:?}")]
     OpCannotBeUsed { op: HirBinaryOp, place: &'static str, span: Span },
-    #[error("The literal `{expr:?}` cannot fit into `{ty}` which has range `{range}`")]
+    #[error("The value `{expr:?}` cannot fit into `{ty}` which has range `{range}`")]
     OverflowingAssignment { expr: FieldElement, ty: Type, range: String, span: Span },
     #[error("Type {typ:?} cannot be used in a {place:?}")]
     TypeCannotBeUsed { typ: Type, place: &'static str, span: Span },
diff --git a/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/compiler/noirc_frontend/src/hir/type_check/stmt.rs
index 3a570922c81..9abd1b34690 100644
--- a/compiler/noirc_frontend/src/hir/type_check/stmt.rs
+++ b/compiler/noirc_frontend/src/hir/type_check/stmt.rs
@@ -368,7 +368,7 @@ impl<'interner> TypeChecker<'interner> {
                     let max = 1 << bit_count;
                     if v >= max {
                         self.errors.push(TypeCheckError::OverflowingAssignment {
-                            expr: value,
+                            expr: -value,
                             ty: annotated_type.clone(),
                             range: format!("0..={}", max - 1),
                             span,
diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs
index b529ca17887..6777c0d1c79 100644
--- a/compiler/noirc_frontend/src/hir_def/types.rs
+++ b/compiler/noirc_frontend/src/hir_def/types.rs
@@ -665,6 +665,10 @@ impl Type {
         matches!(self.follow_bindings(), Type::Bool)
     }
 
+    pub fn is_integer(&self) -> bool {
+        matches!(self.follow_bindings(), Type::Integer(_, _))
+    }
+
     pub fn is_signed(&self) -> bool {
         matches!(self.follow_bindings(), Type::Integer(Signedness::Signed, _))
     }
diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs
index dbfa5222ca4..10183ae0ac9 100644
--- a/compiler/noirc_frontend/src/tests.rs
+++ b/compiler/noirc_frontend/src/tests.rs
@@ -1907,3 +1907,79 @@ fn comptime_let() {
     let errors = get_program_errors(src);
     assert_eq!(errors.len(), 0);
 }
+
+#[test]
+fn overflowing_u8() {
+    let src = r#"
+    fn main() {
+        let _: u8 = 256;
+    }"#;
+    let errors = get_program_errors(src);
+    assert_eq!(errors.len(), 1);
+
+    if let CompilationError::TypeError(error) = &errors[0].0 {
+        assert_eq!(
+            error.to_string(),
+            "The value `2⁸` cannot fit into `u8` which has range `0..=255`"
+        );
+    } else {
+        panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0);
+    }
+}
+
+#[test]
+fn underflowing_u8() {
+    let src = r#"
+    fn main() {
+        let _: u8 = -1;
+    }"#;
+    let errors = get_program_errors(src);
+    assert_eq!(errors.len(), 1);
+
+    if let CompilationError::TypeError(error) = &errors[0].0 {
+        assert_eq!(
+            error.to_string(),
+            "The value `-1` cannot fit into `u8` which has range `0..=255`"
+        );
+    } else {
+        panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0);
+    }
+}
+
+#[test]
+fn overflowing_i8() {
+    let src = r#"
+    fn main() {
+        let _: i8 = 128;
+    }"#;
+    let errors = get_program_errors(src);
+    assert_eq!(errors.len(), 1);
+
+    if let CompilationError::TypeError(error) = &errors[0].0 {
+        assert_eq!(
+            error.to_string(),
+            "The value `2⁷` cannot fit into `i8` which has range `-128..=127`"
+        );
+    } else {
+        panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0);
+    }
+}
+
+#[test]
+fn underflowing_i8() {
+    let src = r#"
+    fn main() {
+        let _: i8 = -129;
+    }"#;
+    let errors = get_program_errors(src);
+    assert_eq!(errors.len(), 1);
+
+    if let CompilationError::TypeError(error) = &errors[0].0 {
+        assert_eq!(
+            error.to_string(),
+            "The value `-129` cannot fit into `i8` which has range `-128..=127`"
+        );
+    } else {
+        panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0);
+    }
+}
diff --git a/cspell.json b/cspell.json
index b3a39108f24..2a9bfb4b544 100644
--- a/cspell.json
+++ b/cspell.json
@@ -148,6 +148,7 @@
   "nomicfoundation",
   "noncanonical",
   "nouner",
+  "overflowing",
   "pedersen",
   "peekable",
   "petgraph",
diff --git a/test_programs/compile_failure/overflowing_assignment/Nargo.toml b/test_programs/compile_failure/overflowing_assignment/Nargo.toml
deleted file mode 100644
index 612e3e7aaf6..00000000000
--- a/test_programs/compile_failure/overflowing_assignment/Nargo.toml
+++ /dev/null
@@ -1,5 +0,0 @@
-[package]
-name = "overflowing_assignment"
-type = "bin"
-authors = [""]
-[dependencies]
\ No newline at end of file
diff --git a/test_programs/compile_failure/overflowing_assignment/src/main.nr b/test_programs/compile_failure/overflowing_assignment/src/main.nr
deleted file mode 100644
index 6b529103ca3..00000000000
--- a/test_programs/compile_failure/overflowing_assignment/src/main.nr
+++ /dev/null
@@ -1,5 +0,0 @@
-fn main() {
-    let x:u8 = -1;
-    let y:u8 = 300;
-    assert(x != y);
-}
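
For reference, a minimal Noir sketch of the kind of program these changes reject (the literals and expected messages are taken from the new frontend tests and the deleted `overflowing_assignment` test program above; the variable names are illustrative only):

```rust
// Each annotated literal is outside its type's range, so compilation is
// expected to fail with the new range-based messages, e.g.
//   "The value `2⁸` cannot fit into `u8` which has range `0..=255`"
//   "The value `-129` cannot fit into `i8` which has range `-128..=127`"
fn main() {
    let _too_big: u8 = 256;    // u8 holds 0..=255
    let _negative: u8 = -1;    // unsigned types cannot hold negative values
    let _overflow: i8 = 128;   // i8 holds -128..=127
    let _underflow: i8 = -129; // below the i8 minimum
}
```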