diff --git a/.aztec-sync-commit b/.aztec-sync-commit
index b9e87cffedc..82e81e9d74c 100644
--- a/.aztec-sync-commit
+++ b/.aztec-sync-commit
@@ -1 +1 @@
-1c74387e56b49102043fc6701735325a891e6c65
+221e2479622aef8e70120dc0a9f91ffcbc99efba
diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml
index e6098dd269c..9668e3f629f 100644
--- a/.github/workflows/test-js-packages.yml
+++ b/.github/workflows/test-js-packages.yml
@@ -399,6 +399,12 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v4
 
+      - name: Install `bb`
+        run: |
+          ./scripts/install_bb.sh
+          echo "$HOME/.barretenberg/" >> $GITHUB_PATH
+          export PATH="$PATH:$HOME/.barretenberg/"
+
       - name: Download nargo binary
         uses: actions/download-artifact@v4
         with:
@@ -448,7 +454,7 @@ jobs:
   test-integration-browser:
     name: Integration Tests (Browser)
     runs-on: ubuntu-latest
-    needs: [build-acvm-js, build-noir-wasm, build-nargo, build-noirc-abi]
+    needs: [build-acvm-js, build-noir-wasm, build-noirc-abi]
     timeout-minutes: 30
 
     steps:
@@ -490,6 +496,47 @@ jobs:
         run: |
           yarn test:browser
 
+  test-examples:
+    name: Example scripts
+    runs-on: ubuntu-latest
+    needs: [build-nargo]
+    timeout-minutes: 30
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Install Foundry
+        uses: foundry-rs/foundry-toolchain@v1.2.0
+
+      - name: Install `bb`
+        run: |
+          ./scripts/install_bb.sh
+          echo "$HOME/.barretenberg/" >> $GITHUB_PATH
+          export PATH="$PATH:$HOME/.barretenberg/"
+
+      - name: Download nargo binary
+        uses: actions/download-artifact@v4
+        with:
+          name: nargo
+          path: ./nargo
+
+      - name: Set nargo on PATH
+        run: |
+          nargo_binary="${{ github.workspace }}/nargo/nargo"
+          chmod +x $nargo_binary
+          echo "$(dirname $nargo_binary)" >> $GITHUB_PATH
+          export PATH="$PATH:$(dirname $nargo_binary)"
+          nargo -V
+
+      - name: Run `prove_and_verify`
+        working-directory: ./examples/prove_and_verify
+        run: ./test.sh
+
+      - name: Run `codegen_verifier`
+        working-directory: ./examples/codegen_verifier
+        run: ./test.sh
+
   # This is a job which depends on all test jobs and reports the overall status.
   # This allows us to add/remove test jobs without having to update the required workflows.
   tests-end:
@@ -507,6 +554,7 @@ jobs:
       - test-noir-codegen
       - test-integration-node
       - test-integration-browser
+      - test-examples
 
     steps:
       - name: Report overall success
diff --git a/Cargo.lock b/Cargo.lock
index b5dc6f9bfdf..bfc012d23f0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -452,25 +452,6 @@ dependencies = [
  "regex",
 ]
 
-[[package]]
-name = "backend-interface"
-version = "0.30.0"
-dependencies = [
- "acvm",
- "bb_abstraction_leaks",
- "build-target",
- "const_format",
- "dirs",
- "flate2",
- "reqwest",
- "serde_json",
- "tar",
- "tempfile",
- "test-binary",
- "thiserror",
- "tracing",
-]
-
 [[package]]
 name = "backtrace"
 version = "0.3.68"
@@ -510,15 +491,6 @@ version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
 
-[[package]]
-name = "bb_abstraction_leaks"
-version = "0.11.0"
-dependencies = [
- "acvm",
- "build-target",
- "const_format",
-]
-
 [[package]]
 name = "bincode"
 version = "1.3.3"
@@ -664,12 +636,6 @@ dependencies = [
  "safe-regex",
 ]
 
-[[package]]
-name = "build-target"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "832133bbabbbaa9fbdba793456a2827627a7d2b8fb96032fa1e7666d7895832b"
-
 [[package]]
 name = "bumpalo"
 version = "3.13.0"
@@ -1408,15 +1374,6 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
 
-[[package]]
-name = "encoding_rs"
-version = "0.8.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394"
-dependencies = [
- "cfg-if 1.0.0",
-]
-
 [[package]]
 name = "endian-type"
 version = "0.1.2"
@@ -1787,25 +1744,6 @@ dependencies = [
  "subtle",
 ]
 
-[[package]]
-name = "h2"
-version = "0.3.26"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8"
-dependencies = [
- "bytes",
- "fnv",
- "futures-core",
- "futures-sink",
- "futures-util",
- "http",
- "indexmap 2.2.6",
- "slab",
- "tokio",
- "tokio-util 0.7.10",
- "tracing",
-]
-
 [[package]]
 name = "half"
 version = "1.8.2"
@@ -1922,7 +1860,6 @@ dependencies = [
  "futures-channel",
  "futures-core",
  "futures-util",
- "h2",
  "http",
  "http-body",
  "httparse",
@@ -1936,20 +1873,6 @@ dependencies = [
  "want",
 ]
 
-[[package]]
-name = "hyper-rustls"
-version = "0.24.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
-dependencies = [
- "futures-util",
- "http",
- "hyper",
- "rustls",
- "tokio",
- "tokio-rustls",
-]
-
 [[package]]
 name = "iai"
 version = "0.1.1"
@@ -2137,12 +2060,6 @@ dependencies = [
  "cfg-if 1.0.0",
 ]
 
-[[package]]
-name = "ipnet"
-version = "2.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6"
-
 [[package]]
 name = "is-terminal"
 version = "0.4.9"
@@ -2517,12 +2434,6 @@ dependencies = [
  "autocfg",
 ]
 
-[[package]]
-name = "mime"
-version = "0.3.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
-
 [[package]]
 name = "miniz_oxide"
 version = "0.7.1"
@@ -2589,7 +2500,6 @@ dependencies = [
  "assert_cmd",
  "assert_fs",
  "async-lsp",
- "backend-interface",
 "bn254_blackbox_solver",
 "build-data",
 "clap",
@@ -2600,7 +2510,6 @@ dependencies = [
 "dap",
 "dirs",
 "fm",
- "hex",
 "iai",
 "iter-extended",
 "nargo",
@@ -3678,45 +3587,6 @@ version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
 
-[[package]]
-name = "reqwest"
-version = "0.11.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1"
-dependencies = [
- "base64 0.21.2",
- "bytes",
- "encoding_rs",
- "futures-core",
- "futures-util",
- "h2",
- "http",
- "http-body",
- "hyper",
- "hyper-rustls",
- "ipnet",
- "js-sys",
- "log",
- "mime",
- "once_cell",
- "percent-encoding 2.3.1",
- "pin-project-lite",
- "rustls",
- "rustls-pemfile",
- "serde",
- "serde_json",
- "serde_urlencoded",
- "tokio",
- "tokio-rustls",
- "tower-service",
- "url 2.5.0",
- "wasm-bindgen",
- "wasm-bindgen-futures",
- "web-sys",
- "webpki-roots",
- "winreg",
-]
-
 [[package]]
 name = "rexpect"
 version = "0.5.0"
@@ -3750,36 +3620,6 @@ dependencies = [
 "bytemuck",
 ]
 
-[[package]]
-name = "ring"
-version = "0.16.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
-dependencies = [
- "cc",
- "libc",
- "once_cell",
- "spin 0.5.2",
- "untrusted 0.7.1",
- "web-sys",
- "winapi",
-]
-
-[[package]]
-name = "ring"
-version = "0.17.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d"
-dependencies = [
- "cc",
- "cfg-if 1.0.0",
- "getrandom 0.2.15",
- "libc",
- "spin 0.9.8",
- "untrusted 0.9.0",
- "windows-sys 0.52.0",
-]
-
 [[package]]
 name = "rust-embed"
 version = "6.8.1"
@@ -3848,37 +3688,6 @@ dependencies = [
 "windows-sys 0.48.0",
 ]
 
-[[package]]
-name = "rustls"
-version = "0.21.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4"
-dependencies = [
- "log",
- "ring 0.17.8",
- "rustls-webpki",
- "sct",
-]
-
-[[package]]
-name = "rustls-pemfile"
-version = "1.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2"
-dependencies = [
- "base64 0.21.2",
-]
-
-[[package]]
-name = "rustls-webpki"
-version = "0.101.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
-dependencies = [
- "ring 0.17.8",
- "untrusted 0.9.0",
-]
-
 [[package]]
 name = "rustversion"
 version = "1.0.14"
@@ -4011,16 +3820,6 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
 
-[[package]]
-name = "sct"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4"
-dependencies = [
- "ring 0.16.20",
- "untrusted 0.7.1",
-]
-
 [[package]]
 name = "sec1"
 version = "0.3.0"
@@ -4129,18 +3928,6 @@ dependencies = [
 "serde",
 ]
 
-[[package]]
-name = "serde_urlencoded"
-version = "0.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
-dependencies = [
- "form_urlencoded",
- "itoa",
- "ryu",
- "serde",
-]
-
 [[package]]
 name = "serde_with"
 version = "3.2.0"
@@ -4301,18 +4088,6 @@ dependencies = [
 "windows-sys 0.48.0",
 ]
 
-[[package]]
-name = "spin"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
-
-[[package]]
-name = "spin"
-version = "0.9.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
-
 [[package]]
 name = "spki"
 version = "0.6.0"
@@ -4430,17 +4205,6 @@ dependencies = [
 "unicode-ident",
 ]
 
-[[package]]
-name = "tar"
-version = "0.4.40"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb"
-dependencies = [
- "filetime",
- "libc",
- "xattr",
-]
-
 [[package]]
 name = "tempfile"
 version = "3.8.0"
@@ -4646,16 +4410,6 @@ dependencies = [
 "syn 2.0.64",
 ]
 
-[[package]]
-name = "tokio-rustls"
-version = "0.24.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
-dependencies = [
- "rustls",
- "tokio",
-]
-
 [[package]]
 name = "tokio-stream"
 version = "0.1.15"
@@ -4693,7 +4447,6 @@ dependencies = [
 "futures-sink",
 "pin-project-lite",
 "tokio",
- "tracing",
 ]
 
 [[package]]
@@ -4940,18 +4693,6 @@ version = "0.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
 
-[[package]]
-name = "untrusted"
-version = "0.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
-
-[[package]]
-name = "untrusted"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
-
 [[package]]
 name = "url"
 version = "1.7.2"
@@ -5151,12 +4892,6 @@ dependencies = [
 "wasm-bindgen",
 ]
 
-[[package]]
-name = "webpki-roots"
-version = "0.25.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc"
-
 [[package]]
 name = "winapi"
 version = "0.3.9"
@@ -5338,25 +5073,6 @@ dependencies = [
 "memchr",
 ]
 
-[[package]]
-name = "winreg"
-version = "0.50.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
-dependencies = [
- "cfg-if 1.0.0",
- "windows-sys 0.48.0",
-]
-
-[[package]]
-name = "xattr"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985"
-dependencies = [
- "libc",
-]
-
 [[package]]
 name = "zerocopy"
 version = "0.7.32"
diff --git a/Cargo.toml b/Cargo.toml
index 670ae36dd4b..182580f8d67 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,8 +13,6 @@ members = [
     "compiler/fm",
     "compiler/wasm",
     # Crates related to tooling built on top of the Noir compiler
-    "tooling/backend_interface",
-    "tooling/bb_abstraction_leaks",
     "tooling/lsp",
     "tooling/debugger",
     "tooling/nargo",
diff --git a/acvm-repo/acir/codegen/acir.cpp b/acvm-repo/acir/codegen/acir.cpp
index b7e75c4320d..47e184a6332 100644
--- a/acvm-repo/acir/codegen/acir.cpp
+++ b/acvm-repo/acir/codegen/acir.cpp
@@ -149,7 +149,7 @@ namespace Program {
         struct MultiScalarMul {
             std::vector<Program::FunctionInput> points;
             std::vector<Program::FunctionInput> scalars;
-            std::array<Program::Witness, 2> outputs;
+            std::array<Program::Witness, 3> outputs;
 
            friend bool operator==(const MultiScalarMul&, const MultiScalarMul&);
            std::vector<uint8_t> bincodeSerialize() const;
@@ -157,11 +157,9 @@ namespace Program {
         };
 
         struct EmbeddedCurveAdd {
-            Program::FunctionInput input1_x;
-            Program::FunctionInput input1_y;
-            Program::FunctionInput input2_x;
-            Program::FunctionInput input2_y;
-            std::array<Program::Witness, 2> outputs;
+            std::array<Program::FunctionInput, 3> input1;
+            std::array<Program::FunctionInput, 3> input2;
+            std::array<Program::Witness, 3> outputs;
 
             friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&);
             std::vector<uint8_t> bincodeSerialize() const;
@@ -292,6 +290,33 @@ namespace Program {
         static BlockId bincodeDeserialize(std::vector<uint8_t>);
     };
 
+    struct BlockType {
+
+        struct Memory {
+            friend bool operator==(const Memory&, const Memory&);
+            std::vector<uint8_t> bincodeSerialize() const;
+            static Memory bincodeDeserialize(std::vector<uint8_t>);
+        };
+
+        struct CallData {
+            friend bool operator==(const CallData&, const CallData&);
+            std::vector<uint8_t> bincodeSerialize() const;
+            static CallData bincodeDeserialize(std::vector<uint8_t>);
+        };
+
+        struct ReturnData {
+            friend bool operator==(const ReturnData&, const ReturnData&);
+            std::vector<uint8_t> bincodeSerialize() const;
+            static ReturnData bincodeDeserialize(std::vector<uint8_t>);
+        };
+
+        std::variant<Memory, CallData, ReturnData> value;
+
+        friend bool operator==(const BlockType&, const BlockType&);
+        std::vector<uint8_t> bincodeSerialize() const;
+        static BlockType bincodeDeserialize(std::vector<uint8_t>);
+    };
+
     struct Expression {
         std::vector<std::tuple<std::string, Program::Witness, Program::Witness>> mul_terms;
         std::vector<std::tuple<std::string, Program::Witness>> linear_combinations;
@@ -428,6 +453,7 @@ struct MemoryInit {
         Program::BlockId block_id;
         std::vector<Program::Witness> init;
+        Program::BlockType block_type;
 
         friend bool operator==(const MemoryInit&, const MemoryInit&);
         std::vector<uint8_t> bincodeSerialize() const;
@@ -754,8 +780,10 @@ namespace Program {
         struct EmbeddedCurveAdd {
             Program::MemoryAddress input1_x;
             Program::MemoryAddress input1_y;
+            Program::MemoryAddress input1_infinite;
             Program::MemoryAddress input2_x;
             Program::MemoryAddress input2_y;
+            Program::MemoryAddress input2_infinite;
             Program::HeapArray result;
 
             friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&);
             std::vector<uint8_t> bincodeSerialize() const;
@@ -842,7 +870,17 @@ namespace Program {
             static Sha256Compression bincodeDeserialize(std::vector<uint8_t>);
         };
 
-        std::variant<AES128Encrypt, Sha256, Blake2s, Blake3, Keccak256, Keccakf1600, EcdsaSecp256k1, EcdsaSecp256r1, SchnorrVerify, PedersenCommitment, PedersenHash, MultiScalarMul, EmbeddedCurveAdd, BigIntAdd, BigIntSub, BigIntMul, BigIntDiv, BigIntFromLeBytes, BigIntToLeBytes, Poseidon2Permutation, Sha256Compression> value;
+        struct ToRadix {
+            Program::MemoryAddress input;
+            uint32_t radix;
+            Program::HeapArray output;
+
+            friend bool operator==(const ToRadix&, const ToRadix&);
+            std::vector<uint8_t> bincodeSerialize() const;
+            static ToRadix bincodeDeserialize(std::vector<uint8_t>);
+        };
+
+        std::variant<AES128Encrypt, Sha256, Blake2s, Blake3, Keccak256, Keccakf1600, EcdsaSecp256k1, EcdsaSecp256r1, SchnorrVerify, PedersenCommitment, PedersenHash, MultiScalarMul, EmbeddedCurveAdd, BigIntAdd, BigIntSub, BigIntMul, BigIntDiv, BigIntFromLeBytes, BigIntToLeBytes, Poseidon2Permutation, Sha256Compression, ToRadix> value;
 
         friend bool operator==(const BlackBoxOp&, const BlackBoxOp&);
         std::vector<uint8_t> bincodeSerialize() const;
        static BlackBoxOp bincodeDeserialize(std::vector<uint8_t>);
@@ -2762,10 +2800,8 @@ Program::BlackBoxFuncCall::MultiScalarMul serde::Deserializable<Program::BlackBoxFuncCall::MultiScalarMul>::deserialize(Deserializer &deserializer) {
 template <>
 template <typename Serializer>
 void serde::Serializable<Program::BlackBoxFuncCall::EmbeddedCurveAdd>::serialize(const Program::BlackBoxFuncCall::EmbeddedCurveAdd &obj, Serializer &serializer) {
-    serde::Serializable<decltype(obj.input1_x)>::serialize(obj.input1_x, serializer);
-    serde::Serializable<decltype(obj.input1_y)>::serialize(obj.input1_y, serializer);
-    serde::Serializable<decltype(obj.input2_x)>::serialize(obj.input2_x, serializer);
-    serde::Serializable<decltype(obj.input2_y)>::serialize(obj.input2_y, serializer);
+    serde::Serializable<decltype(obj.input1)>::serialize(obj.input1, serializer);
+    serde::Serializable<decltype(obj.input2)>::serialize(obj.input2, serializer);
     serde::Serializable<decltype(obj.outputs)>::serialize(obj.outputs, serializer);
 }
 
@@ -2801,10 +2835,8 @@ template <>
 template <typename Deserializer>
 Program::BlackBoxFuncCall::EmbeddedCurveAdd serde::Deserializable<Program::BlackBoxFuncCall::EmbeddedCurveAdd>::deserialize(Deserializer &deserializer) {
     Program::BlackBoxFuncCall::EmbeddedCurveAdd obj;
-    obj.input1_x = serde::Deserializable<decltype(obj.input1_x)>::deserialize(deserializer);
-    obj.input1_y = serde::Deserializable<decltype(obj.input1_y)>::deserialize(deserializer);
-    obj.input2_x = serde::Deserializable<decltype(obj.input2_x)>::deserialize(deserializer);
-    obj.input2_y = serde::Deserializable<decltype(obj.input2_y)>::deserialize(deserializer);
+    obj.input1 = serde::Deserializable<decltype(obj.input1)>::deserialize(deserializer);
+    obj.input2 = serde::Deserializable<decltype(obj.input2)>::deserialize(deserializer);
     obj.outputs = serde::Deserializable<decltype(obj.outputs)>::deserialize(deserializer);
     return obj;
 }
@@ -3871,8 +3903,10 @@ namespace Program {
     inline bool operator==(const BlackBoxOp::EmbeddedCurveAdd &lhs, const BlackBoxOp::EmbeddedCurveAdd &rhs) {
         if (!(lhs.input1_x == rhs.input1_x)) { return false; }
         if (!(lhs.input1_y == rhs.input1_y)) { return false; }
+        if (!(lhs.input1_infinite == rhs.input1_infinite)) { return false; }
         if (!(lhs.input2_x == rhs.input2_x)) { return false; }
         if (!(lhs.input2_y == rhs.input2_y)) { return false; }
+        if (!(lhs.input2_infinite == rhs.input2_infinite)) { return false; }
         if (!(lhs.result == rhs.result)) { return false; }
         return true;
     }
@@ -3899,8 +3933,10 @@ template <typename Serializer>
 void serde::Serializable<Program::BlackBoxOp::EmbeddedCurveAdd>::serialize(const Program::BlackBoxOp::EmbeddedCurveAdd &obj, Serializer &serializer) {
     serde::Serializable<decltype(obj.input1_x)>::serialize(obj.input1_x, serializer);
     serde::Serializable<decltype(obj.input1_y)>::serialize(obj.input1_y, serializer);
+    serde::Serializable<decltype(obj.input1_infinite)>::serialize(obj.input1_infinite, serializer);
     serde::Serializable<decltype(obj.input2_x)>::serialize(obj.input2_x, serializer);
     serde::Serializable<decltype(obj.input2_y)>::serialize(obj.input2_y, serializer);
+    serde::Serializable<decltype(obj.input2_infinite)>::serialize(obj.input2_infinite, serializer);
     serde::Serializable<decltype(obj.result)>::serialize(obj.result, serializer);
 }
 
@@ -3910,8 +3946,10 @@ Program::BlackBoxOp::EmbeddedCurveAdd serde::Deserializable<Program::BlackBoxOp::EmbeddedCurveAdd>::deserialize(Deserializer &deserializer) {
     obj.input1_x = serde::Deserializable<decltype(obj.input1_x)>::deserialize(deserializer);
     obj.input1_y = serde::Deserializable<decltype(obj.input1_y)>::deserialize(deserializer);
+    obj.input1_infinite = serde::Deserializable<decltype(obj.input1_infinite)>::deserialize(deserializer);
     obj.input2_x = serde::Deserializable<decltype(obj.input2_x)>::deserialize(deserializer);
     obj.input2_y = serde::Deserializable<decltype(obj.input2_y)>::deserialize(deserializer);
+    obj.input2_infinite = serde::Deserializable<decltype(obj.input2_infinite)>::deserialize(deserializer);
     obj.result = serde::Deserializable<decltype(obj.result)>::deserialize(deserializer);
     return obj;
 }
@@ -4265,6 +4303,50 @@ Program::BlackBoxOp::Sha256Compression serde::Deserializable<Program::BlackBoxOp::Sha256Compression>::deserialize(Deserializer &deserializer) {
     return obj;
 }
 
+namespace Program {
+
+    inline std::vector<uint8_t> BlackBoxOp::ToRadix::bincodeSerialize() const {
+        auto serializer = serde::BincodeSerializer();
+        serde::Serializable<BlackBoxOp::ToRadix>::serialize(*this, serializer);
+        return std::move(serializer).bytes();
+    }
+
+    inline BlackBoxOp::ToRadix BlackBoxOp::ToRadix::bincodeDeserialize(std::vector<uint8_t> input) {
+        auto deserializer = serde::BincodeDeserializer(input);
+        auto value = serde::Deserializable<BlackBoxOp::ToRadix>::deserialize(deserializer);
+        if (deserializer.get_buffer_offset() < input.size()) {
+            throw serde::deserialization_error("Some input bytes were not read");
+        }
+        return value;
+    }
+
+} // end of namespace Program
+
+template <>
+template <typename Serializer>
+void serde::Serializable<Program::BlackBoxOp::ToRadix>::serialize(const Program::BlackBoxOp::ToRadix &obj, Serializer &serializer) {
+    serde::Serializable<decltype(obj.input)>::serialize(obj.input, serializer);
+    serde::Serializable<decltype(obj.radix)>::serialize(obj.radix, serializer);
+    serde::Serializable<decltype(obj.output)>::serialize(obj.output, serializer);
+}
+
+template <>
+template <typename Deserializer>
+Program::BlackBoxOp::ToRadix serde::Deserializable<Program::BlackBoxOp::ToRadix>::deserialize(Deserializer &deserializer) {
+    Program::BlackBoxOp::ToRadix obj;
+    obj.input = serde::Deserializable<decltype(obj.input)>::deserialize(deserializer);
+    obj.radix = serde::Deserializable<decltype(obj.radix)>::deserialize(deserializer);
+    obj.output = serde::Deserializable<decltype(obj.output)>::deserialize(deserializer);
+    return obj;
+}
+
 namespace Program {
 
     inline bool operator==(const BlockId &lhs, const BlockId &rhs) {
@@ -4307,6 +4389,153 @@ Program::BlockId serde::Deserializable<Program::BlockId>::deserialize(Deserializer &deserializer) {
     return obj;
 }
 
+namespace Program {
+
+    inline bool operator==(const BlockType &lhs, const BlockType &rhs) {
+        if (!(lhs.value == rhs.value)) { return false; }
+        return true;
+    }
+
+    inline std::vector<uint8_t> BlockType::bincodeSerialize() const {
+        auto serializer = serde::BincodeSerializer();
+        serde::Serializable<BlockType>::serialize(*this, serializer);
+        return std::move(serializer).bytes();
+    }
+
+    inline BlockType BlockType::bincodeDeserialize(std::vector<uint8_t> input) {
+        auto deserializer = serde::BincodeDeserializer(input);
+        auto value = serde::Deserializable<BlockType>::deserialize(deserializer);
+        if (deserializer.get_buffer_offset() < input.size()) {
+            throw serde::deserialization_error("Some input bytes were not read");
+        }
+        return value;
+    }
+
+} // end of namespace Program
+
+template <>
+template <typename Serializer>
+void serde::Serializable<Program::BlockType>::serialize(const Program::BlockType &obj, Serializer &serializer) {
+    serializer.increase_container_depth();
+    serde::Serializable<decltype(obj.value)>::serialize(obj.value, serializer);
+    serializer.decrease_container_depth();
+}
+
+template <>
+template <typename Deserializer>
+Program::BlockType serde::Deserializable<Program::BlockType>::deserialize(Deserializer &deserializer) {
+    deserializer.increase_container_depth();
+    Program::BlockType obj;
+    obj.value = serde::Deserializable<decltype(obj.value)>::deserialize(deserializer);
+    deserializer.decrease_container_depth();
+    return obj;
+}
+
+namespace Program {
+
+    inline bool operator==(const BlockType::Memory &lhs, const BlockType::Memory &rhs) {
+        return true;
+    }
+
+    inline std::vector<uint8_t> BlockType::Memory::bincodeSerialize() const {
+        auto serializer = serde::BincodeSerializer();
+        serde::Serializable<BlockType::Memory>::serialize(*this, serializer);
+        return std::move(serializer).bytes();
+    }
+
+    inline BlockType::Memory BlockType::Memory::bincodeDeserialize(std::vector<uint8_t> input) {
+        auto deserializer = serde::BincodeDeserializer(input);
+        auto value = serde::Deserializable<BlockType::Memory>::deserialize(deserializer);
+        if (deserializer.get_buffer_offset() < input.size()) {
+            throw serde::deserialization_error("Some input bytes were not read");
+        }
+        return value;
+    }
+
+} // end of namespace Program
+
+template <>
+template <typename Serializer>
+void serde::Serializable<Program::BlockType::Memory>::serialize(const Program::BlockType::Memory &obj, Serializer &serializer) {
+}
+
+template <>
+template <typename Deserializer>
+Program::BlockType::Memory serde::Deserializable<Program::BlockType::Memory>::deserialize(Deserializer &deserializer) {
+    Program::BlockType::Memory obj;
+    return obj;
+}
+
+namespace Program {
+
+    inline bool operator==(const BlockType::CallData &lhs, const BlockType::CallData &rhs) {
+        return true;
+    }
+
+    inline std::vector<uint8_t> BlockType::CallData::bincodeSerialize() const {
+        auto serializer = serde::BincodeSerializer();
+        serde::Serializable<BlockType::CallData>::serialize(*this, serializer);
+        return std::move(serializer).bytes();
+    }
+
+    inline BlockType::CallData BlockType::CallData::bincodeDeserialize(std::vector<uint8_t> input) {
+        auto deserializer = serde::BincodeDeserializer(input);
+        auto value = serde::Deserializable<BlockType::CallData>::deserialize(deserializer);
+        if (deserializer.get_buffer_offset() < input.size()) {
+            throw serde::deserialization_error("Some input bytes were not read");
+        }
+        return value;
+    }
+
+} // end of namespace Program
+
+template <>
+template <typename Serializer>
+void serde::Serializable<Program::BlockType::CallData>::serialize(const Program::BlockType::CallData &obj, Serializer &serializer) {
+}
+
+template <>
+template <typename Deserializer>
+Program::BlockType::CallData serde::Deserializable<Program::BlockType::CallData>::deserialize(Deserializer &deserializer) {
+    Program::BlockType::CallData obj;
+    return obj;
+}
+
+namespace Program {
+
+    inline bool operator==(const BlockType::ReturnData &lhs, const BlockType::ReturnData &rhs) {
+        return true;
+    }
+
+    inline std::vector<uint8_t> BlockType::ReturnData::bincodeSerialize() const {
+        auto serializer = serde::BincodeSerializer();
+        serde::Serializable<BlockType::ReturnData>::serialize(*this, serializer);
+        return std::move(serializer).bytes();
+    }
+
+    inline BlockType::ReturnData BlockType::ReturnData::bincodeDeserialize(std::vector<uint8_t> input) {
+        auto deserializer = serde::BincodeDeserializer(input);
+        auto value = serde::Deserializable<BlockType::ReturnData>::deserialize(deserializer);
+        if (deserializer.get_buffer_offset() < input.size()) {
+            throw serde::deserialization_error("Some input bytes were not read");
+        }
+        return value;
+    }
+
+} // end of namespace Program
+
+template <>
+template <typename Serializer>
+void serde::Serializable<Program::BlockType::ReturnData>::serialize(const Program::BlockType::ReturnData &obj, Serializer &serializer) {
+}
+
+template <>
+template <typename Deserializer>
+Program::BlockType::ReturnData serde::Deserializable<Program::BlockType::ReturnData>::deserialize(Deserializer &deserializer) {
+    Program::BlockType::ReturnData obj;
+    return obj;
+}
+
 namespace Program {
 
     inline bool operator==(const BrilligBytecode &lhs, const BrilligBytecode &rhs) {
@@ -6443,6 +6672,7 @@ namespace Program {
     inline bool operator==(const Opcode::MemoryInit &lhs, const Opcode::MemoryInit &rhs) {
         if (!(lhs.block_id == rhs.block_id)) { return false; }
         if (!(lhs.init == rhs.init)) { return false; }
+        if (!(lhs.block_type == rhs.block_type)) { return false; }
         return true;
     }
 
@@ -6468,6 +6698,7 @@ template <typename Serializer>
 void serde::Serializable<Program::Opcode::MemoryInit>::serialize(const Program::Opcode::MemoryInit &obj, Serializer &serializer) {
     serde::Serializable<decltype(obj.block_id)>::serialize(obj.block_id, serializer);
     serde::Serializable<decltype(obj.init)>::serialize(obj.init, serializer);
+    serde::Serializable<decltype(obj.block_type)>::serialize(obj.block_type, serializer);
 }
 
 template <>
@@ -6476,6 +6707,7 @@ Program::Opcode::MemoryInit serde::Deserializable<Program::Opcode::MemoryInit>::deserialize(Deserializer &deserializer) {
     Program::Opcode::MemoryInit obj;
     obj.block_id = serde::Deserializable<decltype(obj.block_id)>::deserialize(deserializer);
     obj.init = serde::Deserializable<decltype(obj.init)>::deserialize(deserializer);
+    obj.block_type = serde::Deserializable<decltype(obj.block_type)>::deserialize(deserializer);
     return obj;
 }
diff --git a/acvm-repo/acir/src/circuit/opcodes.rs b/acvm-repo/acir/src/circuit/opcodes.rs
index 7db317c41ab..e6dc11dac78 100644
--- a/acvm-repo/acir/src/circuit/opcodes.rs
+++ b/acvm-repo/acir/src/circuit/opcodes.rs
@@ -11,6 +11,13 @@ mod memory_operation;
 pub use black_box_function_call::{BlackBoxFuncCall, FunctionInput};
 pub use memory_operation::{BlockId, MemOp};
 
+#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub enum BlockType {
+    Memory,
+    CallData,
+    ReturnData,
+}
+
 #[allow(clippy::large_enum_variant)]
 #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub enum Opcode {
@@ -30,6 +37,7 @@ pub enum Opcode {
     MemoryInit {
         block_id: BlockId,
         init: Vec<Witness>,
+        block_type: BlockType,
     },
     /// Calls to unconstrained functions
     BrilligCall {
@@ -103,8 +111,12 @@ impl std::fmt::Display for Opcode {
                     write!(f, "(id: {}, op {} at: {}) ", block_id.0, op.operation, op.index)
                 }
             }
-            Opcode::MemoryInit { block_id, init } => {
-                write!(f, "INIT ")?;
+            Opcode::MemoryInit { block_id, init, block_type: databus } => {
+                match databus {
+                    BlockType::Memory => write!(f, "INIT ")?,
+                    BlockType::CallData => write!(f, "INIT CALLDATA ")?,
+                    BlockType::ReturnData => write!(f, "INIT RETURNDATA ")?,
+                }
                 write!(f, "(id: {}, len: {}) ", block_id.0, init.len())
             }
             // We keep the display for a BrilligCall and circuit Call separate as they
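Aside (not part of the patch): after this change every `MemoryInit` opcode carries a `BlockType`, letting backends distinguish plain memory blocks from databus call-data/return-data blocks. A minimal sketch of constructing the new opcode, using the same imports as the tests in this diff and hypothetical witness indices:

    use acir::circuit::{opcodes::{BlockId, BlockType}, Opcode};
    use acir::native_types::Witness;

    // Initialize block 0 from two witnesses. `BlockType::Memory` preserves the
    // old behaviour; `CallData`/`ReturnData` mark the block as databus storage.
    let init = Opcode::MemoryInit {
        block_id: BlockId(0),
        init: vec![Witness(1), Witness(2)],
        block_type: BlockType::Memory,
    };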
diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs
index 115a33c1c9d..b0e77b15c2c 100644
--- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs
+++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs
@@ -89,14 +89,12 @@ pub enum BlackBoxFuncCall {
     MultiScalarMul {
         points: Vec<FunctionInput>,
         scalars: Vec<FunctionInput>,
-        outputs: (Witness, Witness),
+        outputs: (Witness, Witness, Witness),
     },
     EmbeddedCurveAdd {
-        input1_x: FunctionInput,
-        input1_y: FunctionInput,
-        input2_x: FunctionInput,
-        input2_y: FunctionInput,
-        outputs: (Witness, Witness),
+        input1: Box<[FunctionInput; 3]>,
+        input2: Box<[FunctionInput; 3]>,
+        outputs: (Witness, Witness, Witness),
     },
     Keccak256 {
         inputs: Vec<FunctionInput>,
@@ -245,9 +243,9 @@ impl BlackBoxFuncCall {
                 inputs.extend(scalars.iter().copied());
                 inputs
             }
-            BlackBoxFuncCall::EmbeddedCurveAdd {
-                input1_x, input1_y, input2_x, input2_y, ..
-            } => vec![*input1_x, *input1_y, *input2_x, *input2_y],
+            BlackBoxFuncCall::EmbeddedCurveAdd { input1, input2, .. } => {
+                vec![input1[0], input1[1], input2[0], input2[1]]
+            }
             BlackBoxFuncCall::RANGE { input } => vec![*input],
             BlackBoxFuncCall::SchnorrVerify {
                 public_key_x,
@@ -343,9 +341,11 @@ impl BlackBoxFuncCall {
             | BlackBoxFuncCall::EcdsaSecp256k1 { output, .. }
             | BlackBoxFuncCall::PedersenHash { output, .. }
             | BlackBoxFuncCall::EcdsaSecp256r1 { output, .. } => vec![*output],
+            BlackBoxFuncCall::PedersenCommitment { outputs, .. } => vec![outputs.0, outputs.1],
             BlackBoxFuncCall::MultiScalarMul { outputs, .. }
-            | BlackBoxFuncCall::PedersenCommitment { outputs, .. }
-            | BlackBoxFuncCall::EmbeddedCurveAdd { outputs, .. } => vec![outputs.0, outputs.1],
+            | BlackBoxFuncCall::EmbeddedCurveAdd { outputs, .. } => {
+                vec![outputs.0, outputs.1, outputs.2]
+            }
             BlackBoxFuncCall::RANGE { .. }
             | BlackBoxFuncCall::RecursiveAggregation { .. }
             | BlackBoxFuncCall::BigIntFromLeBytes { .. }
diff --git a/acvm-repo/acir/src/lib.rs b/acvm-repo/acir/src/lib.rs
index 24f27aae06f..f60f1b46b6a 100644
--- a/acvm-repo/acir/src/lib.rs
+++ b/acvm-repo/acir/src/lib.rs
@@ -41,7 +41,7 @@ mod reflection {
     circuit::{
         brillig::{BrilligInputs, BrilligOutputs},
         directives::Directive,
-        opcodes::BlackBoxFuncCall,
+        opcodes::{BlackBoxFuncCall, BlockType},
         AssertionPayload, Circuit, ExpressionOrMemory, ExpressionWidth, Opcode,
         OpcodeLocation, Program,
     },
@@ -60,6 +60,7 @@ mod reflection {
     };
 
     let mut tracer = Tracer::new(TracerConfig::default());
+    tracer.trace_simple_type::<BlockType>().unwrap();
     tracer.trace_simple_type::<BlackBoxFuncCall>().unwrap();
     tracer.trace_simple_type::<BrilligInputs>().unwrap();
     tracer.trace_simple_type::<BrilligOutputs>().unwrap();
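A note on the new shape (sketch, not from the patch): each curve point input to `EmbeddedCurveAdd` is now a boxed `[FunctionInput; 3]` of (x, y, is_infinite), and the output gains a third witness for the infinity flag. A hypothetical construction, with witness numbering and bit sizes chosen purely for illustration:

    use acir::circuit::opcodes::{BlackBoxFuncCall, FunctionInput};
    use acir::native_types::Witness;

    let add = BlackBoxFuncCall::EmbeddedCurveAdd {
        input1: Box::new([
            FunctionInput { witness: Witness(1), num_bits: 254 }, // x1
            FunctionInput { witness: Witness(2), num_bits: 254 }, // y1
            FunctionInput { witness: Witness(3), num_bits: 1 },   // is_infinite flag
        ]),
        input2: Box::new([
            FunctionInput { witness: Witness(4), num_bits: 254 }, // x2
            FunctionInput { witness: Witness(5), num_bits: 254 }, // y2
            FunctionInput { witness: Witness(6), num_bits: 1 },   // is_infinite flag
        ]),
        // (x, y, is_infinite) of the sum.
        outputs: (Witness(7), Witness(8), Witness(9)),
    };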
diff --git a/acvm-repo/acir/tests/test_program_serialization.rs b/acvm-repo/acir/tests/test_program_serialization.rs
index d9327f784e6..19e4beb6158 100644
--- a/acvm-repo/acir/tests/test_program_serialization.rs
+++ b/acvm-repo/acir/tests/test_program_serialization.rs
@@ -63,19 +63,26 @@ fn multi_scalar_mul_circuit() {
         points: vec![
             FunctionInput { witness: Witness(1), num_bits: 128 },
             FunctionInput { witness: Witness(2), num_bits: 128 },
+            FunctionInput { witness: Witness(3), num_bits: 1 },
         ],
         scalars: vec![
-            FunctionInput { witness: Witness(3), num_bits: 128 },
             FunctionInput { witness: Witness(4), num_bits: 128 },
+            FunctionInput { witness: Witness(5), num_bits: 128 },
         ],
-        outputs: (Witness(5), Witness(6)),
+        outputs: (Witness(6), Witness(7), Witness(8)),
     });
 
     let circuit = Circuit {
-        current_witness_index: 7,
+        current_witness_index: 9,
         opcodes: vec![multi_scalar_mul],
-        private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3), Witness(4)]),
-        return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(5), Witness(6)])),
+        private_parameters: BTreeSet::from([
+            Witness(1),
+            Witness(2),
+            Witness(3),
+            Witness(4),
+            Witness(5),
+        ]),
+        return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(6), Witness(7), Witness(8)])),
         ..Circuit::default()
     };
     let program = Program { functions: vec![circuit], unconstrained_functions: vec![] };
@@ -83,10 +90,10 @@ fn multi_scalar_mul_circuit() {
     let bytes = Program::serialize_program(&program);
 
     let expected_serialization: Vec<u8> = vec![
-        31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 186, 244, 104, 159,
-        30, 45, 218, 136, 141, 33, 40, 186, 93, 76, 208, 57, 31, 93, 96, 136, 47, 250, 146, 188,
-        209, 39, 181, 131, 131, 187, 148, 110, 240, 246, 101, 38, 63, 180, 243, 97, 3, 125, 173,
-        118, 131, 153, 0, 0, 0,
+        31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 141, 219, 10, 0, 32, 8, 67, 243, 214, 5, 250, 232,
+        62, 189, 69, 123, 176, 132, 195, 116, 50, 149, 114, 107, 0, 97, 127, 116, 2, 75, 243, 2,
+        74, 53, 122, 202, 189, 211, 15, 106, 5, 13, 116, 238, 35, 221, 81, 230, 61, 249, 37, 253,
+        250, 179, 79, 109, 218, 22, 67, 227, 173, 0, 0, 0,
     ];
 
     assert_eq!(bytes, expected_serialization)
@@ -347,7 +354,11 @@ fn complex_brillig_foreign_call() {
 fn memory_op_circuit() {
     let init = vec![Witness(1), Witness(2)];
 
-    let memory_init = Opcode::MemoryInit { block_id: BlockId(0), init };
+    let memory_init = Opcode::MemoryInit {
+        block_id: BlockId(0),
+        init,
+        block_type: acir::circuit::opcodes::BlockType::Memory,
+    };
     let write = Opcode::MemoryOp {
         block_id: BlockId(0),
         op: MemOp::write_to_mem_index(FieldElement::from(1u128).into(), Witness(3).into()),
@@ -371,11 +382,11 @@ fn memory_op_circuit() {
     let bytes = Program::serialize_program(&program);
 
     let expected_serialization: Vec<u8> = vec![
-        31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250,
-        255, 171, 10, 154, 16, 210, 45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22,
-        112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41, 216, 151, 227, 188, 52, 187, 92,
-        253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188, 16,
-        35, 3, 0, 0,
+        31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250,
+        255, 171, 10, 82, 176, 232, 150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102,
+        229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104, 217, 190, 24, 236, 75, 113, 94, 146,
+        93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89, 11, 161,
+        73, 39, 3, 0, 0,
     ];
 
     assert_eq!(bytes, expected_serialization)
diff --git a/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs b/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs
index ee35385fa81..0b52ae295a2 100644
--- a/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs
+++ b/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs
@@ -11,7 +11,7 @@ pub(super) fn multi_scalar_mul(
     initial_witness: &mut WitnessMap,
     points: &[FunctionInput],
     scalars: &[FunctionInput],
-    outputs: (Witness, Witness),
+    outputs: (Witness, Witness, Witness),
 ) -> Result<(), OpcodeResolutionError> {
     let points: Result<Vec<_>, _> =
         points.iter().map(|input| witness_to_value(initial_witness, input.witness)).collect();
@@ -19,35 +19,44 @@ pub(super) fn multi_scalar_mul(
     let scalars: Result<Vec<_>, _> =
         scalars.iter().map(|input| witness_to_value(initial_witness, input.witness)).collect();
 
-    let scalars: Vec<_> = scalars?.into_iter().cloned().collect();
-
+    let mut scalars_lo = Vec::new();
+    let mut scalars_hi = Vec::new();
+    for (i, scalar) in scalars?.into_iter().enumerate() {
+        if i % 2 == 0 {
+            scalars_lo.push(*scalar);
+        } else {
+            scalars_hi.push(*scalar);
+        }
+    }
     // Call the backend's multi-scalar multiplication function
-    let (res_x, res_y) = backend.multi_scalar_mul(&points, &scalars)?;
+    let (res_x, res_y, is_infinite) =
+        backend.multi_scalar_mul(&points, &scalars_lo, &scalars_hi)?;
 
     // Insert the resulting point into the witness map
     insert_value(&outputs.0, res_x, initial_witness)?;
     insert_value(&outputs.1, res_y, initial_witness)?;
-
+    insert_value(&outputs.2, is_infinite, initial_witness)?;
     Ok(())
 }
 
 pub(super) fn embedded_curve_add(
     backend: &impl BlackBoxFunctionSolver,
     initial_witness: &mut WitnessMap,
-    input1_x: FunctionInput,
-    input1_y: FunctionInput,
-    input2_x: FunctionInput,
-    input2_y: FunctionInput,
-    outputs: (Witness, Witness),
+    input1: [FunctionInput; 3],
+    input2: [FunctionInput; 3],
+    outputs: (Witness, Witness, Witness),
 ) -> Result<(), OpcodeResolutionError> {
-    let input1_x = witness_to_value(initial_witness, input1_x.witness)?;
-    let input1_y = witness_to_value(initial_witness, input1_y.witness)?;
-    let input2_x = witness_to_value(initial_witness, input2_x.witness)?;
-    let input2_y = witness_to_value(initial_witness, input2_y.witness)?;
-    let (res_x, res_y) = backend.ec_add(input1_x, input1_y, input2_x, input2_y)?;
+    let input1_x = witness_to_value(initial_witness, input1[0].witness)?;
+    let input1_y = witness_to_value(initial_witness, input1[1].witness)?;
+    let input1_infinite = witness_to_value(initial_witness, input1[2].witness)?;
+    let input2_x = witness_to_value(initial_witness, input2[0].witness)?;
+    let input2_y = witness_to_value(initial_witness, input2[1].witness)?;
+    let input2_infinite = witness_to_value(initial_witness, input2[2].witness)?;
+    let (res_x, res_y, res_infinite) =
+        backend.ec_add(input1_x, input1_y, input1_infinite, input2_x, input2_y, input2_infinite)?;
 
     insert_value(&outputs.0, res_x, initial_witness)?;
     insert_value(&outputs.1, res_y, initial_witness)?;
-
+    insert_value(&outputs.2, res_infinite, initial_witness)?;
     Ok(())
 }
diff --git a/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/acvm-repo/acvm/src/pwg/blackbox/mod.rs
index a74f44b79dc..99ed09a52e4 100644
--- a/acvm-repo/acvm/src/pwg/blackbox/mod.rs
+++ b/acvm-repo/acvm/src/pwg/blackbox/mod.rs
@@ -164,16 +164,8 @@ pub(crate) fn solve(
         BlackBoxFuncCall::MultiScalarMul { points, scalars, outputs } => {
             multi_scalar_mul(backend, initial_witness, points, scalars, *outputs)
         }
-        BlackBoxFuncCall::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, outputs } => {
-            embedded_curve_add(
-                backend,
-                initial_witness,
-                *input1_x,
-                *input1_y,
-                *input2_x,
-                *input2_y,
-                *outputs,
-            )
+        BlackBoxFuncCall::EmbeddedCurveAdd { input1, input2, outputs } => {
+            embedded_curve_add(backend, initial_witness, **input1, **input2, *outputs)
         }
         // Recursive aggregation will be entirely handled by the backend and is not solved by the ACVM
         BlackBoxFuncCall::RecursiveAggregation { .. } => Ok(()),
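A note on the scalar encoding (sketch, not from the patch): ACIR passes each full-width scalar to the solver as two 128-bit limbs, interleaved as (lo, hi) pairs in the `scalars` input, which the code above deinterleaves. Splitting a field element into those limbs mirrors what the bn254 solver tests later in this diff do:

    use acir::FieldElement;

    // Split a 256-bit big-endian representation into 128-bit low/high limbs.
    fn to_limbs(scalar: FieldElement) -> (FieldElement, FieldElement) {
        let bytes = scalar.to_be_bytes();
        let lo = FieldElement::from_be_bytes_reduce(&bytes[16..32]);
        let hi = FieldElement::from_be_bytes_reduce(&bytes[0..16]);
        (lo, hi)
    }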
diff --git a/acvm-repo/acvm/src/pwg/mod.rs b/acvm-repo/acvm/src/pwg/mod.rs
index a4219adbfa6..f2649b93991 100644
--- a/acvm-repo/acvm/src/pwg/mod.rs
+++ b/acvm-repo/acvm/src/pwg/mod.rs
@@ -335,7 +335,7 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> {
                 &mut self.bigint_solver,
             ),
             Opcode::Directive(directive) => solve_directives(&mut self.witness_map, directive),
-            Opcode::MemoryInit { block_id, init } => {
+            Opcode::MemoryInit { block_id, init, .. } => {
                 let solver = self.block_solvers.entry(*block_id).or_default();
                 solver.init(init, &self.witness_map)
             }
diff --git a/acvm-repo/acvm/tests/solver.rs b/acvm-repo/acvm/tests/solver.rs
index df61083eee4..495389d7b3e 100644
--- a/acvm-repo/acvm/tests/solver.rs
+++ b/acvm-repo/acvm/tests/solver.rs
@@ -4,7 +4,7 @@ use acir::{
     brillig::{BinaryFieldOp, HeapArray, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray},
     circuit::{
         brillig::{BrilligBytecode, BrilligInputs, BrilligOutputs},
-        opcodes::{BlockId, MemOp},
+        opcodes::{BlockId, BlockType, MemOp},
         Opcode, OpcodeLocation,
     },
     native_types::{Expression, Witness, WitnessMap},
@@ -658,7 +658,11 @@ fn memory_operations() {
 
     let block_id = BlockId(0);
 
-    let init = Opcode::MemoryInit { block_id, init: (1..6).map(Witness).collect() };
+    let init = Opcode::MemoryInit {
+        block_id,
+        init: (1..6).map(Witness).collect(),
+        block_type: BlockType::Memory,
+    };
 
     let read_op = Opcode::MemoryOp {
         block_id,
diff --git a/acvm-repo/acvm_js/build.sh b/acvm-repo/acvm_js/build.sh
index 16fb26e55db..ee93413ab85 100755
--- a/acvm-repo/acvm_js/build.sh
+++ b/acvm-repo/acvm_js/build.sh
@@ -25,7 +25,7 @@ function run_if_available {
 require_command jq
 require_command cargo
 require_command wasm-bindgen
-require_command wasm-opt
+# require_command wasm-opt
 
 self_path=$(dirname "$(readlink -f "$0")")
 pname=$(cargo read-manifest | jq -r '.name')
diff --git a/acvm-repo/acvm_js/test/shared/memory_op.ts b/acvm-repo/acvm_js/test/shared/memory_op.ts
index 20ea88c7130..f7443c2258b 100644
--- a/acvm-repo/acvm_js/test/shared/memory_op.ts
+++ b/acvm-repo/acvm_js/test/shared/memory_op.ts
@@ -1,9 +1,9 @@
 // See `memory_op_circuit` integration test in `acir/tests/test_program_serialization.rs`.
 export const bytecode = Uint8Array.from([
-  31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250, 255, 171, 10, 154, 16, 210,
-  45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22, 112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41,
-  216, 151, 227, 188, 52, 187, 92, 253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188,
-  16, 35, 3, 0, 0,
+  31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, 255, 171, 10, 82, 176, 232,
+  150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102, 229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104,
+  217, 190, 24, 236, 75, 113, 94, 146, 93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89,
+  11, 161, 73, 39, 3, 0, 0,
 ]);
 
 export const initialWitnessMap = new Map([
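Aside (not part of the patch): these `bytecode` blobs are gzip-compressed serialized `Program`s — the leading `31, 139, 8` is the gzip magic — regenerated from the Rust serialization tests referenced in the comments. A rough sketch of recovering the raw serialized bytes, assuming the `flate2` crate this workspace already uses for program compression:

    use flate2::read::GzDecoder;
    use std::io::Read;

    // Decompress a serialized program blob back to the raw serialized bytes.
    fn gunzip(blob: &[u8]) -> std::io::Result<Vec<u8>> {
        let mut raw = Vec::new();
        GzDecoder::new(blob).read_to_end(&mut raw)?;
        Ok(raw)
    }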
diff --git a/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts b/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts
index 8ee0a067a3a..5401da76974 100644
--- a/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts
+++ b/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts
@@ -1,21 +1,24 @@
 // See `multi_scalar_mul_circuit` integration test in `acir/tests/test_program_serialization.rs`.
 export const bytecode = Uint8Array.from([
-  31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 186, 244, 104, 159, 30, 45, 218, 136, 141, 33,
-  40, 186, 93, 76, 208, 57, 31, 93, 96, 136, 47, 250, 146, 188, 209, 39, 181, 131, 131, 187, 148, 110, 240, 246, 101,
-  38, 63, 180, 243, 97, 3, 125, 173, 118, 131, 153, 0, 0, 0,
+  31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 141, 219, 10, 0, 32, 8, 67, 243, 214, 5, 250, 232, 62, 189, 69, 123, 176, 132,
+  195, 116, 50, 149, 114, 107, 0, 97, 127, 116, 2, 75, 243, 2, 74, 53, 122, 202, 189, 211, 15, 106, 5, 13, 116, 238, 35,
+  221, 81, 230, 61, 249, 37, 253, 250, 179, 79, 109, 218, 22, 67, 227, 173, 0, 0, 0,
 ]);
 
 export const initialWitnessMap = new Map([
   [1, '0x0000000000000000000000000000000000000000000000000000000000000001'],
   [2, '0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c'],
-  [3, '0x0000000000000000000000000000000000000000000000000000000000000001'],
-  [4, '0x0000000000000000000000000000000000000000000000000000000000000000'],
+  [3, '0x0000000000000000000000000000000000000000000000000000000000000000'],
+  [4, '0x0000000000000000000000000000000000000000000000000000000000000001'],
+  [5, '0x0000000000000000000000000000000000000000000000000000000000000000'],
 ]);
 
 export const expectedWitnessMap = new Map([
   [1, '0x0000000000000000000000000000000000000000000000000000000000000001'],
   [2, '0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c'],
-  [3, '0x0000000000000000000000000000000000000000000000000000000000000001'],
-  [4, '0x0000000000000000000000000000000000000000000000000000000000000000'],
-  [5, '0x0000000000000000000000000000000000000000000000000000000000000001'],
-  [6, '0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c'],
+  [3, '0x0000000000000000000000000000000000000000000000000000000000000000'],
+  [4, '0x0000000000000000000000000000000000000000000000000000000000000001'],
+  [5, '0x0000000000000000000000000000000000000000000000000000000000000000'],
+  [6, '0x0000000000000000000000000000000000000000000000000000000000000001'],
+  [7, '0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c'],
+  [8, '0x0000000000000000000000000000000000000000000000000000000000000000'],
 ]);
diff --git a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs
index 3403b0fe232..73f64d3d9d1 100644
--- a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs
+++ b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs
@@ -27,15 +27,18 @@ pub trait BlackBoxFunctionSolver {
     fn multi_scalar_mul(
         &self,
         points: &[FieldElement],
-        scalars: &[FieldElement],
-    ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>;
+        scalars_lo: &[FieldElement],
+        scalars_hi: &[FieldElement],
+    ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError>;
     fn ec_add(
         &self,
         input1_x: &FieldElement,
         input1_y: &FieldElement,
+        input1_infinite: &FieldElement,
         input2_x: &FieldElement,
         input2_y: &FieldElement,
-    ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>;
+        input2_infinite: &FieldElement,
+    ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError>;
     fn poseidon2_permutation(
         &self,
         _inputs: &[FieldElement],
@@ -81,17 +84,20 @@ impl BlackBoxFunctionSolver for StubbedBlackBoxSolver {
     fn multi_scalar_mul(
         &self,
         _points: &[FieldElement],
-        _scalars: &[FieldElement],
-    ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> {
+        _scalars_lo: &[FieldElement],
+        _scalars_hi: &[FieldElement],
+    ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> {
         Err(Self::fail(BlackBoxFunc::MultiScalarMul))
     }
     fn ec_add(
         &self,
         _input1_x: &FieldElement,
         _input1_y: &FieldElement,
+        _input1_infinite: &FieldElement,
         _input2_x: &FieldElement,
         _input2_y: &FieldElement,
-    ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> {
+        _input2_infinite: &FieldElement,
+    ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> {
         Err(Self::fail(BlackBoxFunc::EmbeddedCurveAdd))
     }
     fn poseidon2_permutation(
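To make the widened trait concrete, here is a minimal sketch (not from the patch; the function name is illustrative) of doubling a point through any solver, with the infinity flags passed as 0/1 field elements as the new `ec_add` signature requires:

    use acvm_blackbox_solver::{BlackBoxFunctionSolver, BlackBoxResolutionError};
    use acir::FieldElement;

    // Returns (x, y, is_infinite) for P + P.
    fn double_point(
        solver: &impl BlackBoxFunctionSolver,
        x: &FieldElement,
        y: &FieldElement,
    ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> {
        let finite = FieldElement::zero(); // 0 = not the point at infinity
        solver.ec_add(x, y, &finite, x, y, &finite)
    }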
diff --git a/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs b/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs
index 3f6d2ac86c1..901eb9d5a0f 100644
--- a/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs
+++ b/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs
@@ -10,9 +10,11 @@ use crate::BlackBoxResolutionError;
 /// Performs multi scalar multiplication of points with scalars.
 pub fn multi_scalar_mul(
     points: &[FieldElement],
-    scalars: &[FieldElement],
-) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> {
-    if points.len() != scalars.len() {
+    scalars_lo: &[FieldElement],
+    scalars_hi: &[FieldElement],
+) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> {
+    if points.len() != 3 * scalars_lo.len() || scalars_lo.len() != scalars_hi.len() {
+        dbg!(&points.len(), &scalars_lo.len(), &scalars_hi.len());
         return Err(BlackBoxResolutionError::Failed(
             BlackBoxFunc::MultiScalarMul,
             "Points and scalars must have the same length".to_string(),
@@ -21,21 +23,22 @@ pub fn multi_scalar_mul(
 
     let mut output_point = grumpkin::SWAffine::zero();
 
-    for i in (0..points.len()).step_by(2) {
-        let point = create_point(points[i], points[i + 1])
-            .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::MultiScalarMul, e))?;
+    for i in (0..points.len()).step_by(3) {
+        let point =
+            create_point(points[i], points[i + 1], points[i + 2] == FieldElement::from(1_u128))
+                .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::MultiScalarMul, e))?;
 
-        let scalar_low: u128 = scalars[i].try_into_u128().ok_or_else(|| {
+        let scalar_low: u128 = scalars_lo[i / 3].try_into_u128().ok_or_else(|| {
             BlackBoxResolutionError::Failed(
                 BlackBoxFunc::MultiScalarMul,
-                format!("Limb {} is not less than 2^128", scalars[i].to_hex()),
+                format!("Limb {} is not less than 2^128", scalars_lo[i].to_hex()),
             )
         })?;
 
-        let scalar_high: u128 = scalars[i + 1].try_into_u128().ok_or_else(|| {
+        let scalar_high: u128 = scalars_hi[i / 3].try_into_u128().ok_or_else(|| {
             BlackBoxResolutionError::Failed(
                 BlackBoxFunc::MultiScalarMul,
-                format!("Limb {} is not less than 2^128", scalars[i + 1].to_hex()),
+                format!("Limb {} is not less than 2^128", scalars_hi[i].to_hex()),
             )
         })?;
 
@@ -59,25 +62,33 @@ pub fn multi_scalar_mul(
     }
 
     if let Some((out_x, out_y)) = output_point.xy() {
-        Ok((FieldElement::from_repr(*out_x), FieldElement::from_repr(*out_y)))
+        Ok((
+            FieldElement::from_repr(*out_x),
+            FieldElement::from_repr(*out_y),
+            FieldElement::from(output_point.is_zero() as u128),
+        ))
     } else {
-        Ok((FieldElement::zero(), FieldElement::zero()))
+        Ok((FieldElement::from(0_u128), FieldElement::from(0_u128), FieldElement::from(1_u128)))
     }
 }
 
 pub fn embedded_curve_add(
-    input1_x: FieldElement,
-    input1_y: FieldElement,
-    input2_x: FieldElement,
-    input2_y: FieldElement,
-) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> {
-    let point1 = create_point(input1_x, input1_y)
+    input1: [FieldElement; 3],
+    input2: [FieldElement; 3],
+) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> {
+    let point1 = create_point(input1[0], input1[1], input1[2] == FieldElement::one())
         .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::EmbeddedCurveAdd, e))?;
-    let point2 = create_point(input2_x, input2_y)
+    let point2 = create_point(input2[0], input2[1], input2[2] == FieldElement::one())
         .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::EmbeddedCurveAdd, e))?;
     let res = grumpkin::SWAffine::from(point1 + point2);
     if let Some((res_x, res_y)) = res.xy() {
-        Ok((FieldElement::from_repr(*res_x), FieldElement::from_repr(*res_y)))
+        Ok((
+            FieldElement::from_repr(*res_x),
+            FieldElement::from_repr(*res_y),
+            FieldElement::from(res.is_zero() as u128),
+        ))
+    } else if res.is_zero() {
+        Ok((FieldElement::from(0_u128), FieldElement::from(0_u128), FieldElement::from(1_u128)))
     } else {
         Err(BlackBoxResolutionError::Failed(
             BlackBoxFunc::EmbeddedCurveAdd,
@@ -86,7 +97,14 @@ pub fn embedded_curve_add(
     }
 }
 
-fn create_point(x: FieldElement, y: FieldElement) -> Result<grumpkin::SWAffine, String> {
+fn create_point(
+    x: FieldElement,
+    y: FieldElement,
+    is_infinite: bool,
+) -> Result<grumpkin::SWAffine, String> {
+    if is_infinite {
+        return Ok(grumpkin::SWAffine::zero());
+    }
     let point = grumpkin::SWAffine::new_unchecked(x.into_repr(), y.into_repr());
     if !point.is_on_curve() {
         return Err(format!("Point ({}, {}) is not on curve", x.to_hex(), y.to_hex()));
@@ -103,11 +121,11 @@ mod tests {
 
     use super::*;
 
-    fn get_generator() -> [FieldElement; 2] {
+    fn get_generator() -> [FieldElement; 3] {
         let generator = grumpkin::SWAffine::generator();
         let generator_x = FieldElement::from_repr(*generator.x().unwrap());
         let generator_y = FieldElement::from_repr(*generator.y().unwrap());
-        [generator_x, generator_y]
+        [generator_x, generator_y, FieldElement::zero()]
     }
 
     #[test]
@@ -115,7 +133,7 @@ mod tests {
         // We check that multiplying 1 by generator results in the generator
         let generator = get_generator();
 
-        let res = multi_scalar_mul(&generator, &[FieldElement::one(), FieldElement::zero()])?;
+        let res = multi_scalar_mul(&generator, &[FieldElement::one()], &[FieldElement::zero()])?;
 
         assert_eq!(generator[0], res.0);
         assert_eq!(generator[1], res.1);
@@ -125,9 +143,10 @@ mod tests {
     #[test]
     fn low_high_smoke_test() -> Result<(), BlackBoxResolutionError> {
         let points = get_generator();
-        let scalars = [FieldElement::one(), FieldElement::from(2u128)];
+        let scalars_lo = [FieldElement::one()];
+        let scalars_hi = [FieldElement::from(2u128)];
 
-        let res = multi_scalar_mul(&points, &scalars)?;
+        let res = multi_scalar_mul(&points, &scalars_lo, &scalars_hi)?;
         let x = "0702ab9c7038eeecc179b4f209991bcb68c7cb05bf4c532d804ccac36199c9a9";
         let y = "23f10e9e43a3ae8d75d24154e796aae12ae7af546716e8f81a2564f1b5814130";
 
@@ -148,10 +167,10 @@ mod tests {
             "Limb 0000000000000000000000000000000100000000000000000000000000000000 is not less than 2^128".into(),
         ));
 
-        let res = multi_scalar_mul(&points, &[FieldElement::one(), invalid_limb]);
+        let res = multi_scalar_mul(&points, &[FieldElement::one()], &[invalid_limb]);
         assert_eq!(res, expected_error);
 
-        let res = multi_scalar_mul(&points, &[invalid_limb, FieldElement::one()]);
+        let res = multi_scalar_mul(&points, &[invalid_limb], &[FieldElement::one()]);
         assert_eq!(res, expected_error);
     }
 
@@ -162,7 +181,7 @@ mod tests {
         let low = FieldElement::from_be_bytes_reduce(&x[16..32]);
         let high = FieldElement::from_be_bytes_reduce(&x[0..16]);
 
-        let res = multi_scalar_mul(&get_generator(), &[low, high]);
+        let res = multi_scalar_mul(&get_generator(), &[low], &[high]);
 
         assert_eq!(
             res,
@@ -181,8 +200,9 @@ mod tests {
         let valid_scalar_high = FieldElement::zero();
 
         let res = multi_scalar_mul(
-            &[invalid_point_x, invalid_point_y],
-            &[valid_scalar_low, valid_scalar_high],
+            &[invalid_point_x, invalid_point_y, FieldElement::zero()],
+            &[valid_scalar_low],
+            &[valid_scalar_high],
         );
 
         assert_eq!(
@@ -197,9 +217,10 @@ mod tests {
     #[test]
     fn throws_on_args_length_mismatch() {
         let points = get_generator();
-        let scalars = [FieldElement::from(2u128)];
+        let scalars_lo = [FieldElement::from(2u128)];
+        let scalars_hi = [];
 
-        let res = multi_scalar_mul(&points, &scalars);
+        let res = multi_scalar_mul(&points, &scalars_lo, &scalars_hi);
 
         assert_eq!(
             res,
@@ -215,7 +236,10 @@ mod tests {
         let x = FieldElement::from(1u128);
         let y = FieldElement::from(2u128);
 
-        let res = embedded_curve_add(x, y, x, y);
+        let res = embedded_curve_add(
+            [x, y, FieldElement::from(0u128)],
+            [x, y, FieldElement::from(0u128)],
+        );
 
         assert_eq!(
             res,
@@ -229,10 +253,14 @@ mod tests {
     #[test]
     fn output_of_msm_matches_add() -> Result<(), BlackBoxResolutionError> {
         let points = get_generator();
-        let scalars = [FieldElement::from(2u128), FieldElement::zero()];
-
-        let msm_res = multi_scalar_mul(&points, &scalars)?;
-        let add_res = embedded_curve_add(points[0], points[1], points[0], points[1])?;
+        let scalars_lo = [FieldElement::from(2u128)];
+        let scalars_hi = [FieldElement::zero()];
+
+        let msm_res = multi_scalar_mul(&points, &scalars_lo, &scalars_hi)?;
+        let add_res = embedded_curve_add(
+            [points[0], points[1], FieldElement::from(0u128)],
+            [points[0], points[1], FieldElement::from(0u128)],
+        )?;
 
         assert_eq!(msm_res.0, add_res.0);
         assert_eq!(msm_res.1, add_res.1);
diff --git a/acvm-repo/bn254_blackbox_solver/src/lib.rs b/acvm-repo/bn254_blackbox_solver/src/lib.rs
index eebc65db141..43b86e083d5 100644
--- a/acvm-repo/bn254_blackbox_solver/src/lib.rs
+++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs
@@ -82,19 +82,25 @@ impl BlackBoxFunctionSolver for Bn254BlackBoxSolver {
     fn multi_scalar_mul(
         &self,
         points: &[FieldElement],
-        scalars: &[FieldElement],
-    ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> {
-        multi_scalar_mul(points, scalars)
+        scalars_lo: &[FieldElement],
+        scalars_hi: &[FieldElement],
+    ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> {
+        multi_scalar_mul(points, scalars_lo, scalars_hi)
     }
 
     fn ec_add(
         &self,
         input1_x: &FieldElement,
         input1_y: &FieldElement,
+        input1_infinite: &FieldElement,
         input2_x: &FieldElement,
         input2_y: &FieldElement,
-    ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> {
-        embedded_curve_add(*input1_x, *input1_y, *input2_x, *input2_y)
+        input2_infinite: &FieldElement,
+    ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> {
+        embedded_curve_add(
+            [*input1_x, *input1_y, *input1_infinite],
+            [*input2_x, *input2_y, *input2_infinite],
+        )
     }
 
     fn poseidon2_permutation(
diff --git a/acvm-repo/brillig/src/black_box.rs b/acvm-repo/brillig/src/black_box.rs
index 15abc19ed90..3887092a8c2 100644
--- a/acvm-repo/brillig/src/black_box.rs
+++ b/acvm-repo/brillig/src/black_box.rs
@@ -83,8 +83,10 @@ pub enum BlackBoxOp {
     EmbeddedCurveAdd {
         input1_x: MemoryAddress,
         input1_y: MemoryAddress,
+        input1_infinite: MemoryAddress,
         input2_x: MemoryAddress,
         input2_y: MemoryAddress,
+        input2_infinite: MemoryAddress,
         result: HeapArray,
     },
     BigIntAdd {
@@ -126,4 +128,9 @@ pub enum BlackBoxOp {
         hash_values: HeapVector,
         output: HeapArray,
     },
+    ToRadix {
+        input: MemoryAddress,
+        radix: u32,
+        output: HeapArray,
+    },
 }
diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs
index c999b5bf330..ebaa6976283 100644
--- a/acvm-repo/brillig_vm/src/black_box.rs
+++ b/acvm-repo/brillig_vm/src/black_box.rs
@@ -5,6 +5,7 @@ use acvm_blackbox_solver::{
     aes128_encrypt, blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256,
     keccakf1600, sha256, sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError,
 };
+use num_bigint::BigUint;
 
 use crate::memory::MemoryValue;
 use crate::Memory;
@@ -156,22 +157,63 @@ pub(crate) fn evaluate_black_box(
             Ok(())
         }
         BlackBoxOp::MultiScalarMul { points, scalars, outputs: result } => {
-            let points: Vec<FieldElement> =
-                read_heap_vector(memory, points).iter().map(|x| x.try_into().unwrap()).collect();
+            let points: Vec<FieldElement> = read_heap_vector(memory, points)
+                .iter()
+                .enumerate()
+                .map(|(i, x)| {
+                    if i % 3 == 2 {
+                        let is_infinite: bool = x.try_into().unwrap();
+                        FieldElement::from(is_infinite as u128)
+                    } else {
+                        x.try_into().unwrap()
+                    }
+                })
+                .collect();
             let scalars: Vec<FieldElement> =
                 read_heap_vector(memory, scalars).iter().map(|x| x.try_into().unwrap()).collect();
-
-            let (x, y) = solver.multi_scalar_mul(&points, &scalars)?;
-            memory.write_slice(memory.read_ref(result.pointer), &[x.into(), y.into()]);
+            let mut scalars_lo = Vec::with_capacity(scalars.len() / 2);
+            let mut scalars_hi = Vec::with_capacity(scalars.len() / 2);
+            for (i, scalar) in scalars.iter().enumerate() {
+                if i % 2 == 0 {
+                    scalars_lo.push(*scalar);
+                } else {
+                    scalars_hi.push(*scalar);
+                }
+            }
+            let (x, y, is_infinite) = solver.multi_scalar_mul(&points, &scalars_lo, &scalars_hi)?;
+            memory.write_slice(
+                memory.read_ref(result.pointer),
+                &[x.into(), y.into(), is_infinite.into()],
+            );
             Ok(())
         }
-        BlackBoxOp::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, result } => {
+        BlackBoxOp::EmbeddedCurveAdd {
+            input1_x,
+            input1_y,
+            input2_x,
+            input2_y,
+            result,
+            input1_infinite,
+            input2_infinite,
+        } => {
             let input1_x = memory.read(*input1_x).try_into().unwrap();
             let input1_y = memory.read(*input1_y).try_into().unwrap();
+            let input1_infinite: bool = memory.read(*input1_infinite).try_into().unwrap();
             let input2_x = memory.read(*input2_x).try_into().unwrap();
             let input2_y = memory.read(*input2_y).try_into().unwrap();
-            let (x, y) = solver.ec_add(&input1_x, &input1_y, &input2_x, &input2_y)?;
-            memory.write_slice(memory.read_ref(result.pointer), &[x.into(), y.into()]);
+            let input2_infinite: bool = memory.read(*input2_infinite).try_into().unwrap();
+            let (x, y, infinite) = solver.ec_add(
+                &input1_x,
+                &input1_y,
+                &input1_infinite.into(),
+                &input2_x,
+                &input2_y,
+                &input2_infinite.into(),
+            )?;
+            memory.write_slice(
+                memory.read_ref(result.pointer),
+                &[x.into(), y.into(), infinite.into()],
+            );
             Ok(())
         }
         BlackBoxOp::PedersenCommitment { inputs, domain_separator, output } => {
@@ -295,6 +337,25 @@ pub(crate) fn evaluate_black_box(
             memory.write_slice(memory.read_ref(output.pointer), &state);
             Ok(())
         }
+        BlackBoxOp::ToRadix { input, radix, output } => {
+            let input: FieldElement =
+                memory.read(*input).try_into().expect("ToRadix input not a field");
+
+            let mut input = BigUint::from_bytes_be(&input.to_be_bytes());
+            let radix = BigUint::from(*radix);
+
+            let mut limbs: Vec<MemoryValue> = Vec::with_capacity(output.size);
+
+            for _ in 0..output.size {
+                let limb = &input % &radix;
+                limbs.push(FieldElement::from_be_bytes_reduce(&limb.to_bytes_be()).into());
+                input /= &radix;
+            }
+
+            memory.write_slice(memory.read_ref(output.pointer), &limbs);
+
+            Ok(())
+        }
     }
 }
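The new `ToRadix` op computes little-endian radix limbs exactly as the loop above does. As a standalone sketch (assumptions: the `num-bigint` crate this file now imports; the function name is illustrative):

    use num_bigint::BigUint;

    // Decompose `value` into `num_limbs` little-endian digits in base `radix`.
    fn to_radix_limbs(mut value: BigUint, radix: u32, num_limbs: usize) -> Vec<BigUint> {
        let radix = BigUint::from(radix);
        let mut limbs = Vec::with_capacity(num_limbs);
        for _ in 0..num_limbs {
            limbs.push(&value % &radix);
            value /= &radix;
        }
        limbs
    }

For example, to_radix_limbs(BigUint::from(259u32), 256, 2) yields [3, 1], since 259 = 3 + 1 * 256.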
} } @@ -321,6 +382,7 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::BigIntToLeBytes { .. } => BlackBoxFunc::BigIntToLeBytes, BlackBoxOp::Poseidon2Permutation { .. } => BlackBoxFunc::Poseidon2Permutation, BlackBoxOp::Sha256Compression { .. } => BlackBoxFunc::Sha256Compression, + BlackBoxOp::ToRadix { .. } => unreachable!("ToRadix is not an ACIR BlackBoxFunc"), } } diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 17ae999fb8f..5326920511b 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -1,17 +1,20 @@ mod transforms; mod utils; +use noirc_errors::Location; use transforms::{ compute_note_hash_and_nullifier::inject_compute_note_hash_and_nullifier, contract_interface::{ generate_contract_interface, stub_function, update_fn_signatures_in_contract_interface, }, events::{generate_selector_impl, transform_events}, - functions::{export_fn_abi, transform_function, transform_unconstrained}, + functions::{ + check_for_public_args, export_fn_abi, transform_function, transform_unconstrained, + }, note_interface::{generate_note_interface_impl, inject_note_exports}, storage::{ assign_storage_slots, check_for_storage_definition, check_for_storage_implementation, - generate_storage_implementation, generate_storage_layout, + generate_storage_implementation, generate_storage_layout, inject_context_in_storage, }, }; @@ -64,6 +67,7 @@ fn transform( for submodule in ast.submodules.iter_mut().filter(|submodule| submodule.is_contract) { if transform_module( crate_id, + &file_id, context, &mut submodule.contents, submodule.name.0.contents.as_str(), @@ -84,6 +88,7 @@ fn transform( /// Returns true if an annotated node is found, false otherwise fn transform_module( crate_id: &CrateId, + file_id: &FileId, context: &HirContext, module: &mut SortedModule, module_name: &str, @@ -97,6 +102,7 @@ fn transform_module( let storage_defined = maybe_storage_struct_name.is_some(); if let Some(ref storage_struct_name) = maybe_storage_struct_name { + inject_context_in_storage(module)?; if !check_for_storage_implementation(module, storage_struct_name) { generate_storage_implementation(module, storage_struct_name)?; } @@ -132,6 +138,7 @@ fn transform_module( let mut is_initializer = false; let mut is_internal = false; let mut insert_init_check = has_initializer; + let mut is_static = false; for secondary_attribute in func.def.attributes.secondary.clone() { if is_custom_attribute(&secondary_attribute, "aztec(private)") { @@ -148,6 +155,9 @@ fn transform_module( } else if is_custom_attribute(&secondary_attribute, "aztec(public-vm)") { is_public_vm = true; } + if is_custom_attribute(&secondary_attribute, "aztec(view)") { + is_static = true; + } } // Apply transformations to the function based on collected attributes @@ -159,7 +169,8 @@ fn transform_module( } else { "Public" }; - stubs.push(stub_function(fn_type, func)); + let stub_src = stub_function(fn_type, func, is_static); + stubs.push((stub_src, Location { file: *file_id, span: func.name_ident().span() })); export_fn_abi(&mut module.types, func)?; transform_function( @@ -169,6 +180,7 @@ fn transform_module( is_initializer, insert_init_check, is_internal, + is_static, )?; has_transformed_module = true; } else if storage_defined && func.def.is_unconstrained { @@ -180,7 +192,7 @@ fn transform_module( if has_transformed_module { // We only want to run these checks if the macro processor has found the module to be an Aztec contract. 
- let private_functions_count = module + let private_functions: Vec<_> = module .functions .iter() .filter(|func| { @@ -190,9 +202,27 @@ fn transform_module( .iter() .any(|attr| is_custom_attribute(attr, "aztec(private)")) }) - .count(); + .collect(); + + let public_functions: Vec<_> = module + .functions + .iter() + .filter(|func| { + func.def + .attributes + .secondary + .iter() + .any(|attr| is_custom_attribute(attr, "aztec(public)")) + }) + .collect(); + + let private_function_count = private_functions.len(); + + check_for_public_args(&private_functions)?; + + check_for_public_args(&public_functions)?; - if private_functions_count > MAX_CONTRACT_PRIVATE_FUNCTIONS { + if private_function_count > MAX_CONTRACT_PRIVATE_FUNCTIONS { return Err(AztecMacroError::ContractHasTooManyPrivateFunctions { span: Span::default(), }); diff --git a/aztec_macros/src/transforms/contract_interface.rs b/aztec_macros/src/transforms/contract_interface.rs index 1afe0a30068..bb63357d251 100644 --- a/aztec_macros/src/transforms/contract_interface.rs +++ b/aztec_macros/src/transforms/contract_interface.rs @@ -1,4 +1,5 @@ -use noirc_frontend::ast::{NoirFunction, UnresolvedTypeData}; +use noirc_errors::Location; +use noirc_frontend::ast::{Ident, NoirFunction, UnresolvedTypeData}; use noirc_frontend::{ graph::CrateId, macros_api::{FileId, HirContext, HirExpression, HirLiteral, HirStatement}, @@ -39,7 +40,7 @@ use crate::utils::{ // } // // The selector placeholder has to be replaced with the actual function signature after type checking in the next macro pass -pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String { +pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call: bool) -> String { let fn_name = func.name().to_string(); let fn_parameters = func .parameters() @@ -59,6 +60,7 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String { let parameters = func.parameters(); let is_void = if matches!(fn_return_type.typ, UnresolvedTypeData::Unit) { "Void" } else { "" }; + let is_static = if is_static_call { "Static" } else { "" }; let return_type_hint = if is_void == "Void" { "".to_string() } else { @@ -75,7 +77,8 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String { for i in 0..{0}.len() {{ args_acc = args_acc.append(hash_{0}[i].as_slice()); }}\n", - param_name, typ.typ + param_name, + typ.typ.to_string().replace("plain::", "") ) } _ => { @@ -100,18 +103,18 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String { let fn_body = format!( "{} - dep::aztec::context::{}{}CallInterface {{ + dep::aztec::context::{}{}{}CallInterface {{ target_contract: self.target_contract, selector: {}, args_hash, }}", - args_hash, aztec_visibility, is_void, fn_selector, + args_hash, aztec_visibility, is_static, is_void, fn_selector, ); format!( - "pub fn {}(self, {}) -> dep::aztec::context::{}{}CallInterface{} {{ + "pub fn {}(self, {}) -> dep::aztec::context::{}{}{}CallInterface{} {{ {} }}", - fn_name, fn_parameters, aztec_visibility, is_void, return_type_hint, fn_body + fn_name, fn_parameters, aztec_visibility, is_static, is_void, return_type_hint, fn_body ) } else { let args = format!( @@ -122,19 +125,19 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String { ); let fn_body = format!( "{} - dep::aztec::context::Avm{}CallInterface {{ + dep::aztec::context::Avm{}{}CallInterface {{ target_contract: self.target_contract, selector: {}, args: args_acc, gas_opts: 
dep::aztec::context::gas::GasOpts::default(),
         }}",
-            args, is_void, fn_selector,
+            args, is_static, is_void, fn_selector,
         );
         format!(
-            "pub fn {}(self, {}) -> dep::aztec::context::Avm{}CallInterface{} {{
+            "pub fn {}(self, {}) -> dep::aztec::context::Avm{}{}CallInterface{} {{
                 {}
             }}",
-            fn_name, fn_parameters, is_void, return_type_hint, fn_body
+            fn_name, fn_parameters, is_static, is_void, return_type_hint, fn_body
         )
     }
 }
@@ -145,7 +148,7 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String {
 pub fn generate_contract_interface(
     module: &mut SortedModule,
     module_name: &str,
-    stubs: &[String],
+    stubs: &[(String, Location)],
 ) -> Result<(), AztecMacroError> {
     let contract_interface = format!(
         "
@@ -171,7 +174,7 @@ pub fn generate_contract_interface(
     }}
 ",
         module_name,
-        stubs.join("\n"),
+        stubs.iter().map(|(src, _)| src.to_owned()).collect::<Vec<_>>().join("\n"),
     );
 
     let (contract_interface_ast, errors) = parse_program(&contract_interface);
@@ -181,8 +184,27 @@
     }
 
     let mut contract_interface_ast = contract_interface_ast.into_sorted();
+    let mut impl_with_locations = contract_interface_ast.impls.pop().unwrap();
+
+    impl_with_locations.methods = impl_with_locations
+        .methods
+        .iter()
+        .enumerate()
+        .map(|(i, (method, orig_span))| {
+            if method.name() == "at" {
+                (method.clone(), *orig_span)
+            } else {
+                let (_, new_location) = stubs[i];
+                let mut modified_method = method.clone();
+                modified_method.def.name =
+                    Ident::new(modified_method.name().to_string(), new_location.span);
+                (modified_method, *orig_span)
+            }
+        })
+        .collect();
+
     module.types.push(contract_interface_ast.types.pop().unwrap());
-    module.impls.push(contract_interface_ast.impls.pop().unwrap());
+    module.impls.push(impl_with_locations);
     module.functions.push(contract_interface_ast.functions.pop().unwrap());
 
     Ok(())
diff --git a/aztec_macros/src/transforms/functions.rs b/aztec_macros/src/transforms/functions.rs
index 39d709ef520..00e9c444421 100644
--- a/aztec_macros/src/transforms/functions.rs
+++ b/aztec_macros/src/transforms/functions.rs
@@ -10,6 +10,7 @@ use noirc_frontend::ast::{
 
 use noirc_frontend::{macros_api::FieldElement, parse_program};
 
+use crate::utils::ast_utils::member_access;
 use crate::{
     chained_dep, chained_path,
     utils::{
@@ -33,6 +34,7 @@ pub fn transform_function(
     is_initializer: bool,
     insert_init_check: bool,
     is_internal: bool,
+    is_static: bool,
 ) -> Result<(), AztecMacroError> {
     let context_name = format!("{}Context", ty);
     let inputs_name = format!("{}ContextInputs", ty);
@@ -40,6 +42,12 @@
     let is_avm = ty == "Avm";
     let is_private = ty == "Private";
 
+    // Force a static context if the function is static
+    if is_static {
+        let is_static_check = create_static_check(func.name(), is_avm);
+        func.def.body.statements.insert(0, is_static_check);
+    }
+
     // Add check that msg sender equals this address and flag function as internal
     if is_internal {
         let is_internal_check = create_internal_check(func.name());
@@ -59,7 +67,7 @@
 
     // Add access to the storage struct
     if let Some(storage_struct_name) = storage_struct_name {
-        let storage_def = abstract_storage(storage_struct_name, &ty.to_lowercase(), false);
+        let storage_def = abstract_storage(storage_struct_name, false);
         func.def.body.statements.insert(0, storage_def);
     }
@@ -213,10 +221,7 @@ pub fn export_fn_abi(
 ///
 /// This will allow developers to access their contract's storage struct in unconstrained functions
 pub fn transform_unconstrained(func: &mut
NoirFunction, storage_struct_name: String) { - func.def - .body - .statements - .insert(0, abstract_storage(storage_struct_name, "Unconstrained", true)); + func.def.body.statements.insert(0, abstract_storage(storage_struct_name, true)); } /// Helper function that returns what the private context would look like in the ast @@ -275,6 +280,31 @@ fn create_mark_as_initialized(ty: &str) -> Statement { ))) } +/// Forces a static context for a function, ensuring that no state modifications are allowed +/// +/// ```noir +/// assert(context.inputs.call_context.is_static_call == true, "Function can only be called statically") +/// ``` +fn create_static_check(fname: &str, is_avm: bool) -> Statement { + let is_static_call_expr = if !is_avm { + ["inputs", "call_context", "is_static_call"] + .iter() + .fold(variable("context"), |acc, member| member_access(acc, member)) + } else { + ["inputs", "is_static_call"] + .iter() + .fold(variable("context"), |acc, member| member_access(acc, member)) + }; + make_statement(StatementKind::Constrain(ConstrainStatement( + make_eq(is_static_call_expr, expression(ExpressionKind::Literal(Literal::Bool(true)))), + Some(expression(ExpressionKind::Literal(Literal::Str(format!( + "Function {} can only be called statically", + fname + ))))), + ConstrainKind::Assert, + ))) +} + /// Creates a check for internal functions ensuring that the caller is self. /// /// ```noir @@ -564,7 +594,7 @@ fn abstract_return_values(func: &NoirFunction) -> Result>, /// ```noir /// #[aztec(private)] /// fn lol() { -/// let storage = Storage::init(Context::private(context)); +/// let storage = Storage::init(context); /// } /// ``` /// @@ -572,33 +602,28 @@ fn abstract_return_values(func: &NoirFunction) -> Result>, /// ```noir /// #[aztec(public)] /// fn lol() { -/// let storage = Storage::init(Context::public(context)); +/// let storage = Storage::init(context); /// } /// ``` /// /// For unconstrained functions: /// ```noir /// unconstrained fn lol() { -/// let storage = Storage::init(Context::none()); +/// let storage = Storage::init(()); /// } -fn abstract_storage(storage_struct_name: String, typ: &str, unconstrained: bool) -> Statement { - let init_context_call = if unconstrained { - call( - variable_path(chained_dep!("aztec", "context", "Context", "none")), // Path - vec![], // args - ) +fn abstract_storage(storage_struct_name: String, unconstrained: bool) -> Statement { + let context_expr = if unconstrained { + // Note that the literal unit type (i.e. 
'()') is not the same as a tuple with zero elements
+        expression(ExpressionKind::Literal(Literal::Unit))
     } else {
-        call(
-            variable_path(chained_dep!("aztec", "context", "Context", typ)), // Path
-            vec![mutable_reference("context")],                              // args
-        )
+        mutable_reference("context")
     };
 
     assignment(
         "storage", // Assigned to
         call(
             variable_path(chained_path!(storage_struct_name.as_str(), "init")), // Path
-            vec![init_context_call],                                            // args
+            vec![context_expr],                                                 // args
         ),
     )
 }
@@ -792,3 +817,18 @@ fn add_cast_to_hasher(identifier: &Ident, hasher_name: &str) -> Statement {
         vec![cast_operation], // args
     )))
 }
+
+/**
+ * Takes a vector of functions and checks for the presence of arguments with Public visibility
+ * Returns AztecMacroError::PublicArgsDisallowed if found
+ */
+pub fn check_for_public_args(functions: &[&NoirFunction]) -> Result<(), AztecMacroError> {
+    for func in functions {
+        for param in &func.def.parameters {
+            if param.visibility == Visibility::Public {
+                return Err(AztecMacroError::PublicArgsDisallowed { span: func.span() });
+            }
+        }
+    }
+    Ok(())
+}
diff --git a/aztec_macros/src/transforms/storage.rs b/aztec_macros/src/transforms/storage.rs
index 1e3cc011715..0a210934827 100644
--- a/aztec_macros/src/transforms/storage.rs
+++ b/aztec_macros/src/transforms/storage.rs
@@ -1,7 +1,7 @@
 use noirc_errors::Span;
 use noirc_frontend::ast::{
     BlockExpression, Expression, ExpressionKind, FunctionDefinition, Ident, Literal, NoirFunction,
-    NoirStruct, PathKind, Pattern, StatementKind, TypeImpl, UnresolvedType, UnresolvedTypeData,
+    NoirStruct, Pattern, StatementKind, TypeImpl, UnresolvedType, UnresolvedTypeData,
 };
 use noirc_frontend::{
     graph::CrateId,
@@ -16,7 +16,7 @@ use noirc_frontend::{
 };
 
 use crate::{
-    chained_dep, chained_path,
+    chained_path,
     utils::{
         ast_utils::{
             call, expression, ident, ident_path, is_custom_attribute, lambda, make_statement,
@@ -48,7 +48,58 @@ pub fn check_for_storage_definition(
     Ok(result.iter().map(|&r#struct| r#struct.name.0.contents.clone()).next())
 }
 
-// Check to see if the user has defined a storage struct
+// Injects the Context generic in each of the Storage struct fields to avoid boilerplate,
+// taking maps into account (including nested maps)
+fn inject_context_in_storage_field(field: &mut UnresolvedType) -> Result<(), AztecMacroError> {
+    match &mut field.typ {
+        UnresolvedTypeData::Named(path, generics, _) => {
+            generics.push(make_type(UnresolvedTypeData::Named(
+                ident_path("Context"),
+                vec![],
+                false,
+            )));
+            match path.segments.last().unwrap().0.contents.as_str() {
+                "Map" => inject_context_in_storage_field(&mut generics[1]),
+                _ => Ok(()),
+            }
+        }
+        _ => Err(AztecMacroError::CouldNotInjectContextGenericInStorage {
+            secondary_message: Some(format!("Unsupported type: {:?}", field.typ)),
+        }),
+    }
+}
+
+// Injects the Context generic in the storage struct to avoid boilerplate
+// Transforms this:
+// struct Storage {
+//     a_var: SomeStoragePrimitive<T>,
+//     a_map: Map<Field, SomeStoragePrimitive<T>>,
+// }
+//
+// Into this:
+//
+// struct Storage<Context> {
+//     a_var: SomeStoragePrimitive<T, Context>,
+//     a_map: Map<Field, SomeStoragePrimitive<T, Context>, Context>,
+// }
+pub fn inject_context_in_storage(module: &mut SortedModule) -> Result<(), AztecMacroError> {
+    let storage_struct = module
+        .types
+        .iter_mut()
+        .find(|r#struct| {
+            r#struct.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(storage)"))
+        })
+        .unwrap();
+    storage_struct.generics.push(ident("Context"));
+    storage_struct
+        .fields
+        .iter_mut()
+        .map(|(_, field)| inject_context_in_storage_field(field))
+        .collect::<Result<Vec<_>, _>>()?;
+    Ok(())
+}
+
+// Check to see if
the user has defined an impl for the storage struct pub fn check_for_storage_implementation( module: &SortedModule, storage_struct_name: &String, @@ -79,22 +130,22 @@ pub fn generate_storage_field_constructor( variable("context"), slot, lambda( + // This lambda will be equivalent to the following + // | context, slot | { T::new(context, slot) } + // Since the `new` function has type bindings for its arguments, we don't specify the types + // of either context nor slot, and avoid that way having to deal with the generic context + // type. vec![ - ( - pattern("context"), - make_type(UnresolvedTypeData::Named( - chained_dep!("aztec", "context", "Context"), - vec![], - true, - )), - ), + (pattern("context"), make_type(UnresolvedTypeData::Unspecified)), ( Pattern::Identifier(ident("slot")), - make_type(UnresolvedTypeData::FieldElement), + make_type(UnresolvedTypeData::Unspecified), ), ], generate_storage_field_constructor( - &(type_ident.clone(), generics.iter().last().unwrap().clone()), + // Map is expected to have three generic parameters: key, value and context (i.e. + // Map. Here `get(1)` fetches the value type. + &(type_ident.clone(), generics.get(1).unwrap().clone()), variable("slot"), )?, ), @@ -113,15 +164,15 @@ pub fn generate_storage_field_constructor( // Generates the Storage implementation block from the Storage struct definition if it does not exist /// From: /// -/// struct Storage { -/// a_map: Map>, -/// a_nested_map: Map>>, -/// a_field: SomeStoragePrimitive, +/// struct Storage { +/// a_map: Map, Context>, +/// a_nested_map: Map, Context>, Context>, +/// a_field: SomeStoragePrimitive, /// } /// /// To: /// -/// impl Storage { +/// impl Storage { /// fn init(context: Context) -> Self { /// Storage { /// a_map: Map::new(context, 0, |context, slot| { @@ -167,17 +218,15 @@ pub fn generate_storage_implementation( ExpressionKind::constructor((chained_path!(storage_struct_name), field_constructors)), ))); + // This is the type over which the impl is generic. 
+ let generic_context_ident = ident("Context"); + let generic_context_type = + make_type(UnresolvedTypeData::Named(ident_path("Context"), vec![], true)); + let init = NoirFunction::normal(FunctionDefinition::normal( &ident("init"), &vec![], - &[( - ident("context"), - make_type(UnresolvedTypeData::Named( - chained_dep!("aztec", "context", "Context"), - vec![], - true, - )), - )], + &[(ident("context"), generic_context_type.clone())], &BlockExpression { statements: vec![storage_constructor_statement] }, &[], &return_type(chained_path!("Self")), @@ -185,11 +234,16 @@ pub fn generate_storage_implementation( let storage_impl = TypeImpl { object_type: UnresolvedType { - typ: UnresolvedTypeData::Named(chained_path!(storage_struct_name), vec![], true), + typ: UnresolvedTypeData::Named( + chained_path!(storage_struct_name), + vec![generic_context_type.clone()], + true, + ), span: Some(Span::default()), }, type_span: Span::default(), - generics: vec![], + generics: vec![generic_context_ident], + methods: vec![(init, Span::default())], }; module.impls.push(storage_impl); @@ -341,7 +395,9 @@ pub fn assign_storage_slots( let mut storage_slot: u64 = 1; for (index, (_, expr_id)) in storage_constructor_expression.fields.iter().enumerate() { - let fields = storage_struct.borrow().get_fields(&[]); + let fields = storage_struct + .borrow() + .get_fields(&storage_constructor_expression.struct_generics); let (field_name, field_type) = fields.get(index).unwrap(); let new_call_expression = match context.def_interner.expression(expr_id) { HirExpression::Call(hir_call_expression) => Ok(hir_call_expression), diff --git a/aztec_macros/src/utils/ast_utils.rs b/aztec_macros/src/utils/ast_utils.rs index ba51090c2be..4706be2df25 100644 --- a/aztec_macros/src/utils/ast_utils.rs +++ b/aztec_macros/src/utils/ast_utils.rs @@ -1,9 +1,9 @@ use noirc_errors::{Span, Spanned}; use noirc_frontend::ast::{ BinaryOpKind, CallExpression, CastExpression, Expression, ExpressionKind, FunctionReturnType, - Ident, IndexExpression, InfixExpression, Lambda, LetStatement, MethodCallExpression, - NoirTraitImpl, Path, Pattern, PrefixExpression, Statement, StatementKind, TraitImplItem, - UnaryOp, UnresolvedType, UnresolvedTypeData, + Ident, IndexExpression, InfixExpression, Lambda, LetStatement, MemberAccessExpression, + MethodCallExpression, NoirTraitImpl, Path, Pattern, PrefixExpression, Statement, StatementKind, + TraitImplItem, UnaryOp, UnresolvedType, UnresolvedTypeData, }; use noirc_frontend::token::SecondaryAttribute; @@ -126,6 +126,13 @@ pub fn make_statement(kind: StatementKind) -> Statement { Statement { span: Span::default(), kind } } +pub fn member_access(lhs: Expression, member: &str) -> Expression { + expression(ExpressionKind::MemberAccess(Box::new(MemberAccessExpression { + lhs, + rhs: ident(member), + }))) +} + #[macro_export] macro_rules! 
chained_path {
     ( $base:expr ) => {
diff --git a/aztec_macros/src/utils/errors.rs b/aztec_macros/src/utils/errors.rs
index db86012a007..51aea3d052f 100644
--- a/aztec_macros/src/utils/errors.rs
+++ b/aztec_macros/src/utils/errors.rs
@@ -16,10 +16,12 @@ pub enum AztecMacroError {
     CouldNotImplementNoteInterface { span: Option<Span>, secondary_message: Option<String> },
     MultipleStorageDefinitions { span: Option<Span> },
     CouldNotExportStorageLayout { span: Option<Span>, secondary_message: Option<String> },
+    CouldNotInjectContextGenericInStorage { secondary_message: Option<String> },
     CouldNotExportFunctionAbi { span: Option<Span>, secondary_message: Option<String> },
     CouldNotGenerateContractInterface { secondary_message: Option<String> },
     EventError { span: Span, message: String },
     UnsupportedAttributes { span: Span, secondary_message: Option<String> },
+    PublicArgsDisallowed { span: Span },
 }
 
 impl From<AztecMacroError> for MacroError {
@@ -75,6 +77,11 @@ impl From<AztecMacroError> for MacroError {
                 secondary_message,
                 span,
             },
+            AztecMacroError::CouldNotInjectContextGenericInStorage { secondary_message } => MacroError {
+                primary_message: "Could not inject context generic in storage".to_string(),
+                secondary_message,
+                span: None
+            },
             AztecMacroError::CouldNotExportFunctionAbi { secondary_message, span } => MacroError {
                 primary_message: "Could not generate and export function abi".to_string(),
                 secondary_message,
@@ -95,6 +102,11 @@ impl From<AztecMacroError> for MacroError {
                 secondary_message,
                 span: Some(span),
             },
+            AztecMacroError::PublicArgsDisallowed { span } => MacroError {
+                primary_message: "Aztec functions can't have public arguments".to_string(),
+                secondary_message: None,
+                span: Some(span),
+            },
         }
     }
 }
diff --git a/compiler/integration-tests/scripts/codegen-verifiers.sh b/compiler/integration-tests/scripts/codegen-verifiers.sh
index e377a3ee3f8..abc26c4c465 100644
--- a/compiler/integration-tests/scripts/codegen-verifiers.sh
+++ b/compiler/integration-tests/scripts/codegen-verifiers.sh
@@ -1,26 +1,34 @@
 #!/usr/bin/env bash
 
+NARGO_BACKEND_PATH=${NARGO_BACKEND_PATH:-bb}
+
 self_path=$(dirname "$(readlink -f "$0")")
 
 repo_root=$self_path/../../..
-# Run codegen-verifier for 1_mul +# We want to move all the contracts to the root of compiler/integration-tests +contracts_dir=$self_path/../contracts +rm -rf $contracts_dir +mkdir $contracts_dir + +KEYS=$(mktemp -d) + +# Codegen verifier contract for 1_mul mul_dir=$repo_root/test_programs/execution_success/1_mul -nargo --program-dir $mul_dir codegen-verifier +nargo --program-dir $mul_dir compile +$NARGO_BACKEND_PATH write_vk -b $mul_dir/target/1_mul.json -o $KEYS/1_mul +$NARGO_BACKEND_PATH contract -k $KEYS/1_mul -o $contracts_dir/1_mul.sol -# Run codegen-verifier for assert_statement +# Codegen verifier contract for assert_statement assert_statement_dir=$repo_root/test_programs/execution_success/assert_statement -nargo --program-dir $assert_statement_dir codegen-verifier +nargo --program-dir $assert_statement_dir compile +$NARGO_BACKEND_PATH write_vk -b $assert_statement_dir/target/assert_statement.json -o $KEYS/assert_statement +$NARGO_BACKEND_PATH contract -k $KEYS/assert_statement -o $contracts_dir/assert_statement.sol -# Run codegen-verifier for recursion +# Codegen verifier contract for recursion recursion_dir=$repo_root/compiler/integration-tests/circuits/recursion -nargo --program-dir $recursion_dir codegen-verifier - -# Copy compiled contracts from the root of compiler/integration-tests -contracts_dir=$self_path/../contracts -rm -rf $contracts_dir -mkdir $contracts_dir +nargo --program-dir $recursion_dir compile +$NARGO_BACKEND_PATH write_vk -b $recursion_dir/target/recursion.json -o $KEYS/recursion +$NARGO_BACKEND_PATH contract -k $KEYS/recursion ./ -o $contracts_dir/recursion.sol -cp $mul_dir/contract/1_mul/plonk_vk.sol $contracts_dir/1_mul.sol -cp $assert_statement_dir/contract/assert_statement/plonk_vk.sol $contracts_dir/assert_statement.sol -cp $recursion_dir/contract/recursion/plonk_vk.sol $contracts_dir/recursion.sol +rm -rf $KEYS \ No newline at end of file diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 801c0b685a9..d7368f299b8 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -84,10 +84,6 @@ pub struct CompileOptions { #[arg(long, conflicts_with = "deny_warnings")] pub silence_warnings: bool, - /// Output ACIR gzipped bytecode instead of the JSON artefact - #[arg(long, hide = true)] - pub only_acir: bool, - /// Disables the builtin Aztec macros being used in the compiler #[arg(long, hide = true)] pub disable_macros: bool, diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index d982d864d06..f56c5daf315 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -207,15 +207,17 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::EmbeddedCurveAdd => { if let ( - [BrilligVariable::SingleAddr(input1_x), BrilligVariable::SingleAddr(input1_y), BrilligVariable::SingleAddr(input2_x), BrilligVariable::SingleAddr(input2_y)], + [BrilligVariable::SingleAddr(input1_x), BrilligVariable::SingleAddr(input1_y), BrilligVariable::SingleAddr(input1_infinite), BrilligVariable::SingleAddr(input2_x), BrilligVariable::SingleAddr(input2_y), BrilligVariable::SingleAddr(input2_infinite)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::EmbeddedCurveAdd { input1_x: input1_x.address, input1_y: input1_y.address, + 
input1_infinite: input1_infinite.address, input2_x: input2_x.address, input2_y: input2_y.address, + input2_infinite: input2_infinite.address, result: result_array.to_heap_array(), }); } else { @@ -233,9 +235,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::RANGE => unreachable!( "ICE: `BlackBoxFunc::RANGE` calls should be transformed into a `Instruction::Cast`" ), - BlackBoxFunc::RecursiveAggregation => unimplemented!( - "ICE: `BlackBoxFunc::RecursiveAggregation` is not implemented by the Brillig VM" - ), + BlackBoxFunc::RecursiveAggregation => {} BlackBoxFunc::BigIntAdd => { if let ( [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(rhs_modulus)], diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index f660c8e0b7a..6a4f9f5cc0e 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -488,8 +488,22 @@ impl<'block> BrilligBlock<'block> { } Value::Intrinsic(Intrinsic::ToRadix(endianness)) => { let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - let radix = self.convert_ssa_single_addr_value(arguments[1], dfg); - let limb_count = self.convert_ssa_single_addr_value(arguments[2], dfg); + + let radix: u32 = dfg + .get_numeric_constant(arguments[1]) + .expect("Radix should be known") + .try_to_u64() + .expect("Radix should fit in u64") + .try_into() + .expect("Radix should be u32"); + + let limb_count: usize = dfg + .get_numeric_constant(arguments[2]) + .expect("Limb count should be known") + .try_to_u64() + .expect("Limb count should fit in u64") + .try_into() + .expect("Limb count should fit in usize"); let results = dfg.instruction_results(instruction_id); @@ -511,7 +525,8 @@ impl<'block> BrilligBlock<'block> { .extract_vector(); // Update the user-facing slice length - self.brillig_context.cast_instruction(target_len, limb_count); + self.brillig_context + .usize_const_instruction(target_len.address, limb_count.into()); self.brillig_context.codegen_to_radix( source, @@ -524,7 +539,13 @@ impl<'block> BrilligBlock<'block> { } Value::Intrinsic(Intrinsic::ToBits(endianness)) => { let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - let limb_count = self.convert_ssa_single_addr_value(arguments[1], dfg); + let limb_count: usize = dfg + .get_numeric_constant(arguments[1]) + .expect("Limb count should be known") + .try_to_u64() + .expect("Limb count should fit in u64") + .try_into() + .expect("Limb count should fit in usize"); let results = dfg.instruction_results(instruction_id); @@ -549,21 +570,18 @@ impl<'block> BrilligBlock<'block> { BrilligVariable::SingleAddr(..) 
=> unreachable!("ICE: ToBits on non-array"), }; - let radix = self.brillig_context.make_constant_instruction(2_usize.into(), 32); - // Update the user-facing slice length - self.brillig_context.cast_instruction(target_len, limb_count); + self.brillig_context + .usize_const_instruction(target_len.address, limb_count.into()); self.brillig_context.codegen_to_radix( source, target_vector, - radix, + 2, limb_count, matches!(endianness, Endian::Big), 1, ); - - self.brillig_context.deallocate_single_addr(radix); } _ => { unreachable!("unsupported function call type {:?}", dfg[*func]) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index fadcdb22c15..2bd57dc9486 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -170,18 +170,21 @@ pub(crate) mod tests { fn multi_scalar_mul( &self, _points: &[FieldElement], - _scalars: &[FieldElement], - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Ok((4_u128.into(), 5_u128.into())) + _scalars_lo: &[FieldElement], + _scalars_hi: &[FieldElement], + ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> { + Ok((4_u128.into(), 5_u128.into(), 0_u128.into())) } fn ec_add( &self, _input1_x: &FieldElement, _input1_y: &FieldElement, + _input1_infinite: &FieldElement, _input2_x: &FieldElement, _input2_y: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + _input2_infinite: &FieldElement, + ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> { panic!("Path not trodden by this test") } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs index ab756217bcd..58166554e1d 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs @@ -1,6 +1,7 @@ -use acvm::FieldElement; - -use crate::brillig::brillig_ir::BrilligBinaryOp; +use acvm::{ + acir::brillig::{BlackBoxOp, HeapArray}, + FieldElement, +}; use super::{ brillig_variable::{BrilligVector, SingleAddrVariable}, @@ -36,57 +37,46 @@ impl BrilligContext { &mut self, source_field: SingleAddrVariable, target_vector: BrilligVector, - radix: SingleAddrVariable, - limb_count: SingleAddrVariable, + radix: u32, + limb_count: usize, big_endian: bool, limb_bit_size: u32, ) { assert!(source_field.bit_size == FieldElement::max_num_bits()); - assert!(radix.bit_size == 32); - assert!(limb_count.bit_size == 32); - let radix_as_field = - SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); - self.cast_instruction(radix_as_field, radix); - self.cast_instruction(SingleAddrVariable::new_usize(target_vector.size), limb_count); + self.usize_const_instruction(target_vector.size, limb_count.into()); self.usize_const_instruction(target_vector.rc, 1_usize.into()); self.codegen_allocate_array(target_vector.pointer, target_vector.size); - let shifted_field = - SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); - self.mov_instruction(shifted_field.address, source_field.address); + self.black_box_op_instruction(BlackBoxOp::ToRadix { + input: source_field.address, + radix, + output: HeapArray { pointer: target_vector.pointer, size: limb_count }, + }); let limb_field = SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); let 
limb_casted = SingleAddrVariable::new(self.allocate_register(), limb_bit_size); - self.codegen_loop(target_vector.size, |ctx, iterator_register| { - // Compute the modulus - ctx.binary_instruction( - shifted_field, - radix_as_field, - limb_field, - BrilligBinaryOp::Modulo, - ); - // Cast it - ctx.cast_instruction(limb_casted, limb_field); - // Write it - ctx.codegen_array_set(target_vector.pointer, iterator_register, limb_casted.address); - // Integer div the field - ctx.binary_instruction( - shifted_field, - radix_as_field, - shifted_field, - BrilligBinaryOp::UnsignedDiv, - ); - }); + if limb_bit_size != FieldElement::max_num_bits() { + self.codegen_loop(target_vector.size, |ctx, iterator_register| { + // Read the limb + ctx.codegen_array_get(target_vector.pointer, iterator_register, limb_field.address); + // Cast it + ctx.cast_instruction(limb_casted, limb_field); + // Write it + ctx.codegen_array_set( + target_vector.pointer, + iterator_register, + limb_casted.address, + ); + }); + } // Deallocate our temporary registers - self.deallocate_single_addr(shifted_field); self.deallocate_single_addr(limb_field); self.deallocate_single_addr(limb_casted); - self.deallocate_single_addr(radix_as_field); if big_endian { self.codegen_reverse_vector_in_place(target_vector); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 667ccf6ddbe..def91f82bfd 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -334,7 +334,9 @@ impl DebugShow { outputs ); } - BlackBoxOp::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, result } => { + BlackBoxOp::EmbeddedCurveAdd { + input1_x, input1_y, input2_x, input2_y, result, .. + } => { debug_println!( self.enable_debug_trace, " EMBEDDED_CURVE_ADD ({} {}) ({} {}) -> {}", @@ -451,6 +453,15 @@ impl DebugShow { output ); } + BlackBoxOp::ToRadix { input, radix, output } => { + debug_println!( + self.enable_debug_trace, + " TO_RADIX {} {} -> {}", + input, + radix, + output + ); + } } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index 732bd3cbc59..38e9bdfa8b8 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -7,7 +7,7 @@ use super::{ }; use acvm::{acir::brillig::MemoryAddress, FieldElement}; -pub(crate) const MAX_STACK_SIZE: usize = 1024; +pub(crate) const MAX_STACK_SIZE: usize = 2048; impl BrilligContext { /// Creates an entry point artifact that will jump to the function label provided. 
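Editor's note on the `codegen_to_radix` rework above: with the divmod loop replaced by a single `ToRadix` black-box op, the only per-limb work left to emit is the optional narrowing cast, and even that is skipped when the requested limb width equals the field width. A rough model of that post-processing, under stated assumptions: `narrow_limbs` is an invented name, and real limbs are field elements, modeled here as `u128`.

```rust
/// Sketch of the narrowing pass appended after the single `ToRadix` op.
fn narrow_limbs(limbs: &mut [u128], limb_bit_size: u32, field_bit_size: u32) {
    // Field-sized limbs need no cast, so the generated loop is omitted entirely.
    if limb_bit_size == field_bit_size {
        return;
    }
    assert!(limb_bit_size < 128, "this sketch models limbs as u128");
    let mask = (1u128 << limb_bit_size) - 1;
    for limb in limbs.iter_mut() {
        // Stands in for the per-limb read / cast_instruction / write sequence.
        *limb &= mask;
    }
}

fn main() {
    let mut limbs = vec![0x1ff, 0x2a];
    narrow_limbs(&mut limbs, 8, 254);
    assert_eq!(limbs, vec![0xff, 0x2a]);
}
```

The early-out is why `ToBits` and field-width `ToRadix` calls now cost a single opcode plus the length bookkeeping, which is also part of why `MAX_STACK_SIZE` headroom could be doubled without concern.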
diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs
index 5f180edd05c..90cdbb650c2 100644
--- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs
+++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs
@@ -8,7 +8,7 @@ use crate::ssa::ir::dfg::CallStack;
 use crate::ssa::ir::types::Type as SsaType;
 use crate::ssa::ir::{instruction::Endian, types::NumericType};
 use acvm::acir::circuit::brillig::{BrilligInputs, BrilligOutputs};
-use acvm::acir::circuit::opcodes::{BlockId, MemOp};
+use acvm::acir::circuit::opcodes::{BlockId, BlockType, MemOp};
 use acvm::acir::circuit::{AssertionPayload, ExpressionOrMemory, Opcode};
 use acvm::blackbox_solver;
 use acvm::brillig_vm::{MemoryValue, VMStatus, VM};
@@ -1776,6 +1776,7 @@ impl AcirContext {
         block_id: BlockId,
         len: usize,
         optional_value: Option<AcirValue>,
+        databus: BlockType,
     ) -> Result<(), InternalError> {
         let initialized_values = match optional_value {
             None => {
@@ -1790,7 +1791,11 @@
             }
         };
 
-        self.acir_ir.push_opcode(Opcode::MemoryInit { block_id, init: initialized_values });
+        self.acir_ir.push_opcode(Opcode::MemoryInit {
+            block_id,
+            init: initialized_values,
+            block_type: databus,
+        });
 
         Ok(())
     }
diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs
index c1249ae41c8..d23f4abe5f5 100644
--- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs
+++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs
@@ -293,14 +293,13 @@ impl GeneratedAcir {
             BlackBoxFunc::MultiScalarMul => BlackBoxFuncCall::MultiScalarMul {
                 points: inputs[0].clone(),
                 scalars: inputs[1].clone(),
-                outputs: (outputs[0], outputs[1]),
+                outputs: (outputs[0], outputs[1], outputs[2]),
             },
+
             BlackBoxFunc::EmbeddedCurveAdd => BlackBoxFuncCall::EmbeddedCurveAdd {
-                input1_x: inputs[0][0],
-                input1_y: inputs[1][0],
-                input2_x: inputs[2][0],
-                input2_y: inputs[3][0],
-                outputs: (outputs[0], outputs[1]),
+                input1: Box::new([inputs[0][0], inputs[1][0], inputs[2][0]]),
+                input2: Box::new([inputs[3][0], inputs[4][0], inputs[5][0]]),
+                outputs: (outputs[0], outputs[1], outputs[2]),
             },
             BlackBoxFunc::Keccak256 => {
                 let var_message_size = match inputs.to_vec().pop() {
@@ -684,8 +683,8 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option<usize> {
         // Recursive aggregation has a variable number of inputs
         BlackBoxFunc::RecursiveAggregation => None,
 
-        // Addition over the embedded curve: input are coordinates (x1,y1) and (x2,y2) of the Grumpkin points
-        BlackBoxFunc::EmbeddedCurveAdd => Some(4),
+        // Addition over the embedded curve: inputs are the coordinates (x1,y1,infinite1) and (x2,y2,infinite2) of the Grumpkin points
+        BlackBoxFunc::EmbeddedCurveAdd => Some(6),
 
         // Big integer operations take in 0 inputs. They use constants for their inputs.
         BlackBoxFunc::BigIntAdd
@@ -735,7 +734,7 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option<usize> {
         // Output of operations over the embedded curve
         // will be 2 field elements representing the point.
-        BlackBoxFunc::MultiScalarMul | BlackBoxFunc::EmbeddedCurveAdd => Some(2),
+        BlackBoxFunc::MultiScalarMul | BlackBoxFunc::EmbeddedCurveAdd => Some(3),
 
         // Big integer operations return a big integer
         BlackBoxFunc::BigIntAdd
diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
index 2430a00fd4c..fefe5f6f8e6 100644
--- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
+++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
@@ -29,6 +29,7 @@
 use crate::brillig::brillig_ir::BrilligContext;
 use crate::brillig::{brillig_gen::brillig_fn::FunctionContext as BrilligFunctionContext, Brillig};
 use crate::errors::{InternalError, InternalWarning, RuntimeError, SsaReport};
 pub(crate) use acir_ir::generated_acir::GeneratedAcir;
+use acvm::acir::circuit::opcodes::BlockType;
 use noirc_frontend::monomorphization::ast::InlineType;
 
 use acvm::acir::circuit::brillig::BrilligBytecode;
@@ -1683,7 +1684,18 @@ impl<'a> Context<'a> {
         len: usize,
         value: Option<AcirValue>,
     ) -> Result<(), InternalError> {
-        self.acir_context.initialize_array(array, len, value)?;
+        let databus = if self.data_bus.call_data.is_some()
+            && self.block_id(&self.data_bus.call_data.unwrap()) == array
+        {
+            BlockType::CallData
+        } else if self.data_bus.return_data.is_some()
+            && self.block_id(&self.data_bus.return_data.unwrap()) == array
+        {
+            BlockType::ReturnData
+        } else {
+            BlockType::Memory
+        };
+        self.acir_context.initialize_array(array, len, value, databus)?;
         self.initialized_arrays.insert(array);
         Ok(())
     }
diff --git a/cspell.json b/cspell.json
index eaf3fcd1b00..b4f214c2f27 100644
--- a/cspell.json
+++ b/cspell.json
@@ -63,6 +63,12 @@
     "defunctionalization",
     "defunctionalize",
     "defunctionalized",
+    "demonomorphization",
+    "demonomorphize",
+    "demonomorphized",
+    "demonomorphizer",
+    "demonomorphizes",
+    "demonomorphizing",
     "deque",
     "desugared",
     "devcontainer",
diff --git a/docs/docs/getting_started/barretenberg/_category_.json b/docs/docs/getting_started/barretenberg/_category_.json
new file mode 100644
index 00000000000..27a8e89228d
--- /dev/null
+++ b/docs/docs/getting_started/barretenberg/_category_.json
@@ -0,0 +1,6 @@
+{
+  "position": 1,
+  "label": "Install Barretenberg",
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/docs/docs/getting_started/barretenberg/index.md b/docs/docs/getting_started/barretenberg/index.md
new file mode 100644
index 00000000000..048ba39f046
--- /dev/null
+++ b/docs/docs/getting_started/barretenberg/index.md
@@ -0,0 +1,53 @@
+---
+title: Barretenberg Installation
+description: bb is a command line tool for interacting with Aztec's proving backend Barretenberg. This page is a quick guide on how to install `bb`
+keywords: [
+  Barretenberg
+  bb
+  Installation
+  Terminal Commands
+  Version Check
+  Nightlies
+  Specific Versions
+  Branches
+]
+pagination_next: getting_started/hello_noir/index
+---
+
+`bb` is the CLI tool for generating and verifying proofs for Noir programs using the Barretenberg proving library. It also allows generating Solidity verifier contracts with which you can verify proofs that were produced using `bb`.
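Editor's note: the prove/verify flow this new page introduces, and which the updated Hello Noir guide below walks through command by command, can equally be driven from a program. A hedged Rust sketch, assuming `bb` is on `PATH` and using only the subcommands and flags that appear later in this diff; `hello_world` and the artifact paths are placeholders.

```rust
use std::process::Command;

// Run one `bb` subcommand, panicking on failure.
fn bb(args: &[&str]) {
    let status = Command::new("bb")
        .args(args)
        .status()
        .expect("failed to spawn `bb`; is it on PATH?");
    assert!(status.success(), "bb {:?} exited with {}", args, status);
}

fn main() {
    // Assumes `nargo compile` and `nargo execute witness` have already produced
    // ./target/hello_world.json and ./target/witness.gz (placeholder names).
    bb(&["prove", "-b", "./target/hello_world.json", "-w", "./target/witness.gz", "-o", "./proof"]);
    bb(&["write_vk", "-b", "./target/hello_world.json", "-o", "./target/vk"]);
    bb(&["verify", "-k", "./target/vk", "-p", "./proof"]);
}
```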
+ +## Installing `bb` + +Open a terminal on your machine, and write: + +##### macOS (Apple Silicon) + +```bash +mkdir -p $HOME/.barretenberg && \ +curl -o ./barretenberg-aarch64-apple-darwin.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-aarch64-apple-darwin.tar.gz && \ +tar -xvf ./barretenberg-aarch64-apple-darwin.tar.gz -C $HOME/.barretenberg/ && \ +echo 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### macOS (Intel) + +```bash +mkdir -p $HOME/.barretenberg && \ +curl -o ./barretenberg-x86_64-apple-darwin.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-x86_64-apple-darwin.tar.gz && \ +tar -xvf ./barretenberg-x86_64-apple-darwin.tar.gz -C $HOME/.barretenberg/ && \ +echo 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### Linux (Bash) + +```bash +mkdir -p $HOME/.barretenberg && \ +curl -o ./barretenberg-x86_64-linux-gnu.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-x86_64-linux-gnu.tar.gz && \ +tar -xvf ./barretenberg-x86_64-linux-gnu.tar.gz -C $HOME/.barretenberg/ && \ +echo -e 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.bashrc && \ +source ~/.bashrc +``` + +Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/docs/docs/getting_started/hello_noir/_category_.json b/docs/docs/getting_started/hello_noir/_category_.json index 23b560f610b..976a2325de0 100644 --- a/docs/docs/getting_started/hello_noir/_category_.json +++ b/docs/docs/getting_started/hello_noir/_category_.json @@ -1,5 +1,5 @@ { - "position": 1, + "position": 2, "collapsible": true, "collapsed": true } diff --git a/docs/docs/getting_started/hello_noir/index.md b/docs/docs/getting_started/hello_noir/index.md index 743c4d8d634..1ade3f09ae3 100644 --- a/docs/docs/getting_started/hello_noir/index.md +++ b/docs/docs/getting_started/hello_noir/index.md @@ -90,13 +90,11 @@ cd hello_world nargo check ``` -Two additional files would be generated in your project directory: +A _Prover.toml_ file will be generated in your project directory, to allow specifying input values to the program. -_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. +## Execute Our Noir Program -## Prove Our Noir Program - -Now that the project is set up, we can create a proof of correct execution of our Noir program. +Now that the project is set up, we can execute our Noir program. Fill in input values for execution in the _Prover.toml_ file. For example: @@ -105,37 +103,42 @@ x = "1" y = "2" ``` -Prove the valid execution of your Noir program: +Execute your Noir program: ```sh -nargo prove +nargo execute witness-name ``` -A new folder _proofs_ would then be generated in your project directory, containing the proof file -`.proof`, where the project name is defined in Nargo.toml. +The witness corresponding to this execution will then be written to the file `./target/witness-name.gz`. -The _Verifier.toml_ file would also be updated with the public values computed from program -execution (in this case the value of `y`): +## Prove Our Noir Program -```toml -y = "0x0000000000000000000000000000000000000000000000000000000000000002" +:::info + +Nargo no longer handles communicating with backends in order to generate proofs. In order to prove/verify your Noir programs, you'll need an installation of [bb](../barretenberg/index.md). 
+ +::: + +Prove the valid execution of your Noir program using `bb`: + +```sh +bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./proof ``` -> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. +A new file called `proof` will be generated in your project directory, containing the generated proof for your program. ## Verify Our Noir Program -Once a proof is generated, we can verify correct execution of our Noir program by verifying the -proof file. +Once a proof is generated, we can verify correct execution of our Noir program by verifying the proof file. Verify your proof by running: ```sh -nargo verify +bb write_vk -b ./target/hello_world.json -o ./target/vk +bb verify -k ./target/vk -p ./proof ``` -The verification will complete in silence if it is successful. If it fails, it will log the -corresponding error instead. +The verification will complete in silence if it is successful. If it fails, it will log the corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! diff --git a/docs/docs/getting_started/hello_noir/project_breakdown.md b/docs/docs/getting_started/hello_noir/project_breakdown.md index 6160a102c6c..29688df148f 100644 --- a/docs/docs/getting_started/hello_noir/project_breakdown.md +++ b/docs/docs/getting_started/hello_noir/project_breakdown.md @@ -1,10 +1,10 @@ --- title: Project Breakdown description: - Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML - files, and how to prove and verify your program. + Learn about the anatomy of a Nargo project, including the purpose of the Prover TOML + file, and how to prove and verify your program. keywords: - [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] + [Nargo, Nargo project, Prover.toml, proof verification, private asset transfer] sidebar_position: 2 --- @@ -18,7 +18,6 @@ commands, you would get a minimal Nargo project of the following structure: - src - Prover.toml - - Verifier.toml - Nargo.toml The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ @@ -28,10 +27,6 @@ file will be generated within it. _Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. -### Verifier.toml - -_Verifier.toml_ contains public in/output values computed when executing the Noir program. - ### Nargo.toml _Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. @@ -92,20 +87,15 @@ fn main(x : Field, y : Field) { } ``` -The parameters `x` and `y` can be seen as the API for the program and must be supplied by the -prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when -verifying the proof. +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when verifying the proof. The prover supplies the values for `x` and `y` in the _Prover.toml_ file. -As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is -constrained by the proof of the execution of said program (i.e. 
if the condition was not met, the -verifier would reject the proof as an invalid proof). +As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is constrained by the proof of the execution of said program (i.e. if the condition was not met, the verifier would reject the proof as an invalid proof). ### Prover.toml -The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and -public). +The _Prover.toml_ file is a file which the prover uses to supply the inputs to the Noir program (both private and public). In our hello world program the _Prover.toml_ file looks like this: @@ -114,12 +104,9 @@ x = "1" y = "2" ``` -When the command `nargo prove` is executed, two processes happen: - -1. Noir creates a proof that `x`, which holds the value of `1`, and `y`, which holds the value of `2`, - is not equal. This inequality constraint is due to the line `assert(x != y)`. +When the command `nargo execute` is executed, nargo will execute the Noir program using the inputs specified in `Prover.toml`, aborting if it finds that these do not satisfy the constraints defined by `main`. In this example, `x` and `y` must satisfy the inequality constraint `assert(x != y)`. -2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. +If an output name is specified such as `nargo execute foo`, the witness generated by this execution will be written to `./target/foo.gz`. This can then be used to generate a proof of the execution. #### Arrays of Structs @@ -155,45 +142,18 @@ baz = 2 #### Custom toml files -You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. +You can specify a `toml` file with a different name to use for execution by using the `--prover-name` or `-p` flags. -This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: +This command looks for proof inputs in the default **Prover.toml** and generates the witness and saves it at `./target/foo.gz`: ```bash -nargo prove +nargo execute foo ``` -This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: +This command looks for proof inputs in the custom **OtherProver.toml** and generates the witness and saves it at `./target/bar.gz`: ```bash -nargo prove -p OtherProver +nargo execute -p OtherProver bar ``` -## Verifying a Proof - -When the command `nargo verify` is executed, two processes happen: - -1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) - -2. If that file is found, the proof's validity is checked - -> **Note:** The validity of the proof is linked to the current Noir program; if the program is -> changed and the verifier verifies the proof, it will fail because the proof is not valid for the -> _modified_ Noir program. - -In production, the prover and the verifier are usually two separate entities. A prover would -retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the -verifier. The verifier would then retrieve the public inputs, usually from external sources, and -verify the validity of the proof against it. 
- -Take a private asset transfer as an example: - -A person using a browser as the prover would retrieve private inputs locally (e.g. the user's private key) and -public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof -and submit it to the verifier smart contract. - -The verifier contract would then draw the user's encrypted balance directly from the blockchain and -verify the proof submitted against it. If the verification passes, additional functions in the -verifier contract could trigger (e.g. approve the asset transfer). - Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/docs/docs/getting_started/tooling/noir_codegen.md b/docs/docs/getting_started/tooling/noir_codegen.md index d65151da0ab..1c040585340 100644 --- a/docs/docs/getting_started/tooling/noir_codegen.md +++ b/docs/docs/getting_started/tooling/noir_codegen.md @@ -2,7 +2,7 @@ title: Noir Codegen for TypeScript description: Learn how to use Noir codegen to generate TypeScript bindings keywords: [Nargo, Noir, compile, TypeScript] -sidebar_position: 2 +sidebar_position: 3 --- When using TypeScript, it is extra work to interpret Noir program outputs in a type-safe way. Third party libraries may exist for popular Noir programs, but they are either hard to find or unmaintained. diff --git a/docs/docs/how_to/how-to-oracles.md b/docs/docs/how_to/how-to-oracles.md index 8cf8035a5c4..5f427f1e23f 100644 --- a/docs/docs/how_to/how-to-oracles.md +++ b/docs/docs/how_to/how-to-oracles.md @@ -177,7 +177,7 @@ interface ForeignCallResult { ## Step 3 - Usage with Nargo -Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test`, `nargo execute` and `nargo prove` commands by passing a value to `--oracle-resolver`. For example: +Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test` and `nargo execute` commands by passing a value to `--oracle-resolver`. For example: ```bash nargo test --oracle-resolver http://localhost:5555 @@ -203,7 +203,7 @@ As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_j Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)? -You don't technically have to, but then how would you run `nargo test` or `nargo prove`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. +You don't technically have to, but then how would you run `nargo test`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. ::: diff --git a/docs/docs/how_to/how-to-solidity-verifier.md b/docs/docs/how_to/how-to-solidity-verifier.md index e3c7c1065da..7c96e22b8d5 100644 --- a/docs/docs/how_to/how-to-solidity-verifier.md +++ b/docs/docs/how_to/how-to-solidity-verifier.md @@ -43,11 +43,19 @@ Generating a Solidity Verifier contract is actually a one-command process. Howev This is by far the most straight-forward step. Just run: ```sh -nargo codegen-verifier +nargo compile ``` -A new `contract` folder would then be generated in your project directory, containing the Solidity -file `plonk_vk.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract. 
+This will compile your source code into a Noir build artifact to be stored in the `./target` directory; you can then generate the smart contract using the commands:
+
+```sh
+# Here we pass the path to the newly generated Noir artifact.
+bb write_vk -b ./target/<project_name>.json
+bb contract
+```
+
+replacing `<project_name>` with the name of your Noir project. A new `contract` folder would then be generated in your project directory, containing the Solidity
+file `contract.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract.
 
 :::info
 
@@ -123,11 +131,25 @@ To verify a proof using the Solidity verifier contract, we call the `verify` function
 
 function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool)
 ```
 
-When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. For `_proof`, run `nargo prove` and use the string in `proof/.proof` (adding the hex `0x` prefix). We can also copy the public input from `Verifier.toml`, as it will be properly formatted as 32-byte strings:
+When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via Remix with the required parameters. Note that the public inputs must be passed in separately from the rest of the proof, so we must split the proof as returned from `bb`.
+First generate a proof with `bb` at the location `./proof` using the steps in [get started](../getting_started/hello_noir/index.md). This proof is in a binary format, but we want to convert it into a hex string to pass into Remix; this can be done with the following:
+
+```bash
+# This value must be changed to match the number of public inputs (including return values!) in your program.
+NUM_PUBLIC_INPUTS=1
+PUBLIC_INPUT_BYTES=$((32 * $NUM_PUBLIC_INPUTS))
+HEX_PUBLIC_INPUTS=$(head -c $PUBLIC_INPUT_BYTES ./proof | od -An -v -t x1 | tr -d $' \n')
+HEX_PROOF=$(tail -c +$(($PUBLIC_INPUT_BYTES + 1)) ./proof | od -An -v -t x1 | tr -d $' \n')
+
+echo "Public inputs:"
+echo $HEX_PUBLIC_INPUTS
+
+echo "Proof:"
+echo "0x$HEX_PROOF"
 ```
-
-0x...... , [0x0000.....02]
-```
+
+
+Remix expects the public inputs as an array of `bytes32` values, so `HEX_PUBLIC_INPUTS` needs to be split into 32-byte chunks, each prefixed with `0x`.
 
 A programmatic example of how the `verify` function is called can be seen in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35):
 
@@ -144,11 +166,9 @@ function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 null
 
 :::info[Return Values]
 
-A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in
-Noir.
+A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in Noir.
 
-Under the hood, the return value is passed as an input to the circuit and is checked at the end of
-the circuit program.
+Under the hood, the return value is passed as an input to the circuit and is checked at the end of the circuit program.
For example, if you have a Noir program like this:
@@ -162,11 +182,11 @@ fn main(
) -> pub Field
```

-the `verify` function will expect the public inputs array (second function parameter) to be of length 3, the two inputs and the return value. Like before, these values are populated in Verifier.toml after running `nargo prove`.
+the `verify` function will expect the public inputs array (second function parameter) to be of length 3: the two inputs and the return value. Passing only two inputs will result in an error such as `PUBLIC_INPUT_COUNT_INVALID(3, 2)`.

-In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`.
+In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`.

:::
diff --git a/docs/docs/noir/concepts/data_types/booleans.md b/docs/docs/noir/concepts/data_types/booleans.md
index 69826fcd724..3dcfa836814 100644
--- a/docs/docs/noir/concepts/data_types/booleans.md
+++ b/docs/docs/noir/concepts/data_types/booleans.md
@@ -23,9 +23,6 @@ fn main() {
}
```

-> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for
-> `false` in _Verifier.toml_.
-
The boolean type is most commonly used in conditionals like `if` expressions and `assert`
statements. More about conditionals is covered in the [Control Flow](../control_flow) and
[Assert Function](../assert) sections.
diff --git a/docs/docs/noir/concepts/data_types/integers.md b/docs/docs/noir/concepts/data_types/integers.md
index 6b2d3773912..c14fffa7174 100644
--- a/docs/docs/noir/concepts/data_types/integers.md
+++ b/docs/docs/noir/concepts/data_types/integers.md
@@ -115,7 +115,7 @@ y = "1"

Would result in:

```
-$ nargo prove
+$ nargo execute
error: Assertion failed: 'attempt to add with overflow'
┌─ ~/src/main.nr:9:13
│
diff --git a/examples/codegen_verifier/.gitignore b/examples/codegen_verifier/.gitignore
new file mode 100644
index 00000000000..c0d62c447d3
--- /dev/null
+++ b/examples/codegen_verifier/.gitignore
@@ -0,0 +1,4 @@
+out
+cache
+target
+src/contract.sol
\ No newline at end of file
diff --git a/examples/codegen_verifier/Nargo.toml b/examples/codegen_verifier/Nargo.toml
new file mode 100644
index 00000000000..2b367f30dbc
--- /dev/null
+++ b/examples/codegen_verifier/Nargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "hello_world"
+type = "bin"
+authors = [""]
+compiler_version = ">=0.29.0"
+
+[dependencies]
\ No newline at end of file
diff --git a/examples/codegen_verifier/Prover.toml b/examples/codegen_verifier/Prover.toml
new file mode 100644
index 00000000000..2c1854573a4
--- /dev/null
+++ b/examples/codegen_verifier/Prover.toml
@@ -0,0 +1,2 @@
+x = 1
+y = 2
diff --git a/examples/codegen_verifier/codegen_verifier.sh b/examples/codegen_verifier/codegen_verifier.sh
new file mode 100755
index 00000000000..fabd6235a67
--- /dev/null
+++ b/examples/codegen_verifier/codegen_verifier.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+set -eu
+
+BACKEND=${BACKEND:-bb}
+
+nargo compile
+
+# TODO: backend should automatically generate vk if necessary.
+$BACKEND write_vk -b ./target/hello_world.json
+$BACKEND contract -o ./src/contract.sol
+
+# We now generate a proof and check whether the verifier contract will verify it.
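+# Note: as the docs above describe, `bb` prepends the public inputs (32 bytes
+# per input) to the proof itself, so the hex manipulation further down splits
+# them back out before the proof is passed to the verifier contract.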
+ +nargo execute witness + +PROOF_PATH=./target/proof +$BACKEND prove -b ./target/hello_world.json -w ./target/witness.gz -o $PROOF_PATH + +NUM_PUBLIC_INPUTS=1 +PUBLIC_INPUT_BYTES=$((32 * $NUM_PUBLIC_INPUTS)) +HEX_PUBLIC_INPUTS=$(head -c $PUBLIC_INPUT_BYTES $PROOF_PATH | od -An -v -t x1 | tr -d $' \n') +HEX_PROOF=$(tail -c +$(($PUBLIC_INPUT_BYTES + 1)) $PROOF_PATH | od -An -v -t x1 | tr -d $' \n') + +# Spin up an anvil node to deploy the contract to +anvil & + +DEPLOY_INFO=$(forge create UltraVerifier \ + --rpc-url "127.0.0.1:8545" \ + --private-key "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" \ + --json) +VERIFIER_ADDRESS=$(echo $DEPLOY_INFO | jq -r '.deployedTo') + +# Call the verifier contract with our proof. +# Note that we haven't needed to split up `HEX_PUBLIC_INPUTS` as there's only a single public input +cast call $VERIFIER_ADDRESS "verify(bytes, bytes32[])(bool)" "0x$HEX_PROOF" "[0x$HEX_PUBLIC_INPUTS]" + +# Stop anvil node again +kill %- \ No newline at end of file diff --git a/examples/codegen_verifier/foundry.toml b/examples/codegen_verifier/foundry.toml new file mode 100644 index 00000000000..25b918f9c9a --- /dev/null +++ b/examples/codegen_verifier/foundry.toml @@ -0,0 +1,6 @@ +[profile.default] +src = "src" +out = "out" +libs = ["lib"] + +# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options diff --git a/examples/codegen_verifier/src/main.nr b/examples/codegen_verifier/src/main.nr new file mode 100644 index 00000000000..baef0c3786a --- /dev/null +++ b/examples/codegen_verifier/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field, y: pub Field) { + assert(x != y); +} \ No newline at end of file diff --git a/examples/codegen_verifier/test.sh b/examples/codegen_verifier/test.sh new file mode 100755 index 00000000000..93c2f6edf51 --- /dev/null +++ b/examples/codegen_verifier/test.sh @@ -0,0 +1,15 @@ +#!/bin/bash +set -eu + +# This file is used for Noir CI and is not required. + +BACKEND=${BACKEND:-bb} + +rm -f ./src/contract.sol + +./codegen_verifier.sh + +if ! [ -f ./src/contract.sol ]; then + printf '%s\n' "Contract not written to file" >&2 + exit 1 +fi \ No newline at end of file diff --git a/examples/prove_and_verify/Nargo.toml b/examples/prove_and_verify/Nargo.toml new file mode 100644 index 00000000000..2b367f30dbc --- /dev/null +++ b/examples/prove_and_verify/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "hello_world" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] \ No newline at end of file diff --git a/examples/prove_and_verify/Prover.toml b/examples/prove_and_verify/Prover.toml new file mode 100644 index 00000000000..8c12ebba6cf --- /dev/null +++ b/examples/prove_and_verify/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "2" diff --git a/examples/prove_and_verify/proofs/proof b/examples/prove_and_verify/proofs/proof new file mode 100644 index 00000000000..01d5ad27686 Binary files /dev/null and b/examples/prove_and_verify/proofs/proof differ diff --git a/examples/prove_and_verify/prove_and_verify.sh b/examples/prove_and_verify/prove_and_verify.sh new file mode 100755 index 00000000000..01ee6c70738 --- /dev/null +++ b/examples/prove_and_verify/prove_and_verify.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -eu + +BACKEND=${BACKEND:-bb} + +nargo execute witness + +# TODO: `bb` should create `proofs` directory if it doesn't exist. +mkdir -p proofs +$BACKEND prove -b ./target/hello_world.json -w ./target/witness.gz + +# TODO: backend should automatically generate vk if necessary. 
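+# `write_vk` derives the verification key from the compiled circuit (written
+# to ./target/vk here); `verify` then checks the proof against that key.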
+$BACKEND write_vk -b ./target/hello_world.json
+$BACKEND verify -k ./target/vk -p ./proofs/proof
\ No newline at end of file
diff --git a/examples/prove_and_verify/src/main.nr b/examples/prove_and_verify/src/main.nr
new file mode 100644
index 00000000000..baef0c3786a
--- /dev/null
+++ b/examples/prove_and_verify/src/main.nr
@@ -0,0 +1,3 @@
+fn main(x: Field, y: pub Field) {
+    assert(x != y);
+}
\ No newline at end of file
diff --git a/examples/prove_and_verify/test.sh b/examples/prove_and_verify/test.sh
new file mode 100755
index 00000000000..a8ae3cca132
--- /dev/null
+++ b/examples/prove_and_verify/test.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -eu
+
+# This file is used for Noir CI and is not required.
+
+BACKEND=${BACKEND:-bb}
+
+rm -rf ./target ./proofs
+
+./prove_and_verify.sh
\ No newline at end of file
diff --git a/noir_stdlib/src/aes128.nr b/noir_stdlib/src/aes128.nr
index e6e2a5e4997..cd61021a953 100644
--- a/noir_stdlib/src/aes128.nr
+++ b/noir_stdlib/src/aes128.nr
@@ -2,3 +2,6 @@
// docs:start:aes128
pub fn aes128_encrypt<N>(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {}
// docs:end:aes128
+
+#[foreign(aes128_encrypt)]
+pub fn aes128_encrypt_slice(input: [u8], iv: [u8; 16], key: [u8; 16]) -> [u8] {}
diff --git a/noir_stdlib/src/embedded_curve_ops.nr b/noir_stdlib/src/embedded_curve_ops.nr
index 21d658db615..cd8c421e136 100644
--- a/noir_stdlib/src/embedded_curve_ops.nr
+++ b/noir_stdlib/src/embedded_curve_ops.nr
@@ -1,15 +1,21 @@
use crate::ops::arith::{Add, Sub, Neg};
+use crate::cmp::Eq;

// TODO(https://github.com/noir-lang/noir/issues/4931)
struct EmbeddedCurvePoint {
    x: Field,
    y: Field,
+    is_infinite: bool
}

impl EmbeddedCurvePoint {
    fn double(self) -> EmbeddedCurvePoint {
        embedded_curve_add(self, self)
    }
+
+    fn point_at_infinity() -> EmbeddedCurvePoint {
+        EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true }
+    }
}

impl Add for EmbeddedCurvePoint {
@@ -28,11 +34,24 @@ impl Neg for EmbeddedCurvePoint {
    fn neg(self) -> EmbeddedCurvePoint {
        EmbeddedCurvePoint {
            x: self.x,
-            y: -self.y
+            y: -self.y,
+            is_infinite: self.is_infinite
        }
    }
}

+impl Eq for EmbeddedCurvePoint {
+    fn eq(self: Self, b: EmbeddedCurvePoint) -> bool {
+        (self.is_infinite & b.is_infinite) | ((self.is_infinite == b.is_infinite) & (self.x == b.x) & (self.y == b.y))
+    }
+}
+
+// Scalar represented as low and high limbs
+struct EmbeddedCurveScalar {
+    lo: Field,
+    hi: Field,
+}
+
// Computes a multi scalar multiplication over the embedded curve.
// For bn254, we have Grumpkin and Baby JubJub.
// For bls12-381, we have JubJub and Bandersnatch.
@@ -42,9 +61,9 @@ impl Neg for EmbeddedCurvePoint {
#[foreign(multi_scalar_mul)]
// docs:start:multi_scalar_mul
pub fn multi_scalar_mul<N>(
-    points: [Field; N], // points represented as x and y coordinates [x1, y1, x2, y2, ...]
-    scalars: [Field; N] // scalars represented as low and high limbs [low1, high1, low2, high2, ...]
-) -> [Field; 2] + points: [EmbeddedCurvePoint; N], + scalars: [EmbeddedCurveScalar; N] +) -> [Field; 3] // docs:end:multi_scalar_mul {} @@ -52,12 +71,12 @@ pub fn multi_scalar_mul( pub fn fixed_base_scalar_mul( scalar_low: Field, scalar_high: Field -) -> [Field; 2] +) -> [Field; 3] // docs:end:fixed_base_scalar_mul { - let g1_x = 1; - let g1_y = 17631683881184975370165255887551781615748388533673675138860; - multi_scalar_mul([g1_x, g1_y], [scalar_low, scalar_high]) + let g1 = EmbeddedCurvePoint { x: 1, y: 17631683881184975370165255887551781615748388533673675138860, is_infinite: false }; + let scalar = EmbeddedCurveScalar { lo: scalar_low, hi: scalar_high }; + multi_scalar_mul([g1], [scalar]) } // This is a hack as returning an `EmbeddedCurvePoint` from a foreign function in brillig returns a [BrilligVariable::SingleAddr; 2] rather than BrilligVariable::BrilligArray @@ -72,8 +91,8 @@ fn embedded_curve_add( let point_array = embedded_curve_add_array_return(point1, point2); let x = point_array[0]; let y = point_array[1]; - EmbeddedCurvePoint { x, y } + EmbeddedCurvePoint { x, y, is_infinite: point_array[2] == 1 } } #[foreign(embedded_curve_add)] -fn embedded_curve_add_array_return(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> [Field; 2] {} +fn embedded_curve_add_array_return(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> [Field; 3] {} diff --git a/noir_stdlib/src/field/bn254.nr b/noir_stdlib/src/field/bn254.nr index d70310be391..2e82d9e7c23 100644 --- a/noir_stdlib/src/field/bn254.nr +++ b/noir_stdlib/src/field/bn254.nr @@ -25,7 +25,7 @@ unconstrained fn decompose_unsafe(x: Field) -> (Field, Field) { fn assert_gt_limbs(a: (Field, Field), b: (Field, Field)) { let (alo, ahi) = a; let (blo, bhi) = b; - let borrow = lte_unsafe(alo, blo, 16); + let borrow = lte_unsafe_16(alo, blo); let rlo = alo - blo - 1 + (borrow as Field) * TWO_POW_128; let rhi = ahi - bhi - (borrow as Field); @@ -51,9 +51,9 @@ pub fn decompose(x: Field) -> (Field, Field) { (xlo, xhi) } -unconstrained fn lt_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { - let x_bytes = x.__to_le_radix(256, num_bytes); - let y_bytes = y.__to_le_radix(256, num_bytes); +fn lt_unsafe_internal(x: Field, y: Field, num_bytes: u32) -> bool { + let x_bytes = x.to_le_radix(256, num_bytes); + let y_bytes = y.to_le_radix(256, num_bytes); let mut x_is_lt = false; let mut done = false; for i in 0..num_bytes { @@ -70,8 +70,20 @@ unconstrained fn lt_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { x_is_lt } -unconstrained fn lte_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { - lt_unsafe(x, y, num_bytes) | (x == y) +fn lte_unsafe_internal(x: Field, y: Field, num_bytes: u32) -> bool { + if x == y { + true + } else { + lt_unsafe_internal(x, y, num_bytes) + } +} + +unconstrained fn lt_unsafe_32(x: Field, y: Field) -> bool { + lt_unsafe_internal(x, y, 32) +} + +unconstrained fn lte_unsafe_16(x: Field, y: Field) -> bool { + lte_unsafe_internal(x, y, 16) } pub fn assert_gt(a: Field, b: Field) { @@ -90,7 +102,7 @@ pub fn assert_lt(a: Field, b: Field) { pub fn gt(a: Field, b: Field) -> bool { if a == b { false - } else if lt_unsafe(a, b, 32) { + } else if lt_unsafe_32(a, b) { assert_gt(b, a); false } else { @@ -105,7 +117,10 @@ pub fn lt(a: Field, b: Field) -> bool { mod tests { // TODO: Allow imports from "super" - use crate::field::bn254::{decompose_unsafe, decompose, lt_unsafe, assert_gt, gt, lt, TWO_POW_128, lte_unsafe, PLO, PHI}; + use crate::field::bn254::{ + decompose_unsafe, decompose, lt_unsafe_internal, assert_gt, 
gt, lt, TWO_POW_128, + lte_unsafe_internal, PLO, PHI + }; #[test] fn check_decompose_unsafe() { @@ -123,23 +138,23 @@ mod tests { #[test] fn check_lt_unsafe() { - assert(lt_unsafe(0, 1, 16)); - assert(lt_unsafe(0, 0x100, 16)); - assert(lt_unsafe(0x100, TWO_POW_128 - 1, 16)); - assert(!lt_unsafe(0, TWO_POW_128, 16)); + assert(lt_unsafe_internal(0, 1, 16)); + assert(lt_unsafe_internal(0, 0x100, 16)); + assert(lt_unsafe_internal(0x100, TWO_POW_128 - 1, 16)); + assert(!lt_unsafe_internal(0, TWO_POW_128, 16)); } #[test] fn check_lte_unsafe() { - assert(lte_unsafe(0, 1, 16)); - assert(lte_unsafe(0, 0x100, 16)); - assert(lte_unsafe(0x100, TWO_POW_128 - 1, 16)); - assert(!lte_unsafe(0, TWO_POW_128, 16)); - - assert(lte_unsafe(0, 0, 16)); - assert(lte_unsafe(0x100, 0x100, 16)); - assert(lte_unsafe(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); - assert(lte_unsafe(TWO_POW_128, TWO_POW_128, 16)); + assert(lte_unsafe_internal(0, 1, 16)); + assert(lte_unsafe_internal(0, 0x100, 16)); + assert(lte_unsafe_internal(0x100, TWO_POW_128 - 1, 16)); + assert(!lte_unsafe_internal(0, TWO_POW_128, 16)); + + assert(lte_unsafe_internal(0, 0, 16)); + assert(lte_unsafe_internal(0x100, 0x100, 16)); + assert(lte_unsafe_internal(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); + assert(lte_unsafe_internal(TWO_POW_128, TWO_POW_128, 16)); } #[test] diff --git a/scripts/install_bb.sh b/scripts/install_bb.sh new file mode 100755 index 00000000000..519ffe6cb03 --- /dev/null +++ b/scripts/install_bb.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# We use this script just for CI so we assume we're running on x86 linux + +mkdir -p $HOME/.barretenberg +curl -o ./barretenberg-x86_64-linux-gnu.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.41.0/barretenberg-x86_64-linux-gnu.tar.gz +tar -xvf ./barretenberg-x86_64-linux-gnu.tar.gz -C $HOME/.barretenberg/ +echo 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.bashrc +source ~/.bashrc diff --git a/test_programs/compile_success_empty/intrinsic_die/src/main.nr b/test_programs/compile_success_empty/intrinsic_die/src/main.nr index 9ce17f72c0d..a6c6d3df9a1 100644 --- a/test_programs/compile_success_empty/intrinsic_die/src/main.nr +++ b/test_programs/compile_success_empty/intrinsic_die/src/main.nr @@ -4,5 +4,7 @@ fn main(x: Field) { let hash = std::hash::pedersen_commitment([x]); let g1_x = 0x0000000000000000000000000000000000000000000000000000000000000001; let g1_y = 0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c; - let _p1 = std::embedded_curve_ops::multi_scalar_mul([g1_x, g1_y], [x, 0]); + let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: g1_x, y: g1_y, is_infinite: false }; + let scalar = std::embedded_curve_ops::EmbeddedCurveScalar { lo: x, hi: 0 }; + let _p1 = std::embedded_curve_ops::multi_scalar_mul([g1], [scalar]); } diff --git a/test_programs/execution_success/brillig_embedded_curve/src/main.nr b/test_programs/execution_success/brillig_embedded_curve/src/main.nr index 8a1a7f08975..89a699448dc 100644 --- a/test_programs/execution_success/brillig_embedded_curve/src/main.nr +++ b/test_programs/execution_success/brillig_embedded_curve/src/main.nr @@ -2,22 +2,22 @@ use dep::std; unconstrained fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { let g1_y = 17631683881184975370165255887551781615748388533673675138860; - let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y }; - + let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y, is_infinite: false }; + let scalar = 
std::embedded_curve_ops::EmbeddedCurveScalar { lo: priv_key, hi: 0 }; // Test that multi_scalar_mul correctly derives the public key - let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y], [priv_key, 0]); + let res = std::embedded_curve_ops::multi_scalar_mul([g1], [scalar]); assert(res[0] == pub_x); assert(res[1] == pub_y); // Test that double function calling embedded_curve_add works as expected - let pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y }; + let pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y, is_infinite: false }; let res = pub_point.double(); let double = g1.add(g1); assert(double.x == res.x); // Test calling multi_scalar_mul with multiple points and scalars - let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y, g1.x, g1.y], [priv_key, 0, priv_key, 0]); + let res = std::embedded_curve_ops::multi_scalar_mul([g1, g1], [scalar, scalar]); // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice assert(double.x == res[0]); diff --git a/test_programs/execution_success/embedded_curve_ops/src/main.nr b/test_programs/execution_success/embedded_curve_ops/src/main.nr index 3cb27d8c181..46f919e947a 100644 --- a/test_programs/execution_success/embedded_curve_ops/src/main.nr +++ b/test_programs/execution_success/embedded_curve_ops/src/main.nr @@ -2,22 +2,22 @@ use dep::std; fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { let g1_y = 17631683881184975370165255887551781615748388533673675138860; - let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y }; - + let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y, is_infinite: false }; + let scalar = std::embedded_curve_ops::EmbeddedCurveScalar { lo: priv_key, hi: 0 }; // Test that multi_scalar_mul correctly derives the public key - let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y], [priv_key, 0]); + let res = std::embedded_curve_ops::multi_scalar_mul([g1], [scalar]); assert(res[0] == pub_x); assert(res[1] == pub_y); // Test that double function calling embedded_curve_add works as expected - let pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y }; + let pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y, is_infinite: false }; let res = pub_point.double(); let double = g1.add(g1); assert(double.x == res.x); // Test calling multi_scalar_mul with multiple points and scalars - let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y, g1.x, g1.y], [priv_key, 0, priv_key, 0]); + let res = std::embedded_curve_ops::multi_scalar_mul([g1, g1], [scalar, scalar]); // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice assert(double.x == res[0]); diff --git a/test_programs/execution_success/trait_method_mut_self/src/main.nr b/test_programs/execution_success/trait_method_mut_self/src/main.nr index 0e736c2f098..fa47fd5d881 100644 --- a/test_programs/execution_success/trait_method_mut_self/src/main.nr +++ b/test_programs/execution_success/trait_method_mut_self/src/main.nr @@ -12,6 +12,14 @@ fn main(x: Field, y: pub Field) { pass_trait_by_mut_ref(&mut a_mut_ref, y); assert(a_mut_ref.x == y); + + let mut hasher = Poseidon2Hasher::default(); + hasher.write(x); + hasher.write(y); + let expected_hash = hasher.finish(); + // Check that we get the same result when using the hasher in a + // method that purely uses trait methods without a supplied implementation. 
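+    // The explicit `::<Poseidon2Hasher>` type argument below selects the same
+    // hasher for the generic parameter `H` of `hash_simple_array`.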
+    assert(hash_simple_array::<Poseidon2Hasher>([x, y]) == expected_hash);
}

trait SomeTrait {
@@ -50,3 +58,17 @@ fn pass_trait_by_mut_ref<T>(a_mut_ref: &mut T, value: Field) where T: SomeTrait
    // We auto add a mutable reference to the object type if the method call expects a mutable self
    a_mut_ref.set_value(value);
}
+
+fn hash_simple_array<H>(input: [Field; 2]) -> Field where H: Hasher + Default {
+    // Check that we can call a trait method instead of a trait implementation
+    // TODO: Need to remove the need for this type annotation
+    // TODO: Currently, without the annotation we will get `Expression type is ambiguous` when trying to use the `hasher`
+    let mut hasher: H = H::default();
+    // Regression that the object is converted to a mutable reference type `&mut _`.
+    // Otherwise will see `Expected type &mut _, found type H`.
+    // Then we need to make sure to also auto dereference later in the type checking process
+    // when searching for a matching impl or else we will get `No matching impl found for `&mut H: Hasher`
+    hasher.write(input[0]);
+    hasher.write(input[1]);
+    hasher.finish()
+}
diff --git a/test_programs/execution_success/unit_value/Nargo.toml b/test_programs/execution_success/unit_value/Nargo.toml
index f7e3697a7c1..1f9c4524ec5 100644
--- a/test_programs/execution_success/unit_value/Nargo.toml
+++ b/test_programs/execution_success/unit_value/Nargo.toml
@@ -1,7 +1,7 @@
[package]
-name = "short"
+name = "unit_value"
type = "bin"
authors = [""]
compiler_version = ">=0.23.0"

-[dependencies]
\ No newline at end of file
+[dependencies]
diff --git a/test_programs/noir_test_success/embedded_curve_ops/Nargo.toml b/test_programs/noir_test_success/embedded_curve_ops/Nargo.toml
new file mode 100644
index 00000000000..65e6efea538
--- /dev/null
+++ b/test_programs/noir_test_success/embedded_curve_ops/Nargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "embedded_curve_ops"
+type = "bin"
+authors = [""]
+compiler_version = ">=0.23.0"
+
+[dependencies]
\ No newline at end of file
diff --git a/test_programs/noir_test_success/embedded_curve_ops/src/main.nr b/test_programs/noir_test_success/embedded_curve_ops/src/main.nr
new file mode 100644
index 00000000000..9e3c5d87874
--- /dev/null
+++ b/test_programs/noir_test_success/embedded_curve_ops/src/main.nr
@@ -0,0 +1,37 @@
+use dep::std::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul};
+
+#[test]
+
+fn test_infinite_point() {
+    let zero = EmbeddedCurvePoint::point_at_infinity();
+    let zero = EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true };
+    let g1 = EmbeddedCurvePoint { x: 1, y: 17631683881184975370165255887551781615748388533673675138860, is_infinite: false };
+    let g2 = g1 + g1;
+
+    let s1 = EmbeddedCurveScalar { lo: 1, hi: 0 };
+    let a = multi_scalar_mul([g1], [s1]);
+    assert(a[2] == 0);
+    assert(g1 + zero == g1);
+    assert(g1 - g1 == zero);
+    assert(g1 - zero == g1);
+    assert(zero + zero == zero);
+    assert(
+        multi_scalar_mul([g1], [s1])
+        == [1, 17631683881184975370165255887551781615748388533673675138860, 0]
+    );
+    assert(multi_scalar_mul([g1, g1], [s1, s1]) == [g2.x, g2.y, 0]);
+    assert(
+        multi_scalar_mul(
+            [g1, zero],
+            [EmbeddedCurveScalar { lo: 2, hi: 0 }, EmbeddedCurveScalar { lo: 42, hi: 25 }]
+        )
+        == [g2.x, g2.y, 0]
+    );
+    assert(
+        multi_scalar_mul(
+            [g1, g1, zero],
+            [s1, s1, EmbeddedCurveScalar { lo: 42, hi: 25 }]
+        )
+        == [g2.x, g2.y, 0]
+    );
+}
diff --git a/test_programs/rebuild.sh b/test_programs/rebuild.sh
index 51e97278281..4733bad10c3 100755
--- a/test_programs/rebuild.sh
+++ b/test_programs/rebuild.sh
@@ -16,13
+16,14 @@ process_dir() { if [ -d ./target/ ]; then rm -r ./target/ fi - nargo compile --only-acir && nargo execute witness + nargo execute witness if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then rm -r "$current_dir/acir_artifacts/$dir_name/target" fi mkdir $current_dir/acir_artifacts/$dir_name/target + mv ./target/$dir_name.json $current_dir/acir_artifacts/$dir_name/target/program.json mv ./target/*.gz $current_dir/acir_artifacts/$dir_name/target/ cd $current_dir @@ -70,4 +71,4 @@ if [ ! -z "$exit_status" ]; then echo "Rebuild failed!" exit $exit_status fi -echo "Rebuild Succeeded!" \ No newline at end of file +echo "Rebuild Succeeded!" diff --git a/tooling/backend_interface/CHANGELOG.md b/tooling/backend_interface/CHANGELOG.md deleted file mode 100644 index 9ebde989add..00000000000 --- a/tooling/backend_interface/CHANGELOG.md +++ /dev/null @@ -1,233 +0,0 @@ -# Changelog - -## [0.11.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.10.1...v0.11.0) (2023-08-18) - - -### ⚠ BREAKING CHANGES - -* Update `acvm` to 0.22.0 ([#240](https://github.com/noir-lang/acvm-backend-barretenberg/issues/240)) - -### Features - -* Update `acvm` to 0.22.0 ([#240](https://github.com/noir-lang/acvm-backend-barretenberg/issues/240)) ([d8342fd](https://github.com/noir-lang/acvm-backend-barretenberg/commit/d8342fd6da605ac3bbd889edf89cd122bc4689ce)) - -## [0.10.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.10.0...v0.10.1) (2023-08-18) - - -### Features - -* Migrate to `wasmer` 3.3.0 ([#236](https://github.com/noir-lang/acvm-backend-barretenberg/issues/236)) ([e115e38](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e115e38856887c6b1eeead3534534ac7e6327ea9)) - -## [0.10.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.9.1...v0.10.0) (2023-07-26) - - -### ⚠ BREAKING CHANGES - -* Migrate to ACVM 0.21.0 ([#234](https://github.com/noir-lang/acvm-backend-barretenberg/issues/234)) - -### Features - -* Migrate to ACVM 0.21.0 ([#234](https://github.com/noir-lang/acvm-backend-barretenberg/issues/234)) ([15c8676](https://github.com/noir-lang/acvm-backend-barretenberg/commit/15c86768685d2946a767c350f6ef5972c86677eb)) - -## [0.9.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.9.0...v0.9.1) (2023-07-21) - - -### Features - -* add support for atomic memory opcodes ([#232](https://github.com/noir-lang/acvm-backend-barretenberg/issues/232)) ([a7aa6e9](https://github.com/noir-lang/acvm-backend-barretenberg/commit/a7aa6e9505bb402c1b3db0a990845ed26928e7aa)) - -## [0.9.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.8.0...v0.9.0) (2023-07-17) - - -### ⚠ BREAKING CHANGES - -* update to ACVM 0.19.0 ([#230](https://github.com/noir-lang/acvm-backend-barretenberg/issues/230)) - -### Miscellaneous Chores - -* update to ACVM 0.19.0 ([#230](https://github.com/noir-lang/acvm-backend-barretenberg/issues/230)) ([3f1d967](https://github.com/noir-lang/acvm-backend-barretenberg/commit/3f1d9674b904acb02c2a3e52481be8a6104c3a9d)) - -## [0.8.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.7.0...v0.8.0) (2023-07-12) - - -### ⚠ BREAKING CHANGES - -* Update to acvm 0.18.1 ([#228](https://github.com/noir-lang/acvm-backend-barretenberg/issues/228)) - -### Features - -* Update to acvm 0.18.1 ([#228](https://github.com/noir-lang/acvm-backend-barretenberg/issues/228)) ([397098b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/397098b239efbe16785b1c9af108ca9fc4e24497)) - -## 
[0.7.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.6.1...v0.7.0) (2023-07-08) - - -### ⚠ BREAKING CHANGES - -* **bberg:** add secp256r1 builtin to barretenberg ([#223](https://github.com/noir-lang/acvm-backend-barretenberg/issues/223)) - -### Features - -* **bberg:** add secp256r1 builtin to barretenberg ([#223](https://github.com/noir-lang/acvm-backend-barretenberg/issues/223)) ([ceb4770](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ceb47705a492fcdcea1f3c098aaab42ea8edbf2e)) - -## [0.6.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.6.0...v0.6.1) (2023-07-06) - - -### Features - -* switch RecursiveAggregation support to true ([#225](https://github.com/noir-lang/acvm-backend-barretenberg/issues/225)) ([e9462ae](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e9462ae015ec0dfb0a23ccbb89562071f87940f5)) - -## [0.6.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.5.1...v0.6.0) (2023-07-06) - - -### ⚠ BREAKING CHANGES - -* Update to ACVM 0.16.0 ([#221](https://github.com/noir-lang/acvm-backend-barretenberg/issues/221)) - -### Features - -* Update to ACVM 0.16.0 ([#221](https://github.com/noir-lang/acvm-backend-barretenberg/issues/221)) ([062d5ed](https://github.com/noir-lang/acvm-backend-barretenberg/commit/062d5ed9b476fab8ac8d3ca13371699fb2aac332)) - -## [0.5.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.5.0...v0.5.1) (2023-06-20) - - -### Bug Fixes - -* Remove wasm32 target ([#219](https://github.com/noir-lang/acvm-backend-barretenberg/issues/219)) ([e4cbb6d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e4cbb6d476e8746de33c38506e2fcb970f1c866a)) - -## [0.5.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.4.0...v0.5.0) (2023-06-15) - - -### ⚠ BREAKING CHANGES - -* Update to target ACVM 0.15.0 ([#217](https://github.com/noir-lang/acvm-backend-barretenberg/issues/217)) - -### Features - -* Update to target ACVM 0.15.0 ([#217](https://github.com/noir-lang/acvm-backend-barretenberg/issues/217)) ([9331898](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9331898f161321c8b6a82d5ea850f197952b2ed2)) - -## [0.4.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.3.0...v0.4.0) (2023-06-07) - - -### ⚠ BREAKING CHANGES - -* Recursion ([#207](https://github.com/noir-lang/acvm-backend-barretenberg/issues/207)) - -### Features - -* Recursion ([#207](https://github.com/noir-lang/acvm-backend-barretenberg/issues/207)) ([6fc479b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6fc479b9ae99d59bbfeb1b895d63cdbea469dcaa)) - -## [0.3.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.2.0...v0.3.0) (2023-06-01) - - -### ⚠ BREAKING CHANGES - -* Update to ACVM 0.13.0 ([#205](https://github.com/noir-lang/acvm-backend-barretenberg/issues/205)) -* added keccakvar constraints ([#213](https://github.com/noir-lang/acvm-backend-barretenberg/issues/213)) -* update pedersen hashes for new implementation ([#212](https://github.com/noir-lang/acvm-backend-barretenberg/issues/212)) - -### Features - -* added keccakvar constraints ([91ea65f](https://github.com/noir-lang/acvm-backend-barretenberg/commit/91ea65f6af7039095c7a3af7bc1e4ce302a68a8d)) -* added keccakvar constraints ([#213](https://github.com/noir-lang/acvm-backend-barretenberg/issues/213)) ([91ea65f](https://github.com/noir-lang/acvm-backend-barretenberg/commit/91ea65f6af7039095c7a3af7bc1e4ce302a68a8d)) -* Update to ACVM 0.13.0 
([#205](https://github.com/noir-lang/acvm-backend-barretenberg/issues/205)) ([298446e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/298446ef8b69f528b6e2fd2abb2298d7b0a8118e)) - - -### Bug Fixes - -* Add or cleanup implementations for JS target ([#199](https://github.com/noir-lang/acvm-backend-barretenberg/issues/199)) ([f6134b7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/f6134b7b502cb74882300b0046ab91ab000daf3c)) -* update pedersen hashes for new impl ([9a233ce](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9a233ce8db9984b29b9cce0603f758d5281c89c9)) -* update pedersen hashes for new implementation ([#212](https://github.com/noir-lang/acvm-backend-barretenberg/issues/212)) ([9a233ce](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9a233ce8db9984b29b9cce0603f758d5281c89c9)) - -## [0.2.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.1.2...v0.2.0) (2023-05-22) - - -### ⚠ BREAKING CHANGES - -* Update to acvm 0.12.0 ([#165](https://github.com/noir-lang/acvm-backend-barretenberg/issues/165)) -* Add serialization logic for RAM and ROM opcodes ([#153](https://github.com/noir-lang/acvm-backend-barretenberg/issues/153)) - -### Features - -* Add serde to `ConstraintSystem` types ([#196](https://github.com/noir-lang/acvm-backend-barretenberg/issues/196)) ([4c04a79](https://github.com/noir-lang/acvm-backend-barretenberg/commit/4c04a79e6d2b0115f3b4526c60f9f7dae8b464ae)) -* Add serialization logic for RAM and ROM opcodes ([#153](https://github.com/noir-lang/acvm-backend-barretenberg/issues/153)) ([3d3847d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/3d3847de70e74a8f65c64e165ad15ae3d31f5350)) -* Update to acvm 0.12.0 ([#165](https://github.com/noir-lang/acvm-backend-barretenberg/issues/165)) ([d613c79](https://github.com/noir-lang/acvm-backend-barretenberg/commit/d613c79584a599f4adbd11d2ce3b61403c185b73)) - -## [0.1.2](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.1.1...v0.1.2) (2023-05-11) - - -### Bug Fixes - -* Remove star dependencies to allow publishing ([#182](https://github.com/noir-lang/acvm-backend-barretenberg/issues/182)) ([1727a79](https://github.com/noir-lang/acvm-backend-barretenberg/commit/1727a79ce7e66d95528f70c445cb4ec1b1ece636)) - -## [0.1.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.1.0...v0.1.1) (2023-05-11) - - -### Bug Fixes - -* Add description so crate can be published ([#180](https://github.com/noir-lang/acvm-backend-barretenberg/issues/180)) ([caabf94](https://github.com/noir-lang/acvm-backend-barretenberg/commit/caabf9434031c6023a5e3a436c87fba0a1072539)) - -## 0.1.0 (2023-05-10) - - -### ⚠ BREAKING CHANGES - -* Update to ACVM v0.11.0 ([#151](https://github.com/noir-lang/acvm-backend-barretenberg/issues/151)) -* Add Keccak constraints ([#150](https://github.com/noir-lang/acvm-backend-barretenberg/issues/150)) -* migrate to ACVM 0.10.3 ([#148](https://github.com/noir-lang/acvm-backend-barretenberg/issues/148)) -* remove all crates other than `acvm-backend-barretenberg` and remove workspace ([#147](https://github.com/noir-lang/acvm-backend-barretenberg/issues/147)) -* merge `barretenberg_static_lib` and `barretenberg_wasm` ([#117](https://github.com/noir-lang/acvm-backend-barretenberg/issues/117)) -* remove dead blake2 code ([#137](https://github.com/noir-lang/acvm-backend-barretenberg/issues/137)) -* Implement pseudo-builder pattern for ConstraintSystem & hide struct fields 
([#120](https://github.com/noir-lang/acvm-backend-barretenberg/issues/120)) -* return boolean rather than `FieldElement` from `verify_signature` ([#123](https://github.com/noir-lang/acvm-backend-barretenberg/issues/123)) -* avoid exposing internals of Assignments type ([#119](https://github.com/noir-lang/acvm-backend-barretenberg/issues/119)) -* update to acvm 0.9.0 ([#106](https://github.com/noir-lang/acvm-backend-barretenberg/issues/106)) -* Depend upon upstream barretenberg & switch to UltraPlonk ([#84](https://github.com/noir-lang/acvm-backend-barretenberg/issues/84)) -* update to ACVM 0.7.0 ([#90](https://github.com/noir-lang/acvm-backend-barretenberg/issues/90)) -* Remove create_proof and verify functions ([#82](https://github.com/noir-lang/acvm-backend-barretenberg/issues/82)) -* update to acvm v0.5.0 ([#60](https://github.com/noir-lang/acvm-backend-barretenberg/issues/60)) - -### Features - -* **acvm_interop:** Updates to reflect new acvm methods using pk/vk ([#50](https://github.com/noir-lang/acvm-backend-barretenberg/issues/50)) ([cff757d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/cff757dca7971161e4bd25e7a744d910c37c22be)) -* Add Keccak constraints ([#150](https://github.com/noir-lang/acvm-backend-barretenberg/issues/150)) ([ce2b9ed](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ce2b9ed456bd8d2ad8357c15736d62c2a5812add)) -* allow overriding transcript location with BARRETENBERG_TRANSCRIPT env var ([#86](https://github.com/noir-lang/acvm-backend-barretenberg/issues/86)) ([af92b99](https://github.com/noir-lang/acvm-backend-barretenberg/commit/af92b99c7b5f37e9659931af378a851b3658a80b)) -* **ci:** add concurrency group for rust workflow ([#63](https://github.com/noir-lang/acvm-backend-barretenberg/issues/63)) ([5c936bc](https://github.com/noir-lang/acvm-backend-barretenberg/commit/5c936bc63cc3adcf9d43c9c4ce69053566089ad9)) -* Depend upon upstream barretenberg & switch to UltraPlonk ([#84](https://github.com/noir-lang/acvm-backend-barretenberg/issues/84)) ([8437bf7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/8437bf7e08acadf43b55b307545336596a9fe766)) -* Implement pseudo-builder pattern for ConstraintSystem & hide struct fields ([#120](https://github.com/noir-lang/acvm-backend-barretenberg/issues/120)) ([8ed67d6](https://github.com/noir-lang/acvm-backend-barretenberg/commit/8ed67d68c71d655e1a6a5c38fa9ea1c3566f771d)) -* Leverage rustls when using downloader crate ([#46](https://github.com/noir-lang/acvm-backend-barretenberg/issues/46)) ([9de36b6](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9de36b642d125d1fb4facd1bf60db67946be70ae)) -* merge `barretenberg_static_lib` and `barretenberg_wasm` ([#117](https://github.com/noir-lang/acvm-backend-barretenberg/issues/117)) ([ba1d0d6](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ba1d0d61b94de91b15044d97608907c21bfb5299)) -* migrate to ACVM 0.10.3 ([#148](https://github.com/noir-lang/acvm-backend-barretenberg/issues/148)) ([c9fb9e8](https://github.com/noir-lang/acvm-backend-barretenberg/commit/c9fb9e806f1400a2ff7594a0669bec56025220bb)) -* remove all crates other than `acvm-backend-barretenberg` and remove workspace ([#147](https://github.com/noir-lang/acvm-backend-barretenberg/issues/147)) ([8fe7111](https://github.com/noir-lang/acvm-backend-barretenberg/commit/8fe7111ebdcb043764a83436744662e8c3ca5abc)) -* remove dead blake2 code ([#137](https://github.com/noir-lang/acvm-backend-barretenberg/issues/137)) 
([14d8a5b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/14d8a5b893eb1cb91d5bde908643b487b41809d6)) -* replace `downloader` dependency with `reqwest` ([#114](https://github.com/noir-lang/acvm-backend-barretenberg/issues/114)) ([dd62231](https://github.com/noir-lang/acvm-backend-barretenberg/commit/dd62231b8bfcee32e1029d31a07895b16159339c)) -* return boolean from `verify_signature` ([e560602](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e560602ebbd547386ca4cab35735ffa92e98ac4b)) -* return boolean rather than `FieldElement` from `check_membership` ([#124](https://github.com/noir-lang/acvm-backend-barretenberg/issues/124)) ([a0a338e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/a0a338e2295635a07f6b9e497c029160a5f323bc)) -* return boolean rather than `FieldElement` from `verify_signature` ([#123](https://github.com/noir-lang/acvm-backend-barretenberg/issues/123)) ([e560602](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e560602ebbd547386ca4cab35735ffa92e98ac4b)) -* store transcript in `.nargo/backends` directory ([#91](https://github.com/noir-lang/acvm-backend-barretenberg/issues/91)) ([c6b5023](https://github.com/noir-lang/acvm-backend-barretenberg/commit/c6b50231da065e7550bfe8bddf8e46f4cd8002d7)) -* update `aztec_backend_wasm` to use new serialization ([#94](https://github.com/noir-lang/acvm-backend-barretenberg/issues/94)) ([28014d8](https://github.com/noir-lang/acvm-backend-barretenberg/commit/28014d803d052a7f459e03dbd7b5b9210449b1d0)) -* update to acvm 0.9.0 ([#106](https://github.com/noir-lang/acvm-backend-barretenberg/issues/106)) ([ff350fb](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ff350fb111043964b8a14fc0df62508c87506423)) -* Update to ACVM v0.11.0 ([#151](https://github.com/noir-lang/acvm-backend-barretenberg/issues/151)) ([9202415](https://github.com/noir-lang/acvm-backend-barretenberg/commit/92024155532e15f25acb2f3ed8d5ca78da0fddd9)) -* update to acvm v0.5.0 ([#60](https://github.com/noir-lang/acvm-backend-barretenberg/issues/60)) ([74b4d8d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/74b4d8d8b118e4477880c04149e5e9d93d388384)) - - -### Bug Fixes - -* Avoid exposing internals of Assignments type ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) -* avoid exposing internals of Assignments type ([#119](https://github.com/noir-lang/acvm-backend-barretenberg/issues/119)) ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) -* fix serialization of arithmetic expressions ([#145](https://github.com/noir-lang/acvm-backend-barretenberg/issues/145)) ([7f42535](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7f4253570257d9dedcfa8c8fb96b9d097ef06419)) -* Implement random_get for wasm backend ([#102](https://github.com/noir-lang/acvm-backend-barretenberg/issues/102)) ([9c0f06e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9c0f06ef56f23e2b5794e810f433e36ff2c5d6b5)) -* rename gates to opcodes ([#59](https://github.com/noir-lang/acvm-backend-barretenberg/issues/59)) ([6e05307](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6e053072d8b9c5d93c296f10782251ccb597f902)) -* reorganize and ensure contracts can be compiled in Remix ([#112](https://github.com/noir-lang/acvm-backend-barretenberg/issues/112)) ([7ec5693](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7ec5693f194a79c379ae2952bc17a31ee63a42b9)) -* replace 
`serialize_circuit` function with `from<&Circuit>` ([#118](https://github.com/noir-lang/acvm-backend-barretenberg/issues/118)) ([94f83a7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/94f83a78e32d91dfb7ae9824923695d9b4c425b0)) -* Replace serialize_circuit function with `from<&Circuit>` ([94f83a7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/94f83a78e32d91dfb7ae9824923695d9b4c425b0)) -* Update bb-sys to resolve bugs in some environments ([#129](https://github.com/noir-lang/acvm-backend-barretenberg/issues/129)) ([e3d4504](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e3d4504f15e1295e637c4da80b1d08c87c267c45)) -* Update dependency containing pk write fix for large general circuits ([#78](https://github.com/noir-lang/acvm-backend-barretenberg/issues/78)) ([2cb523d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/2cb523d2ab95249157b22e198d9dcd6841c3eed8)) -* Update to bb-sys 0.1.1 and update bb in lockfile ([00bb157](https://github.com/noir-lang/acvm-backend-barretenberg/commit/00bb15779dfb64539eeb3f3bb4c4deeba106f2fe)) -* update to bb-sys 0.1.1 and update bb in lockfile ([#111](https://github.com/noir-lang/acvm-backend-barretenberg/issues/111)) ([00bb157](https://github.com/noir-lang/acvm-backend-barretenberg/commit/00bb15779dfb64539eeb3f3bb4c4deeba106f2fe)) -* use `Barretenberg.call` to query circuit size from wasm ([#121](https://github.com/noir-lang/acvm-backend-barretenberg/issues/121)) ([a775af1](https://github.com/noir-lang/acvm-backend-barretenberg/commit/a775af14137cc7bc2e9d8a063fa718a5a9abe6cb)) - - -### Miscellaneous Chores - -* Remove create_proof and verify functions ([#82](https://github.com/noir-lang/acvm-backend-barretenberg/issues/82)) ([ad0c422](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ad0c4228488457bd155ff381186ecf583f18bfac)) -* update to ACVM 0.7.0 ([#90](https://github.com/noir-lang/acvm-backend-barretenberg/issues/90)) ([6c03687](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6c036870a6a8e26612ab8b4f90a162f7540b42e2)) diff --git a/tooling/backend_interface/Cargo.toml b/tooling/backend_interface/Cargo.toml deleted file mode 100644 index f6b5d5d0132..00000000000 --- a/tooling/backend_interface/Cargo.toml +++ /dev/null @@ -1,35 +0,0 @@ -[package] -name = "backend-interface" -description = "The definition of the backend CLI interface which Nargo uses for proving/verifying ACIR circuits." 
-version.workspace = true -authors.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -acvm.workspace = true -dirs.workspace = true -thiserror.workspace = true -serde_json.workspace = true -bb_abstraction_leaks.workspace = true -tracing.workspace = true - -tempfile.workspace = true - -## bb binary downloading -tar = "~0.4.15" -flate2 = "~1.0.1" -reqwest = { version = "0.11.20", default-features = false, features = [ - "rustls-tls", - "blocking", -] } - -[dev-dependencies] -test-binary = "3.0.1" - -[build-dependencies] -build-target = "0.4.0" -const_format.workspace = true diff --git a/tooling/backend_interface/src/cli/contract.rs b/tooling/backend_interface/src/cli/contract.rs deleted file mode 100644 index e83fc1909b6..00000000000 --- a/tooling/backend_interface/src/cli/contract.rs +++ /dev/null @@ -1,71 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VerifyCommand will call the barretenberg binary -/// to return a solidity library with the verification key -/// that can be used to verify proofs on-chain. -/// -/// This does not return a Solidity file that is able -/// to verify a proof. See acvm_interop/contract.sol for the -/// remaining logic that is missing. -pub(crate) struct ContractCommand { - pub(crate) crs_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl ContractCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command - .arg("contract") - .arg("-c") - .arg(self.crs_path) - .arg("-k") - .arg(self.vk_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - - if output.status.success() { - String::from_utf8(output.stdout) - .map_err(|error| BackendError::InvalidUTF8Vector(error.into_bytes())) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} - -#[test] -fn contract_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let vk_path = temp_directory_path.join("vk"); - - let crs_path = backend.backend_directory(); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - - let write_vk_command = super::WriteVkCommand { - bytecode_path, - vk_path_output: vk_path.clone(), - crs_path: crs_path.clone(), - }; - write_vk_command.run(backend.binary_path())?; - - let contract_command = ContractCommand { vk_path, crs_path }; - contract_command.run(backend.binary_path())?; - - drop(temp_directory); - - Ok(()) -} diff --git a/tooling/backend_interface/src/cli/gates.rs b/tooling/backend_interface/src/cli/gates.rs deleted file mode 100644 index aca05f0232a..00000000000 --- a/tooling/backend_interface/src/cli/gates.rs +++ /dev/null @@ -1,64 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// GatesCommand will call the barretenberg binary -/// to return the number of gates needed to create a proof -/// for the given bytecode. 
-pub(crate) struct GatesCommand { - pub(crate) crs_path: PathBuf, - pub(crate) bytecode_path: PathBuf, -} - -impl GatesCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let output = std::process::Command::new(binary_path) - .arg("gates") - .arg("-c") - .arg(self.crs_path) - .arg("-b") - .arg(self.bytecode_path) - .output()?; - - if !output.status.success() { - return Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))); - } - // Note: barretenberg includes the newline, so that subsequent prints to stdout - // are not on the same line as the gates output. - - const EXPECTED_BYTES: usize = 8; - let gates_bytes: [u8; EXPECTED_BYTES] = - output.stdout.as_slice().try_into().map_err(|_| { - BackendError::UnexpectedNumberOfBytes(EXPECTED_BYTES, output.stdout.clone()) - })?; - - // Convert bytes to u64 in little-endian format - let value = u64::from_le_bytes(gates_bytes); - - Ok(value as u32) - } -} - -#[test] -fn gate_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let crs_path = backend.backend_directory(); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - - let gate_command = GatesCommand { crs_path, bytecode_path }; - - let output = gate_command.run(backend.binary_path())?; - // Mock backend always returns zero gates. - assert_eq!(output, 0); - - Ok(()) -} diff --git a/tooling/backend_interface/src/cli/mod.rs b/tooling/backend_interface/src/cli/mod.rs deleted file mode 100644 index df43bd5cc2f..00000000000 --- a/tooling/backend_interface/src/cli/mod.rs +++ /dev/null @@ -1,39 +0,0 @@ -// Reference: https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/main.cpp - -mod contract; -mod gates; -mod proof_as_fields; -mod prove; -mod verify; -mod version; -mod vk_as_fields; -mod write_vk; - -pub(crate) use contract::ContractCommand; -pub(crate) use gates::GatesCommand; -pub(crate) use proof_as_fields::ProofAsFieldsCommand; -pub(crate) use prove::ProveCommand; -pub(crate) use verify::VerifyCommand; -pub(crate) use version::VersionCommand; -pub(crate) use vk_as_fields::VkAsFieldsCommand; -pub(crate) use write_vk::WriteVkCommand; - -#[test] -fn no_command_provided_works() -> Result<(), crate::BackendError> { - // This is a simple test to check that the binaries work - - let backend = crate::get_mock_backend()?; - - let output = std::process::Command::new(backend.binary_path()).output()?; - - let stderr = string_from_stderr(&output.stderr); - // Assert help message is printed due to no command being provided. 
- assert!(stderr.contains("Usage: mock_backend ")); - - Ok(()) -} - -// Converts a stderr byte array to a string (including invalid characters) -fn string_from_stderr(stderr: &[u8]) -> String { - String::from_utf8_lossy(stderr).to_string() -} diff --git a/tooling/backend_interface/src/cli/proof_as_fields.rs b/tooling/backend_interface/src/cli/proof_as_fields.rs deleted file mode 100644 index 7eb1c1ef35c..00000000000 --- a/tooling/backend_interface/src/cli/proof_as_fields.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::path::{Path, PathBuf}; - -use acvm::FieldElement; - -use crate::BackendError; - -use super::string_from_stderr; - -/// `ProofAsFieldsCommand` will call the barretenberg binary -/// to split a proof into a representation as [`FieldElement`]s. -pub(crate) struct ProofAsFieldsCommand { - pub(crate) proof_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl ProofAsFieldsCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("proof_as_fields") - .arg("-p") - .arg(self.proof_path) - .arg("-k") - .arg(self.vk_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - if output.status.success() { - let string_output = String::from_utf8(output.stdout).unwrap(); - serde_json::from_str(&string_output) - .map_err(|err| BackendError::CommandFailed(err.to_string())) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/tooling/backend_interface/src/cli/prove.rs b/tooling/backend_interface/src/cli/prove.rs deleted file mode 100644 index c63d8afab54..00000000000 --- a/tooling/backend_interface/src/cli/prove.rs +++ /dev/null @@ -1,66 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// ProveCommand will call the barretenberg binary -/// to create a proof, given the witness and the bytecode. -/// -/// Note:Internally barretenberg will create and discard the -/// proving key, so this is not returned. -/// -/// The proof will be written to the specified output file. 
-pub(crate) struct ProveCommand { - pub(crate) crs_path: PathBuf, - pub(crate) bytecode_path: PathBuf, - pub(crate) witness_path: PathBuf, -} - -impl ProveCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("prove") - .arg("-c") - .arg(self.crs_path) - .arg("-b") - .arg(self.bytecode_path) - .arg("-w") - .arg(self.witness_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - if output.status.success() { - Ok(output.stdout) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} - -#[test] -fn prove_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let witness_path = temp_directory_path.join("witness.tr"); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - std::fs::File::create(&witness_path).expect("file should be created"); - - let crs_path = backend.backend_directory(); - let prove_command = ProveCommand { crs_path, bytecode_path, witness_path }; - - let proof = prove_command.run(backend.binary_path())?; - assert_eq!(proof, "proof".as_bytes()); - drop(temp_directory); - - Ok(()) -} diff --git a/tooling/backend_interface/src/cli/verify.rs b/tooling/backend_interface/src/cli/verify.rs deleted file mode 100644 index 1a4ba50b7de..00000000000 --- a/tooling/backend_interface/src/cli/verify.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -/// VerifyCommand will call the barretenberg binary -/// to verify a proof -pub(crate) struct VerifyCommand { - pub(crate) crs_path: PathBuf, - pub(crate) proof_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl VerifyCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command - .arg("verify") - .arg("-c") - .arg(self.crs_path) - .arg("-p") - .arg(self.proof_path) - .arg("-k") - .arg(self.vk_path); - - let output = command.output()?; - - // We currently do not distinguish between an invalid proof and an error inside the backend. 
- Ok(output.status.success()) - } -} - -#[test] -fn verify_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - use super::{ProveCommand, WriteVkCommand}; - use crate::proof_system::write_to_file; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let witness_path = temp_directory_path.join("witness.tr"); - let proof_path = temp_directory_path.join("1_mul.proof"); - let vk_path_output = temp_directory_path.join("vk"); - - let crs_path = backend.backend_directory(); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - std::fs::File::create(&witness_path).expect("file should be created"); - - let write_vk_command = WriteVkCommand { - bytecode_path: bytecode_path.clone(), - crs_path: crs_path.clone(), - vk_path_output: vk_path_output.clone(), - }; - - write_vk_command.run(backend.binary_path())?; - - let prove_command = ProveCommand { crs_path: crs_path.clone(), bytecode_path, witness_path }; - let proof = prove_command.run(backend.binary_path())?; - - write_to_file(&proof, &proof_path); - - let verify_command = VerifyCommand { crs_path, proof_path, vk_path: vk_path_output }; - - let verified = verify_command.run(backend.binary_path())?; - assert!(verified); - - drop(temp_directory); - Ok(()) -} diff --git a/tooling/backend_interface/src/cli/version.rs b/tooling/backend_interface/src/cli/version.rs deleted file mode 100644 index 83ab72a870e..00000000000 --- a/tooling/backend_interface/src/cli/version.rs +++ /dev/null @@ -1,29 +0,0 @@ -use std::path::Path; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VersionCommand will call the backend binary -/// to query installed version. -pub(crate) struct VersionCommand; - -impl VersionCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command.arg("--version"); - - let output = command.output()?; - if output.status.success() { - match String::from_utf8(output.stdout) { - Ok(result) => Ok(result), - Err(_) => Err(BackendError::CommandFailed( - "Unexpected output from --version check.".to_owned(), - )), - } - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/tooling/backend_interface/src/cli/vk_as_fields.rs b/tooling/backend_interface/src/cli/vk_as_fields.rs deleted file mode 100644 index 1b0212241c4..00000000000 --- a/tooling/backend_interface/src/cli/vk_as_fields.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::path::{Path, PathBuf}; - -use acvm::FieldElement; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VkAsFieldsCommand will call the barretenberg binary -/// to split a verification key into a representation as [`FieldElement`]s. -/// -/// The hash of the verification key will also be returned. 
-pub(crate) struct VkAsFieldsCommand { - pub(crate) vk_path: PathBuf, -} - -impl VkAsFieldsCommand { - pub(crate) fn run( - self, - binary_path: &Path, - ) -> Result<(FieldElement, Vec<FieldElement>), BackendError> { - let mut command = std::process::Command::new(binary_path); - - command.arg("vk_as_fields").arg("-k").arg(self.vk_path).arg("-o").arg("-"); - - let output = command.output()?; - if output.status.success() { - let string_output = String::from_utf8(output.stdout).unwrap(); - let mut fields: Vec<FieldElement> = serde_json::from_str(&string_output) - .map_err(|err| BackendError::CommandFailed(err.to_string()))?; - - // The first element of this vector is the hash of the verification key, we want to split that off. - let hash = fields.remove(0); - Ok((hash, fields)) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/tooling/backend_interface/src/cli/write_vk.rs b/tooling/backend_interface/src/cli/write_vk.rs deleted file mode 100644 index da9fc04cbef..00000000000 --- a/tooling/backend_interface/src/cli/write_vk.rs +++ /dev/null @@ -1,58 +0,0 @@ -use std::path::{Path, PathBuf}; - -use super::string_from_stderr; -use crate::BackendError; - -/// WriteVkCommand will call the barretenberg binary -/// to write a verification key to a file -pub(crate) struct WriteVkCommand { - pub(crate) crs_path: PathBuf, - pub(crate) bytecode_path: PathBuf, - pub(crate) vk_path_output: PathBuf, -} - -impl WriteVkCommand { - #[tracing::instrument(level = "trace", name = "vk_generation", skip_all)] - pub(crate) fn run(self, binary_path: &Path) -> Result<(), BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("write_vk") - .arg("-c") - .arg(self.crs_path) - .arg("-b") - .arg(self.bytecode_path) - .arg("-o") - .arg(self.vk_path_output); - - let output = command.output()?; - if output.status.success() { - Ok(()) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} - -#[test] -fn write_vk_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let vk_path_output = temp_directory.path().join("vk"); - - let crs_path = backend.backend_directory(); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - - let write_vk_command = WriteVkCommand { bytecode_path, crs_path, vk_path_output }; - - write_vk_command.run(backend.binary_path())?; - drop(temp_directory); - - Ok(()) -} diff --git a/tooling/backend_interface/src/download.rs b/tooling/backend_interface/src/download.rs deleted file mode 100644 index 60ecb14e642..00000000000 --- a/tooling/backend_interface/src/download.rs +++ /dev/null @@ -1,58 +0,0 @@ -use std::{ - io::{Cursor, ErrorKind}, - path::Path, -}; - -/// Downloads a zipped archive and unpacks the backend binary to `destination_path`. -/// -/// # Backend Requirements -/// -/// In order for a backend to be compatible with this function: -/// - `backend_url` must serve a gzipped tarball. -/// - The tarball must only contain the backend's binary. -/// - The binary file must be located at the archive root.
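Those three requirements map directly onto the implementation that follows: decompress, unpack into a scratch directory, and copy the single entry into place. A condensed sketch under the same assumptions (the `flate2`, `tar`, and `tempfile` crates, as used by the function below; the archive is assumed to be already downloaded into memory):

use std::io::Cursor;
use std::path::Path;

use flate2::read::GzDecoder;
use tar::Archive;

// Sketch: unpack a gzipped tarball assumed to contain exactly one file
// (the backend binary) at its root, and install it at `destination`.
fn install_single_binary(tarball: Vec<u8>, destination: &Path) -> std::io::Result<()> {
    let mut archive = Archive::new(GzDecoder::new(Cursor::new(tarball)));

    let temp_dir = tempfile::tempdir()?;
    archive.unpack(temp_dir.path())?;

    // Requirement: the archive holds a single file, the backend binary.
    let binary = std::fs::read_dir(temp_dir.path())?
        .next()
        .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::Other, "empty archive"))??
        .path();

    if let Some(parent) = destination.parent() {
        std::fs::create_dir_all(parent)?;
    }
    std::fs::copy(binary, destination)?;
    Ok(())
}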
-pub fn download_backend(backend_url: &str, destination_path: &Path) -> std::io::Result<()> { - use flate2::read::GzDecoder; - use tar::Archive; - use tempfile::tempdir; - - // Download sources - let compressed_file: Cursor<Vec<u8>> = download_binary_from_url(backend_url).map_err(|_| { - std::io::Error::new( - ErrorKind::Other, - format!("Could not download backend from install url: {backend_url}"), - ) - })?; - - // Unpack the tarball - let gz_decoder = GzDecoder::new(compressed_file); - let mut archive = Archive::new(gz_decoder); - - let temp_directory = tempdir()?; - archive.unpack(&temp_directory)?; - - // Assume that the archive contains a single file which is the backend binary. - let mut archive_files = std::fs::read_dir(&temp_directory)?; - let temp_binary_path = archive_files.next().unwrap()?.path(); - - // Create directory to place binary in. - std::fs::create_dir_all(destination_path.parent().unwrap())?; - - // Rename the binary to the desired name - std::fs::copy(temp_binary_path, destination_path)?; - - drop(temp_directory); - - Ok(()) -} - -/// Try to download the specified URL into a buffer which is returned. -fn download_binary_from_url(url: &str) -> Result<Cursor<Vec<u8>>, reqwest::Error> { - let response = reqwest::blocking::get(url)?; - - let bytes = response.bytes()?; - - // TODO: Check SHA of downloaded binary - - Ok(Cursor::new(bytes.to_vec())) -} diff --git a/tooling/backend_interface/src/lib.rs b/tooling/backend_interface/src/lib.rs deleted file mode 100644 index eab98852555..00000000000 --- a/tooling/backend_interface/src/lib.rs +++ /dev/null @@ -1,150 +0,0 @@ -#![warn(unused_crate_dependencies, unused_extern_crates)] -#![warn(unreachable_pub)] - -use std::path::PathBuf; - -mod cli; -mod download; -mod proof_system; -mod smart_contract; - -pub use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; -use bb_abstraction_leaks::BB_VERSION; -use cli::VersionCommand; -pub use download::download_backend; -use tracing::warn; - -const BACKENDS_DIR: &str = ".nargo/backends"; - -pub fn backends_directory() -> PathBuf { - let home_directory = dirs::home_dir().unwrap(); - home_directory.join(BACKENDS_DIR) -} - -#[cfg(test)] -test_binary::build_test_binary_once!(mock_backend, "test-binaries"); - -#[cfg(test)] -fn get_mock_backend() -> Result<Backend, BackendError> { - std::env::set_var("NARGO_BACKEND_PATH", path_to_mock_backend()); - - let mock_backend = Backend::new("mock_backend".to_string()); - mock_backend.assert_binary_exists()?; - - Ok(mock_backend) -} - -#[derive(Debug, thiserror::Error)] -pub enum BackendError { - #[error(transparent)] - IoError(#[from] std::io::Error), - - #[error("Backend binary does not exist")] - MissingBinary, - - #[error("The backend responded with a malformed UTF8 byte vector: {0:?}")] - InvalidUTF8Vector(Vec<u8>), - - #[error( - "The backend responded with an unexpected number of bytes.
Expected: {0} but got {} ({1:?})", .1.len() - )] - UnexpectedNumberOfBytes(usize, Vec<u8>), - - #[error("The backend encountered an error: {0:?}")] - CommandFailed(String), -} - -#[derive(Debug)] -pub struct Backend { - name: String, - binary_path: PathBuf, -} - -impl Backend { - pub fn new(name: String) -> Backend { - let binary_path = if let Some(binary_path) = std::env::var_os("NARGO_BACKEND_PATH") { - PathBuf::from(binary_path) - } else { - const BINARY_NAME: &str = "backend_binary"; - - backends_directory().join(&name).join(BINARY_NAME) - }; - Backend { name, binary_path } - } - - pub fn name(&self) -> &str { - &self.name - } - - fn binary_path(&self) -> &PathBuf { - &self.binary_path - } - - fn assert_binary_exists(&self) -> Result<&PathBuf, BackendError> { - let binary_path = self.binary_path(); - if binary_path.is_file() { - Ok(binary_path) - } else { - if self.name == ACVM_BACKEND_BARRETENBERG { - // If we're trying to use barretenberg, automatically go and install it. - let bb_url = std::env::var("BB_BINARY_URL") - .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); - download_backend(&bb_url, binary_path)?; - return Ok(binary_path); - } - Err(BackendError::MissingBinary) - } - } - - fn backend_directory(&self) -> PathBuf { - self.binary_path() - .parent() - .expect("backend binary should have a parent directory") - .to_path_buf() - } - - fn crs_directory(&self) -> PathBuf { - self.backend_directory().join("crs") - } - - fn assert_correct_version(&self) -> Result<&PathBuf, BackendError> { - let binary_path = self.binary_path(); - if binary_path.to_string_lossy().contains(ACVM_BACKEND_BARRETENBERG) { - match VersionCommand.run(binary_path) { - // If version matches then do nothing. - Ok(version_string) if version_string == BB_VERSION => (), - - // If version doesn't match then download the correct version. - Ok(version_string) => { - warn!("`{ACVM_BACKEND_BARRETENBERG}` version `{version_string}` is different from expected `{BB_VERSION}`. Downloading expected version..."); - let bb_url = std::env::var("BB_BINARY_URL") - .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); - download_backend(&bb_url, binary_path)?; - } - - // If `bb` fails to report its version, then attempt to fix it by re-downloading the binary. - Err(_) => { - warn!("Could not determine version of `{ACVM_BACKEND_BARRETENBERG}`.
Downloading expected version..."); - let bb_url = std::env::var("BB_BINARY_URL") - .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); - download_backend(&bb_url, binary_path)?; - } - } - } - Ok(binary_path) - } -} - -#[cfg(test)] -mod backend { - use crate::{Backend, BackendError}; - - #[test] - fn raises_error_on_missing_binary() { - let bad_backend = Backend::new("i_don't_exist".to_string()); - - let binary_path = bad_backend.assert_binary_exists(); - - assert!(matches!(binary_path, Err(BackendError::MissingBinary))); - } -} diff --git a/tooling/backend_interface/src/proof_system.rs b/tooling/backend_interface/src/proof_system.rs deleted file mode 100644 index 20a6dcf70f1..00000000000 --- a/tooling/backend_interface/src/proof_system.rs +++ /dev/null @@ -1,169 +0,0 @@ -use std::fs::File; -use std::io::Write; -use std::path::Path; - -use acvm::acir::{ - circuit::Program, - native_types::{WitnessMap, WitnessStack}, -}; -use acvm::FieldElement; -use tempfile::tempdir; -use tracing::warn; - -use crate::cli::{ - GatesCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, VkAsFieldsCommand, - WriteVkCommand, -}; -use crate::{Backend, BackendError}; - -impl Backend { - pub fn get_exact_circuit_size(&self, program: &Program) -> Result<u32, BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - - // Create a temporary file for the circuit - let circuit_path = temp_directory.join("circuit").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &circuit_path); - - GatesCommand { crs_path: self.crs_directory(), bytecode_path: circuit_path } - .run(binary_path) - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn prove( - &self, - program: &Program, - witness_stack: WitnessStack, - ) -> Result<Vec<u8>, BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - - // Create a temporary file for the witness - let serialized_witnesses: Vec<u8> = - witness_stack.try_into().expect("could not serialize witness map"); - let witness_path = temp_directory.join("witness").with_extension("tr"); - write_to_file(&serialized_witnesses, &witness_path); - - // Create a temporary file for the circuit - // - let bytecode_path = temp_directory.join("program").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &bytecode_path); - - // Create proof and store it in the specified path - let proof_with_public_inputs = - ProveCommand { crs_path: self.crs_directory(), bytecode_path, witness_path } - .run(binary_path)?; - - let proof = bb_abstraction_leaks::remove_public_inputs( - // TODO(https://github.com/noir-lang/noir/issues/4428) - program.functions[0].public_inputs().0.len(), - &proof_with_public_inputs, - ); - Ok(proof) - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn verify( - &self, - proof: &[u8], - public_inputs: WitnessMap, - program: &Program, - ) -> Result<bool, BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory =
temp_directory.path().to_path_buf(); - - // Create a temporary file for the proof - let proof_with_public_inputs = - bb_abstraction_leaks::prepend_public_inputs(proof.to_vec(), public_inputs); - let proof_path = temp_directory.join("proof").with_extension("proof"); - write_to_file(&proof_with_public_inputs, &proof_path); - - // Create a temporary file for the circuit - let bytecode_path = temp_directory.join("program").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &bytecode_path); - - // Create the verification key and write it to the specified path - let vk_path = temp_directory.join("vk"); - - WriteVkCommand { - crs_path: self.crs_directory(), - bytecode_path, - vk_path_output: vk_path.clone(), - } - .run(binary_path)?; - - // Verify the proof - VerifyCommand { crs_path: self.crs_directory(), proof_path, vk_path }.run(binary_path) - } - - pub fn get_intermediate_proof_artifacts( - &self, - program: &Program, - proof: &[u8], - public_inputs: WitnessMap, - ) -> Result<(Vec<FieldElement>, FieldElement, Vec<FieldElement>), BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - - // Create a temporary file for the circuit - // - let bytecode_path = temp_directory.join("program").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &bytecode_path); - - // Create the verification key and write it to the specified path - let vk_path = temp_directory.join("vk"); - - WriteVkCommand { - crs_path: self.crs_directory(), - bytecode_path, - vk_path_output: vk_path.clone(), - } - .run(binary_path)?; - - // Create a temporary file for the proof - - let proof_with_public_inputs = - bb_abstraction_leaks::prepend_public_inputs(proof.to_vec(), public_inputs); - let proof_path = temp_directory.join("proof").with_extension("proof"); - write_to_file(&proof_with_public_inputs, &proof_path); - - // Now ready to generate intermediate artifacts.
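For orientation, the three values produced here are what a recursive verifier consumes. A rough sketch grouping them, with `Fr` as an illustrative stand-in for the backend's field type (the struct and names are not part of bb's API):

// Sketch only: `Fr` and all names here are illustrative.
type Fr = [u8; 32];

struct RecursionArtifacts {
    proof_as_fields: Vec<Fr>, // proof flattened into field elements
    vk_hash: Fr,              // hash of the verification key
    vk_as_fields: Vec<Fr>,    // verification key flattened into field elements
}

fn main() {
    let artifacts =
        RecursionArtifacts { proof_as_fields: vec![], vk_hash: [0; 32], vk_as_fields: vec![] };
    assert!(artifacts.proof_as_fields.is_empty());
}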
- - let proof_as_fields = - ProofAsFieldsCommand { proof_path, vk_path: vk_path.clone() }.run(binary_path)?; - - let (vk_hash, vk_as_fields) = VkAsFieldsCommand { vk_path }.run(binary_path)?; - - Ok((proof_as_fields, vk_hash, vk_as_fields)) - } -} - -pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String { - let display = path.display(); - - let mut file = match File::create(path) { - Err(why) => panic!("couldn't create {display}: {why}"), - Ok(file) => file, - }; - - match file.write_all(bytes) { - Err(why) => panic!("couldn't write to {display}: {why}"), - Ok(_) => display.to_string(), - } -} diff --git a/tooling/backend_interface/src/smart_contract.rs b/tooling/backend_interface/src/smart_contract.rs deleted file mode 100644 index 153ab52c83f..00000000000 --- a/tooling/backend_interface/src/smart_contract.rs +++ /dev/null @@ -1,70 +0,0 @@ -use super::proof_system::write_to_file; -use crate::{ - cli::{ContractCommand, WriteVkCommand}, - Backend, BackendError, -}; -use acvm::acir::circuit::Program; -use tempfile::tempdir; - -impl Backend { - pub fn eth_contract(&self, program: &Program) -> Result<String, BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path().to_path_buf(); - - // Create a temporary file for the circuit - let bytecode_path = temp_directory_path.join("program").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &bytecode_path); - - // Create the verification key and write it to the specified path - let vk_path = temp_directory_path.join("vk"); - - WriteVkCommand { - crs_path: self.crs_directory(), - bytecode_path, - vk_path_output: vk_path.clone(), - } - .run(binary_path)?; - - ContractCommand { crs_path: self.crs_directory(), vk_path }.run(binary_path) - } -} - -#[cfg(test)] -mod tests { - use std::collections::BTreeSet; - - use acvm::acir::{ - circuit::{Circuit, ExpressionWidth, Opcode, Program, PublicInputs}, - native_types::{Expression, Witness}, - }; - - use crate::{get_mock_backend, BackendError}; - - #[test] - fn test_smart_contract() -> Result<(), BackendError> { - let expression = &(Witness(1) + Witness(2)) - &Expression::from(Witness(3)); - let constraint = Opcode::AssertZero(expression); - - let circuit = Circuit { - current_witness_index: 4, - expression_width: ExpressionWidth::Bounded { width: 4 }, - opcodes: vec![constraint], - private_parameters: BTreeSet::from([Witness(1), Witness(2)]), - public_parameters: PublicInputs::default(), - return_values: PublicInputs::default(), - assert_messages: Default::default(), - recursive: false, - }; - let program = Program { functions: vec![circuit], unconstrained_functions: Vec::new() }; - - let contract = get_mock_backend()?.eth_contract(&program)?; - - assert!(contract.contains("contract VerifierContract")); - - Ok(()) - } -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock b/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock deleted file mode 100644 index 3c14a936907..00000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock +++ /dev/null @@ -1,223 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing.
-version = 3 - -[[package]] -name = "anstream" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" - -[[package]] -name = "anstyle-parse" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" -dependencies = [ - "windows-sys", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" -dependencies = [ - "anstyle", - "windows-sys", -] - -[[package]] -name = "clap" -version = "4.4.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.4.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "clap_lex" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" - -[[package]] -name = "colorchoice" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" - -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - -[[package]] -name = "mock_backend" -version = "0.1.0" -dependencies = [ - "clap", -] - -[[package]] -name = "proc-macro2" -version = "1.0.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "syn" -version = "2.0.48" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "unicode-ident" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "utf8parse" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-targets" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" diff --git a/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml b/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml deleted file mode 100644 index f527b03a7b9..00000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[workspace] - -[package] -name = "mock_backend" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -clap = { version = "4.3.19", features = ["derive"] } diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs deleted file mode 100644 index 7ee41121d61..00000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs +++ /dev/null @@ -1,21 +0,0 @@ -use 
clap::Args; -use std::io::Write; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct ContractCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option<PathBuf>, - - #[clap(short = 'k')] - pub(crate) vk_path: PathBuf, - - #[clap(short = 'o')] - pub(crate) contract_path: PathBuf, -} - -pub(crate) fn run(args: ContractCommand) { - assert!(args.vk_path.is_file(), "Could not find vk file at provided path"); - - std::io::stdout().write_all(b"contract VerifierContract {}").unwrap(); -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs deleted file mode 100644 index 3cc397d3292..00000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs +++ /dev/null @@ -1,18 +0,0 @@ -use clap::Args; -use std::io::Write; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct GatesCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option<PathBuf>, - - #[clap(short = 'b')] - pub(crate) bytecode_path: PathBuf, -} - -pub(crate) fn run(args: GatesCommand) { - assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path"); - - std::io::stdout().write_all(&0u64.to_le_bytes()).unwrap(); -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/main.rs b/tooling/backend_interface/test-binaries/mock_backend/src/main.rs deleted file mode 100644 index 74ea82d28f8..00000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/main.rs +++ /dev/null @@ -1,41 +0,0 @@ -#![forbid(unsafe_code)] -#![warn(unreachable_pub)] -#![warn(clippy::semicolon_if_nothing_returned)] -#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] - -use clap::{Parser, Subcommand}; - -mod contract_cmd; -mod gates_cmd; -mod prove_cmd; -mod verify_cmd; -mod write_vk_cmd; - -#[derive(Parser, Debug)] -#[command(name = "mock_backend")] -struct BackendCli { - #[command(subcommand)] - command: BackendCommand, -} - -#[derive(Subcommand, Clone, Debug)] -enum BackendCommand { - Contract(contract_cmd::ContractCommand), - Gates(gates_cmd::GatesCommand), - Prove(prove_cmd::ProveCommand), - Verify(verify_cmd::VerifyCommand), - #[command(name = "write_vk")] - WriteVk(write_vk_cmd::WriteVkCommand), -} - -fn main() { - let BackendCli { command } = BackendCli::parse(); - - match command { - BackendCommand::Contract(args) => contract_cmd::run(args), - BackendCommand::Gates(args) => gates_cmd::run(args), - BackendCommand::Prove(args) => prove_cmd::run(args), - BackendCommand::Verify(args) => verify_cmd::run(args), - BackendCommand::WriteVk(args) => write_vk_cmd::run(args), - }; -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs deleted file mode 100644 index 3967778d4e8..00000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs +++ /dev/null @@ -1,25 +0,0 @@ -use clap::Args; -use std::io::Write; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct ProveCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option<PathBuf>, - - #[clap(short = 'b')] - pub(crate) bytecode_path: PathBuf, - - #[clap(short = 'w')] - pub(crate) witness_path: PathBuf, - - #[clap(short = 'o')] - pub(crate) proof_path: PathBuf, -} - -pub(crate) fn run(args: ProveCommand) { - assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path"); - assert!(args.witness_path.is_file(), "Could
not find witness file at provided path"); - - std::io::stdout().write_all(b"proof").unwrap(); -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs deleted file mode 100644 index 1a715eea880..00000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs +++ /dev/null @@ -1,24 +0,0 @@ -use clap::Args; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct VerifyCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option<PathBuf>, - - #[clap(short = 'p')] - pub(crate) proof_path: PathBuf, - - #[clap(short = 'k')] - pub(crate) vk_path: PathBuf, - - #[clap(short = 'r')] - pub(crate) is_recursive: bool, -} - -pub(crate) fn run(args: VerifyCommand) { - assert!(args.vk_path.is_file(), "Could not find verification key file at provided path"); - assert!(args.proof_path.is_file(), "Could not find proof file at provided path"); - - std::fs::write(args.proof_path, "proof").unwrap(); -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs deleted file mode 100644 index fcee224e85b..00000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs +++ /dev/null @@ -1,20 +0,0 @@ -use clap::Args; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct WriteVkCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option<PathBuf>, - - #[clap(short = 'b')] - pub(crate) bytecode_path: PathBuf, - - #[clap(short = 'o')] - pub(crate) vk_path: PathBuf, -} - -pub(crate) fn run(args: WriteVkCommand) { - assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path"); - - std::fs::write(args.vk_path, "vk").unwrap(); -} diff --git a/tooling/bb_abstraction_leaks/Cargo.toml b/tooling/bb_abstraction_leaks/Cargo.toml deleted file mode 100644 index 972c78831a7..00000000000 --- a/tooling/bb_abstraction_leaks/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "bb_abstraction_leaks" -description = "A crate which encapsulates knowledge about Barretenberg which is currently leaking into Nargo" -version = "0.11.0" -authors.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -acvm.workspace = true - -[build-dependencies] -build-target = "0.4.0" -const_format.workspace = true diff --git a/tooling/bb_abstraction_leaks/build.rs b/tooling/bb_abstraction_leaks/build.rs deleted file mode 100644 index 45da7f9d00c..00000000000 --- a/tooling/bb_abstraction_leaks/build.rs +++ /dev/null @@ -1,58 +0,0 @@ -use build_target::{Arch, Os}; -use const_format::formatcp; - -// Useful for printing debugging messages during the build -// macro_rules! p { -// ($($tokens: tt)*) => { -// println!("cargo:warning={}", format!($($tokens)*)) -// } -// } - -const USERNAME: &str = "AztecProtocol"; -const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.38.0"; -const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); - -const API_URL: &str = - formatcp!("https://github.com/{}/{}/releases/download/{}", USERNAME, REPO, TAG); - -fn main() -> Result<(), String> { - // We need to inject which OS we're building for so that we can download the correct barretenberg binary.
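The same target detection can be done without the `build-target` crate, since cargo exposes the target to build scripts through `CARGO_CFG_*` environment variables. A sketch in that style (archive names copied from the function below; the `BB_ARCHIVE_NAME` variable it emits is illustrative):

// Sketch: pick a release-archive name from the build target, mirroring the
// build script below but reading cargo's CARGO_CFG_* env vars in build.rs.
fn archive_name(os: &str, arch: &str) -> Option<&'static str> {
    match (os, arch) {
        ("linux", "x86_64") => Some("barretenberg-x86_64-linux-gnu.tar.gz"),
        ("macos", "aarch64") => Some("barretenberg-aarch64-apple-darwin.tar.gz"),
        ("macos", "x86_64") => Some("barretenberg-x86_64-apple-darwin.tar.gz"),
        _ => None, // unsupported target; caller decides whether to warn or fail
    }
}

fn main() {
    // Inside a build script these are set by cargo; empty otherwise.
    let os = std::env::var("CARGO_CFG_TARGET_OS").unwrap_or_default();
    let arch = std::env::var("CARGO_CFG_TARGET_ARCH").unwrap_or_default();
    if let Some(name) = archive_name(&os, &arch) {
        println!("cargo:rustc-env=BB_ARCHIVE_NAME={name}");
    }
}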
- let os = match build_target::target_os().unwrap() { - os @ (Os::Linux | Os::MacOs) => os, - Os::Windows => todo!("Windows is not currently supported"), - os_name => panic!("Unsupported OS {os_name}"), - }; - - let arch = match build_target::target_arch().unwrap() { - arch @ (Arch::X86_64 | Arch::AARCH64) => arch, - arch_name => panic!("Unsupported Architecture {arch_name}"), - }; - - // Arm builds of linux are not supported - // We do not panic because we allow users to run nargo without a backend. - if let (Os::Linux, Arch::AARCH64) = (&os, &arch) { - println!( - "cargo:warning=ARM64 builds of linux are not supported for the barretenberg binary" - ); - }; - - println!("cargo:rustc-env=BB_BINARY_URL={}", get_bb_download_url(arch, os)); - println!("cargo:rustc-env=BB_VERSION={}", VERSION); - - Ok(()) -} - -fn get_bb_download_url(target_arch: Arch, target_os: Os) -> String { - let archive_name = match target_os { - Os::Linux => "barretenberg-x86_64-linux-gnu.tar.gz", - Os::MacOs => match target_arch { - Arch::AARCH64 => "barretenberg-aarch64-apple-darwin.tar.gz", - Arch::X86_64 => "barretenberg-x86_64-apple-darwin.tar.gz", - arch => panic!("unsupported arch {arch}"), - }, - os => panic!("Unsupported OS {os}"), - }; - - format!("{API_URL}/{archive_name}") -} diff --git a/tooling/bb_abstraction_leaks/src/lib.rs b/tooling/bb_abstraction_leaks/src/lib.rs deleted file mode 100644 index 56a4f58cd21..00000000000 --- a/tooling/bb_abstraction_leaks/src/lib.rs +++ /dev/null @@ -1,26 +0,0 @@ -#![warn(unused_crate_dependencies, unused_extern_crates)] -#![warn(unreachable_pub)] - -use acvm::{acir::native_types::WitnessMap, FieldElement}; - -pub const ACVM_BACKEND_BARRETENBERG: &str = "acvm-backend-barretenberg"; -pub const BB_DOWNLOAD_URL: &str = env!("BB_BINARY_URL"); -pub const BB_VERSION: &str = env!("BB_VERSION"); - -/// Removes the public inputs which are prepended to a proof by Barretenberg. -pub fn remove_public_inputs(num_pub_inputs: usize, proof: &[u8]) -> Vec<u8> { - // Barretenberg prepends the public inputs onto the proof so we need to remove - // the first `num_pub_inputs` field elements. - let num_bytes_to_remove = num_pub_inputs * (FieldElement::max_num_bytes() as usize); - proof[num_bytes_to_remove..].to_vec() -} - -/// Prepends a set of public inputs to a proof. -pub fn prepend_public_inputs(proof: Vec<u8>, public_inputs: WitnessMap) -> Vec<u8> { - // We omit any unassigned witnesses. - // Witness values should be ordered by their index but we skip over any indices without an assignment.
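A tiny worked example of that ordering rule, with the witness map modeled as a `BTreeMap` from witness index to its big-endian field bytes (types simplified relative to the real `WitnessMap`):

use std::collections::BTreeMap;

// Sketch: public-input bytes are concatenated in ascending witness-index
// order, skipping absent indices, then the proof bytes follow.
fn prepend(public_inputs: BTreeMap<u32, Vec<u8>>, proof: Vec<u8>) -> Vec<u8> {
    public_inputs
        .into_values() // BTreeMap iterates in ascending key order
        .flatten()
        .chain(proof)
        .collect()
}

fn main() {
    let mut inputs = BTreeMap::new();
    inputs.insert(3, vec![0xbb]);
    inputs.insert(1, vec![0xaa]); // index 2 is unassigned and simply skipped
    assert_eq!(prepend(inputs, vec![0xff]), vec![0xaa, 0xbb, 0xff]);
}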
- let public_inputs_bytes = - public_inputs.into_iter().flat_map(|(_, assignment)| assignment.to_be_bytes()); - - public_inputs_bytes.chain(proof).collect() -} diff --git a/tooling/debugger/src/context.rs b/tooling/debugger/src/context.rs index ea32c864a0b..646beaf0096 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -862,7 +862,11 @@ mod tests { let opcodes = vec![ Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, - Opcode::MemoryInit { block_id: BlockId(0), init: vec![] }, + Opcode::MemoryInit { + block_id: BlockId(0), + init: vec![], + block_type: acvm::acir::circuit::opcodes::BlockType::Memory, + }, Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, Opcode::AssertZero(Expression::default()), ]; diff --git a/tooling/debugger/tests/debug.rs b/tooling/debugger/tests/debug.rs index b104a2c84ac..313b6b30591 100644 --- a/tooling/debugger/tests/debug.rs +++ b/tooling/debugger/tests/debug.rs @@ -16,12 +16,6 @@ mod tests { let mut dbg_session = spawn_bash(Some(timeout_seconds * 1000)).expect("Could not start bash session"); - // Set backend to `/dev/null` to force an error if nargo tries to speak to a backend. - dbg_session - .send_line("export NARGO_BACKEND_PATH=/dev/null") - .expect("Could not export NARGO_BACKEND_PATH."); - dbg_session.wait_for_prompt().expect("Could not export NARGO_BACKEND_PATH."); - // Start debugger and test that it loads for the given program. dbg_session .execute( diff --git a/tooling/lsp/src/solver.rs b/tooling/lsp/src/solver.rs index 249406effaf..87327b01e36 100644 --- a/tooling/lsp/src/solver.rs +++ b/tooling/lsp/src/solver.rs @@ -27,9 +27,13 @@ impl BlackBoxFunctionSolver for WrapperSolver { fn multi_scalar_mul( &self, points: &[acvm::FieldElement], - scalars: &[acvm::FieldElement], - ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - self.0.multi_scalar_mul(points, scalars) + scalars_lo: &[acvm::FieldElement], + scalars_hi: &[acvm::FieldElement], + ) -> Result< + (acvm::FieldElement, acvm::FieldElement, acvm::FieldElement), + acvm::BlackBoxResolutionError, + > { + self.0.multi_scalar_mul(points, scalars_lo, scalars_hi) } fn pedersen_hash( @@ -44,10 +48,15 @@ impl BlackBoxFunctionSolver for WrapperSolver { &self, input1_x: &acvm::FieldElement, input1_y: &acvm::FieldElement, + input1_infinite: &acvm::FieldElement, input2_x: &acvm::FieldElement, input2_y: &acvm::FieldElement, - ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - self.0.ec_add(input1_x, input1_y, input2_x, input2_y) + input2_infinite: &acvm::FieldElement, + ) -> Result< + (acvm::FieldElement, acvm::FieldElement, acvm::FieldElement), + acvm::BlackBoxResolutionError, + > { + self.0.ec_add(input1_x, input1_y, input1_infinite, input2_x, input2_y, input2_infinite) } fn poseidon2_permutation( diff --git a/tooling/nargo/src/artifacts/contract.rs b/tooling/nargo/src/artifacts/contract.rs index 83bb4b94f82..a864da7c33c 100644 --- a/tooling/nargo/src/artifacts/contract.rs +++ b/tooling/nargo/src/artifacts/contract.rs @@ -9,7 +9,7 @@ use std::collections::{BTreeMap, HashMap}; use fm::FileId; -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractOutputsArtifact { pub structs: HashMap<String, Vec<AbiType>>, pub globals: HashMap<String, Vec<AbiValue>>, @@ -21,7 +21,7 @@ impl From<ContractOutputs> for ContractOutputsArtifact { } } -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractArtifact { /// Version of noir used to
compile this contract pub noir_version: String, @@ -51,7 +51,7 @@ impl From<CompiledContract> for ContractArtifact { /// /// A contract function unlike a regular Noir program however can have additional properties. /// One of these being a function type. -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct ContractFunctionArtifact { pub name: String, diff --git a/tooling/nargo/src/artifacts/debug.rs b/tooling/nargo/src/artifacts/debug.rs index 496896468cc..2570c3f5c9f 100644 --- a/tooling/nargo/src/artifacts/debug.rs +++ b/tooling/nargo/src/artifacts/debug.rs @@ -9,6 +9,7 @@ use std::{ }; pub use super::debug_vars::{DebugVars, StackFrame}; +use super::{contract::ContractArtifact, program::ProgramArtifact}; use fm::{FileId, FileManager, PathString}; /// A Debug Artifact stores, for a given program, the debug info for every function @@ -128,6 +129,16 @@ impl From<CompiledProgram> for DebugArtifact { } } +impl From<ProgramArtifact> for DebugArtifact { + fn from(program_artifact: ProgramArtifact) -> Self { + DebugArtifact { + debug_symbols: program_artifact.debug_symbols.debug_infos, + file_map: program_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl From<CompiledContract> for DebugArtifact { fn from(compiled_artifact: CompiledContract) -> Self { let all_functions_debug: Vec<DebugInfo> = compiled_artifact @@ -144,6 +155,22 @@ impl From<CompiledContract> for DebugArtifact { } } +impl From<ContractArtifact> for DebugArtifact { + fn from(compiled_artifact: ContractArtifact) -> Self { + let all_functions_debug: Vec<DebugInfo> = compiled_artifact + .functions + .into_iter() + .flat_map(|contract_function| contract_function.debug_symbols.debug_infos) + .collect(); + + DebugArtifact { + debug_symbols: all_functions_debug, + file_map: compiled_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl<'a> Files<'a> for DebugArtifact { type FileId = FileId; type Name = PathString; diff --git a/tooling/nargo/src/artifacts/program.rs b/tooling/nargo/src/artifacts/program.rs index 67ac9f53ec8..3c25b9e3345 100644 --- a/tooling/nargo/src/artifacts/program.rs +++ b/tooling/nargo/src/artifacts/program.rs @@ -8,7 +8,7 @@ use noirc_driver::DebugFile; use noirc_errors::debug_info::ProgramDebugInfo; use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, Debug)] pub struct ProgramArtifact { pub noir_version: String, diff --git a/tooling/nargo/src/constants.rs b/tooling/nargo/src/constants.rs index 0b50d61fe37..1048d86fcd7 100644 --- a/tooling/nargo/src/constants.rs +++ b/tooling/nargo/src/constants.rs @@ -13,8 +13,6 @@ pub const EXPORT_DIR: &str = "export"; // Files /// The file from which Nargo pulls prover inputs pub const PROVER_INPUT_FILE: &str = "Prover"; -/// The file from which Nargo pulls verifier inputs -pub const VERIFIER_INPUT_FILE: &str = "Verifier"; /// The package definition file for a Noir project. pub const PKG_FILE: &str = "Nargo.toml"; diff --git a/tooling/nargo/src/package.rs b/tooling/nargo/src/package.rs index ecbf3585210..44f0a3504f7 100644 --- a/tooling/nargo/src/package.rs +++ b/tooling/nargo/src/package.rs @@ -2,7 +2,7 @@ use std::{collections::BTreeMap, fmt::Display, path::PathBuf}; use noirc_frontend::graph::CrateName; -use crate::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; +use crate::constants::PROVER_INPUT_FILE; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PackageType { @@ -59,11 +59,6 @@ impl Package { // For now it is hard-coded to be toml.
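A sketch of what lifting that hard-coding might look like, with a hypothetical `InputFormat` enum driving the file extension (none of these names exist in nargo; this only illustrates the TODO):

use std::path::{Path, PathBuf};

// Hypothetical: nargo has no `InputFormat` enum today.
enum InputFormat {
    Toml,
    Json,
}

impl InputFormat {
    fn ext(&self) -> &'static str {
        match self {
            InputFormat::Toml => "toml",
            InputFormat::Json => "json",
        }
    }
}

fn prover_input_path(root_dir: &Path, format: InputFormat) -> PathBuf {
    root_dir.join("Prover").with_extension(format.ext())
}

fn main() {
    let path = prover_input_path(Path::new("/tmp/pkg"), InputFormat::Json);
    assert_eq!(path, PathBuf::from("/tmp/pkg/Prover.json"));
}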
self.root_dir.join(format!("{PROVER_INPUT_FILE}.toml")) } - pub fn verifier_input_path(&self) -> PathBuf { - // TODO: This should be configurable, such as if we are looking for .json or .toml or custom paths - // For now it is hard-coded to be toml. - self.root_dir.join(format!("{VERIFIER_INPUT_FILE}.toml")) - } pub fn is_binary(&self) -> bool { self.package_type == PackageType::Binary diff --git a/tooling/nargo_cli/Cargo.toml b/tooling/nargo_cli/Cargo.toml index c20be037e62..d10dd6a22ff 100644 --- a/tooling/nargo_cli/Cargo.toml +++ b/tooling/nargo_cli/Cargo.toml @@ -43,11 +43,10 @@ thiserror.workspace = true tower.workspace = true async-lsp = { workspace = true, features = ["client-monitor", "stdio", "tracing", "tokio"] } const_format.workspace = true -hex.workspace = true similar-asserts.workspace = true termcolor = "1.1.2" color-eyre = "0.6.2" -tokio = { version = "1.0", features = ["io-std"] } +tokio = { version = "1.0", features = ["io-std", "rt"] } dap.workspace = true clap-markdown = { git = "https://github.com/noir-lang/clap-markdown", rev = "450d759532c88f0dba70891ceecdbc9ff8f25d2b", optional = true } @@ -55,9 +54,6 @@ notify = "6.1.1" notify-debouncer-full = "0.3.1" termion = "3.0.0" -# Backends -backend-interface = { path = "../backend_interface" } - # Logs tracing-subscriber.workspace = true tracing-appender = "0.2.3" diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 0ed2d4c07f7..74042cf4e40 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -63,7 +63,6 @@ fn execution_success_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("execute").arg("--force"); @@ -101,7 +100,6 @@ fn execution_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("execute").arg("--force"); @@ -139,7 +137,6 @@ fn noir_test_success_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("test"); @@ -177,7 +174,6 @@ fn noir_test_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("test"); @@ -218,7 +214,6 @@ fn compile_success_empty_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("info"); cmd.arg("--json"); @@ -269,7 +264,6 @@ fn compile_success_contract_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("compile").arg("--force"); @@ -307,7 +301,6 @@ fn compile_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); 
cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("compile").arg("--force"); diff --git a/tooling/nargo_cli/src/backends.rs b/tooling/nargo_cli/src/backends.rs deleted file mode 100644 index 2b3e9d8861f..00000000000 --- a/tooling/nargo_cli/src/backends.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::path::PathBuf; - -use backend_interface::backends_directory; -pub(crate) use backend_interface::Backend; - -fn active_backend_file_path() -> PathBuf { - backends_directory().join(".selected_backend") -} - -pub(crate) use backend_interface::ACVM_BACKEND_BARRETENBERG; - -pub(crate) fn clear_active_backend() { - let active_backend_file = active_backend_file_path(); - if active_backend_file.is_file() { - std::fs::remove_file(active_backend_file_path()) - .expect("should delete active backend file"); - } -} - -pub(crate) fn set_active_backend(backend_name: &str) { - let active_backend_file = active_backend_file_path(); - let backends_directory = - active_backend_file.parent().expect("active backend file should have parent"); - - std::fs::create_dir_all(backends_directory).expect("Could not create backends directory"); - std::fs::write(active_backend_file, backend_name.as_bytes()) - .expect("Could not write to active backend file"); -} - -pub(crate) fn get_active_backend() -> String { - let active_backend_file = active_backend_file_path(); - - if !active_backend_file.is_file() { - set_active_backend(ACVM_BACKEND_BARRETENBERG); - return ACVM_BACKEND_BARRETENBERG.to_string(); - } - - std::fs::read_to_string(active_backend_file).expect("Could not read active backend file") -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs deleted file mode 100644 index 5aba00764d3..00000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs +++ /dev/null @@ -1,13 +0,0 @@ -use clap::Args; - -use crate::{backends::get_active_backend, errors::CliError}; - -/// Prints the name of the currently active backend -#[derive(Debug, Clone, Args)] -pub(crate) struct CurrentCommand; - -pub(crate) fn run(_args: CurrentCommand) -> Result<(), CliError> { - println!("{}", get_active_backend()); - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs deleted file mode 100644 index 974db9ff7f5..00000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs +++ /dev/null @@ -1,30 +0,0 @@ -use clap::Args; - -use backend_interface::{backends_directory, download_backend}; - -use crate::errors::{BackendError, CliError}; - -use super::ls_cmd::get_available_backends; - -/// Install a new backend from a URL. -#[derive(Debug, Clone, Args)] -pub(crate) struct InstallCommand { - /// The name of the backend to install. - backend: String, - - /// The URL from which to download the backend. 
- url: String, -} - -pub(crate) fn run(args: InstallCommand) -> Result<(), CliError> { - let installed_backends = get_available_backends(); - - if installed_backends.contains(&args.backend) { - return Err(BackendError::AlreadyInstalled(args.backend).into()); - } - - download_backend(&args.url, &backends_directory().join(args.backend).join("backend_binary")) - .map_err(BackendError::from)?; - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs deleted file mode 100644 index da37b104d65..00000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs +++ /dev/null @@ -1,34 +0,0 @@ -use backend_interface::backends_directory; -use clap::Args; - -use crate::errors::CliError; - -/// Prints the list of currently installed backends -#[derive(Debug, Clone, Args)] -pub(crate) struct LsCommand; - -pub(crate) fn run(_args: LsCommand) -> Result<(), CliError> { - for backend in get_available_backends() { - println!("{backend}"); - } - - Ok(()) -} - -pub(super) fn get_available_backends() -> Vec<String> { - let backend_directory_contents = std::fs::read_dir(backends_directory()) - .expect("Could not read backends directory contents"); - - // TODO: Highlight the currently active backend. - backend_directory_contents - .into_iter() - .filter_map(|entry| { - let path = entry.ok()?.path(); - if path.is_dir() { - path.file_name().map(|name| name.to_string_lossy().to_string()) - } else { - None - } - }) - .collect() -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/mod.rs b/tooling/nargo_cli/src/cli/backend_cmd/mod.rs deleted file mode 100644 index 985dbbdb934..00000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/mod.rs +++ /dev/null @@ -1,41 +0,0 @@ -use clap::{Args, Subcommand}; - -use crate::errors::CliError; - -mod current_cmd; -mod install_cmd; -mod ls_cmd; -mod uninstall_cmd; -mod use_cmd; - -#[non_exhaustive] -#[derive(Args, Clone, Debug)] -/// Install and select custom backends used to generate and verify proofs. -pub(crate) struct BackendCommand { - #[command(subcommand)] - command: BackendCommands, -} - -#[non_exhaustive] -#[derive(Subcommand, Clone, Debug)] -pub(crate) enum BackendCommands { - Current(current_cmd::CurrentCommand), - Ls(ls_cmd::LsCommand), - Use(use_cmd::UseCommand), - Install(install_cmd::InstallCommand), - Uninstall(uninstall_cmd::UninstallCommand), -} - -pub(crate) fn run(cmd: BackendCommand) -> Result<(), CliError> { - let BackendCommand { command } = cmd; - - match command { - BackendCommands::Current(args) => current_cmd::run(args), - BackendCommands::Ls(args) => ls_cmd::run(args), - BackendCommands::Use(args) => use_cmd::run(args), - BackendCommands::Install(args) => install_cmd::run(args), - BackendCommands::Uninstall(args) => uninstall_cmd::run(args), - }?; - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs deleted file mode 100644 index 7497f1bc2f6..00000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs +++ /dev/null @@ -1,59 +0,0 @@ -use clap::Args; - -use backend_interface::backends_directory; - -use crate::{ - backends::{ - clear_active_backend, get_active_backend, set_active_backend, ACVM_BACKEND_BARRETENBERG, - }, - errors::{BackendError, CliError}, -}; - -use super::ls_cmd::get_available_backends; - -/// Uninstalls a backend -#[derive(Debug, Clone, Args)] -pub(crate) struct UninstallCommand { - /// The name of the backend to uninstall.
- backend: String, -} - -pub(crate) fn run(args: UninstallCommand) -> Result<(), CliError> { - let installed_backends = get_available_backends(); - - if !installed_backends.contains(&args.backend) { - return Err(BackendError::UnknownBackend(args.backend).into()); - } - - let active_backend = get_active_backend(); - - // Handle the case where we're uninstalling the currently active backend. - if active_backend == args.backend { - let barretenberg_is_installed = - installed_backends.iter().any(|backend_name| backend_name == ACVM_BACKEND_BARRETENBERG); - - let new_active_backend = - if args.backend != ACVM_BACKEND_BARRETENBERG && barretenberg_is_installed { - // Prefer switching to barretenberg if possible. - Some(ACVM_BACKEND_BARRETENBERG) - } else { - // Otherwise pick the first backend which isn't being uninstalled. - installed_backends - .iter() - .find(|&backend_name| backend_name != &args.backend) - .map(|name| name.as_str()) - }; - - if let Some(backend) = new_active_backend { - set_active_backend(backend); - } else { - // We've deleted the last backend. Clear the active backend file to be recreated once we install a new one. - clear_active_backend(); - } - } - - std::fs::remove_dir_all(backends_directory().join(args.backend)) - .expect("backend directory should be deleted"); - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs deleted file mode 100644 index 66a129c2148..00000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs +++ /dev/null @@ -1,26 +0,0 @@ -use clap::Args; - -use crate::{ - backends::set_active_backend, - errors::{BackendError, CliError}, -}; - -use super::ls_cmd::get_available_backends; - -/// Select the backend to use -#[derive(Debug, Clone, Args)] -pub(crate) struct UseCommand { - backend: String, -} - -pub(crate) fn run(args: UseCommand) -> Result<(), CliError> { - let backends = get_available_backends(); - - if !backends.contains(&args.backend) { - return Err(BackendError::UnknownBackend(args.backend).into()); - } - - set_active_backend(&args.backend); - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs index d5313d96076..e2e1f147b90 100644 --- a/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/tooling/nargo_cli/src/cli/check_cmd.rs @@ -95,13 +95,11 @@ fn check_package( Ok(false) } else { // XXX: We can have a --overwrite flag to determine if you want to overwrite the Prover/Verifier.toml files - if let Some((parameters, return_type)) = compute_function_abi(&context, &crate_id) { + if let Some((parameters, _)) = compute_function_abi(&context, &crate_id) { let path_to_prover_input = package.prover_input_path(); - let path_to_verifier_input = package.verifier_input_path(); // Before writing the file, check if it exists and whether overwrite is set let should_write_prover = !path_to_prover_input.exists() || allow_overwrite; - let should_write_verifier = !path_to_verifier_input.exists() || allow_overwrite; if should_write_prover { let prover_toml = create_input_toml_template(parameters.clone(), None); @@ -110,19 +108,7 @@ fn check_package( eprintln!("Note: Prover.toml already exists. 
Use --overwrite to force overwrite."); - } - - if should_write_verifier { - let public_inputs = - parameters.into_iter().filter(|param| param.is_public()).collect(); - - let verifier_toml = create_input_toml_template(public_inputs, return_type); - write_to_file(verifier_toml.as_bytes(), &path_to_verifier_input); - } else { - eprintln!( - "Note: Verifier.toml already exists. Use --overwrite to force overwrite." - ); - } - - let any_file_written = should_write_prover || should_write_verifier; + let any_file_written = should_write_prover; Ok(any_file_written) } else { diff --git a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs deleted file mode 100644 index 8c64d9cd935..00000000000 --- a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ /dev/null @@ -1,83 +0,0 @@ -use super::fs::{create_named_dir, write_to_file}; -use super::NargoConfig; -use crate::backends::Backend; -use crate::errors::CliError; - -use clap::Args; -use nargo::ops::{compile_program, report_errors}; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; -use noirc_frontend::graph::CrateName; - -/// Generates a Solidity verifier smart contract for the program -#[derive(Debug, Clone, Args)] -pub(crate) struct CodegenVerifierCommand { - /// The name of the package to codegen - #[clap(long, conflicts_with = "workspace")] - package: Option<CrateName>, - - /// Codegen all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, - - #[clap(flatten)] - compile_options: CompileOptions, -} - -pub(crate) fn run( - backend: &Backend, - args: CodegenVerifierCommand, - config: NargoConfig, -) -> Result<(), CliError> { - let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); - let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - )?; - - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); - for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let program = nargo::ops::transform_program(program, args.compile_options.expression_width); - - // TODO(https://github.com/noir-lang/noir/issues/4428): - // We do not expect to have a smart contract verifier for a foldable program with multiple circuits. - // However, in the future we can expect to possibly have non-inlined ACIR functions during compilation - // that will be inlined at a later step such as by the ACVM compiler or by the backend. - // Add appropriate handling here once the compiler enables multiple ACIR functions.
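Since that single-circuit assumption is load-bearing, an alternative to the `assert_eq!` that follows would be a recoverable error; a sketch (the error message and `String` error type are illustrative):

// Sketch: reject multi-circuit programs with an error instead of a panic.
fn require_single_function(num_functions: usize) -> Result<(), String> {
    if num_functions == 1 {
        Ok(())
    } else {
        Err(format!("contract codegen expects exactly 1 ACIR function, found {num_functions}"))
    }
}

fn main() {
    assert!(require_single_function(1).is_ok());
    assert!(require_single_function(2).is_err());
}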
- assert_eq!(program.program.functions.len(), 1); - let smart_contract_string = backend.eth_contract(&program.program)?; - - let contract_dir = workspace.contracts_directory_path(package); - create_named_dir(&contract_dir, "contract"); - let contract_path = contract_dir.join("plonk_vk").with_extension("sol"); - - let path = write_to_file(smart_contract_string.as_bytes(), &contract_path); - println!("[{}] Contract successfully created and located at {path}", package.name); - } - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 2f878406939..ecf2e2e9f53 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -22,7 +22,6 @@ use notify_debouncer_full::new_debouncer; use crate::errors::CliError; -use super::fs::program::only_acir; use super::fs::program::{read_program_from_file, save_contract_to_file, save_program_to_file}; use super::NargoConfig; use rayon::prelude::*; @@ -111,7 +110,7 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n Ok(()) } -fn compile_workspace_full( +pub(super) fn compile_workspace_full( workspace: &Workspace, compile_options: &CompileOptions, ) -> Result<(), CliError> { @@ -136,10 +135,9 @@ fn compile_workspace_full( .partition(|package| package.is_binary()); // Save build artifacts to disk. - let only_acir = compile_options.only_acir; for (package, program) in binary_packages.into_iter().zip(compiled_programs) { let program = nargo::ops::transform_program(program, compile_options.expression_width); - save_program(program.clone(), &package, &workspace.target_directory_path(), only_acir); + save_program(program.clone(), &package, &workspace.target_directory_path()); } let circuit_dir = workspace.target_directory_path(); for (package, contract) in contract_packages.into_iter().zip(compiled_contracts) { @@ -197,18 +195,9 @@ pub(super) fn compile_workspace( } } -pub(super) fn save_program( - program: CompiledProgram, - package: &Package, - circuit_dir: &Path, - only_acir_opt: bool, -) { - if only_acir_opt { - only_acir(program.program, circuit_dir); - } else { - let program_artifact = ProgramArtifact::from(program.clone()); - save_program_to_file(&program_artifact, &package.name, circuit_dir); - } +pub(super) fn save_program(program: CompiledProgram, package: &Package, circuit_dir: &Path) { + let program_artifact = ProgramArtifact::from(program.clone()); + save_program_to_file(&program_artifact, &package.name, circuit_dir); } fn save_contract(contract: CompiledContract, package: &Package, circuit_dir: &Path) { diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index 68f902dfe33..862a46884ef 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -5,19 +5,18 @@ use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; -use nargo::ops::{compile_program, report_errors, DefaultForeignCallExecutor}; +use nargo::ops::DefaultForeignCallExecutor; use nargo::package::Package; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use 
noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; +use super::compile_cmd::compile_workspace_full; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; +use crate::cli::fs::program::read_program_from_file; use crate::errors::CliError; /// Executes a circuit to calculate its return value @@ -59,32 +58,16 @@ pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliEr )?; let target_dir = &workspace.target_directory_path(); - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. + compile_workspace_full(&workspace, &args.compile_options)?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); let (return_value, witness_stack) = execute_program_and_decode( - compiled_program, + program, package, &args.prover_name, args.oracle_resolver.as_deref(), diff --git a/tooling/nargo_cli/src/cli/fs/inputs.rs index bd038c51ad5..dee9a00507c 100644 --- a/tooling/nargo_cli/src/cli/fs/inputs.rs +++ b/tooling/nargo_cli/src/cli/fs/inputs.rs @@ -6,8 +6,6 @@ use std::{collections::BTreeMap, path::Path}; use crate::errors::FilesystemError; -use super::write_to_file; - /// Returns the circuit's parameters and its return value, if one exists. /// # Examples /// @@ -36,99 +34,3 @@ pub(crate) fn read_inputs_from_file<P: AsRef<Path>>( Ok((input_map, return_value)) } - -pub(crate) fn write_inputs_to_file<P: AsRef<Path>>( - input_map: &InputMap, - return_value: &Option<InputValue>, - abi: &Abi, - path: P, - file_name: &str, - format: Format, -) -> Result<(), FilesystemError> { - let file_path = path.as_ref().join(file_name).with_extension(format.ext()); - - // We must insert the return value into the `InputMap` in order for it to be written to file. - let serialized_output = match return_value { - // Parameters and return values are kept separate except for when they're being written to file. - // As a result, we don't want to modify the original map and must clone it before insertion. - Some(return_value) => { - let mut input_map = input_map.clone(); - input_map.insert(MAIN_RETURN_NAME.to_owned(), return_value.clone()); - format.serialize(&input_map, abi)? - } - // If no return value exists, then we can serialize the original map directly.
- None => format.serialize(input_map, abi)?, - }; - - write_to_file(serialized_output.as_bytes(), &file_path); - - Ok(()) -} - -#[cfg(test)] -mod tests { - use std::{collections::BTreeMap, vec}; - - use acvm::FieldElement; - use nargo::constants::VERIFIER_INPUT_FILE; - use noirc_abi::{ - input_parser::{Format, InputValue}, - Abi, AbiParameter, AbiReturnType, AbiType, AbiVisibility, - }; - use tempfile::TempDir; - - use super::{read_inputs_from_file, write_inputs_to_file}; - - #[test] - fn write_and_read_recovers_inputs_and_return_value() { - let input_dir = TempDir::new().unwrap().into_path(); - - // We purposefully test a simple ABI here as we're focussing on `fs`. - // Tests for serializing complex types should exist in `noirc_abi`. - let abi = Abi { - parameters: vec![ - AbiParameter { - name: "foo".into(), - typ: AbiType::Field, - visibility: AbiVisibility::Public, - }, - AbiParameter { - name: "bar".into(), - typ: AbiType::String { length: 11 }, - visibility: AbiVisibility::Private, - }, - ], - return_type: Some(AbiReturnType { - abi_type: AbiType::Field, - visibility: AbiVisibility::Public, - }), - - // Input serialization is only dependent on types, not position in witness map. - // Neither of these should be relevant so we leave them empty. - param_witnesses: BTreeMap::new(), - return_witnesses: Vec::new(), - error_types: BTreeMap::new(), - }; - let input_map = BTreeMap::from([ - ("foo".to_owned(), InputValue::Field(42u128.into())), - ("bar".to_owned(), InputValue::String("hello world".to_owned())), - ]); - let return_value = Some(InputValue::Field(FieldElement::zero())); - - write_inputs_to_file( - &input_map, - &return_value, - &abi, - &input_dir, - VERIFIER_INPUT_FILE, - Format::Toml, - ) - .unwrap(); - - let (loaded_inputs, loaded_return_value) = - read_inputs_from_file(input_dir, VERIFIER_INPUT_FILE, Format::Toml, &abi).unwrap(); - - assert_eq!(loaded_inputs, input_map); - assert_eq!(loaded_return_value, return_value); - } -} diff --git a/tooling/nargo_cli/src/cli/fs/mod.rs b/tooling/nargo_cli/src/cli/fs/mod.rs index 4ebce3b3325..8658bd5b248 100644 --- a/tooling/nargo_cli/src/cli/fs/mod.rs +++ b/tooling/nargo_cli/src/cli/fs/mod.rs @@ -4,11 +4,8 @@ use std::{ path::{Path, PathBuf}, }; -use crate::errors::FilesystemError; - pub(super) mod inputs; pub(super) mod program; -pub(super) mod proof; pub(super) mod witness; pub(super) fn create_named_dir(named_dir: &Path, name: &str) -> PathBuf { @@ -31,12 +28,3 @@ pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String { Ok(_) => display.to_string(), } } - -pub(super) fn load_hex_data<P: AsRef<Path>>(path: P) -> Result<Vec<u8>, FilesystemError> { - let hex_data: Vec<_> = std::fs::read(&path) - .map_err(|_| FilesystemError::PathNotValid(path.as_ref().to_path_buf()))?; - - let raw_bytes = hex::decode(hex_data).map_err(FilesystemError::HexArtifactNotValid)?; - - Ok(raw_bytes) -} diff --git a/tooling/nargo_cli/src/cli/fs/program.rs b/tooling/nargo_cli/src/cli/fs/program.rs index 77005e8d5af..ba017651667 100644 --- a/tooling/nargo_cli/src/cli/fs/program.rs +++ b/tooling/nargo_cli/src/cli/fs/program.rs @@ -1,6 +1,5 @@ use std::path::{Path, PathBuf}; -use acvm::acir::circuit::Program; use nargo::artifacts::{contract::ContractArtifact, program::ProgramArtifact}; use noirc_frontend::graph::CrateName; @@ -17,16 +16,6 @@ pub(crate) fn save_program_to_file<P: AsRef<Path>>( save_build_artifact_to_file(program_artifact, &circuit_name, circuit_dir) } -/// Writes the bytecode as acir.gz -pub(crate) fn only_acir<P: AsRef<Path>>(program: Program, circuit_dir: P) -> PathBuf { -
create_named_dir(circuit_dir.as_ref(), "target"); - let circuit_path = circuit_dir.as_ref().join("acir").with_extension("gz"); - let bytes = Program::serialize_program(&program); - write_to_file(&bytes, &circuit_path); - - circuit_path -} - pub(crate) fn save_contract_to_file<P: AsRef<Path>>( compiled_contract: &ContractArtifact, circuit_name: &str, diff --git a/tooling/nargo_cli/src/cli/fs/proof.rs b/tooling/nargo_cli/src/cli/fs/proof.rs deleted file mode 100644 index d2b3050708b..00000000000 --- a/tooling/nargo_cli/src/cli/fs/proof.rs +++ /dev/null @@ -1,20 +0,0 @@ -use std::path::{Path, PathBuf}; - -use nargo::constants::PROOF_EXT; - -use crate::errors::FilesystemError; - -use super::{create_named_dir, write_to_file}; - -pub(crate) fn save_proof_to_dir<P: AsRef<Path>>( - proof: &[u8], - proof_name: &str, - proof_dir: P, -) -> Result<PathBuf, FilesystemError> { - create_named_dir(proof_dir.as_ref(), "proof"); - let proof_path = proof_dir.as_ref().join(proof_name).with_extension(PROOF_EXT); - - write_to_file(hex::encode(proof).as_bytes(), &proof_path); - - Ok(proof_path) -} diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index cac3c36f904..11cf6e22ab5 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,28 +1,25 @@ use std::collections::HashMap; -use acvm::acir::circuit::{ExpressionWidth, Program}; -use backend_interface::BackendError; +use acvm::acir::circuit::ExpressionWidth; use clap::Args; use iter_extended::vecmap; use nargo::{ - artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager, - ops::report_errors, package::Package, parse_all, + artifacts::{debug::DebugArtifact, program::ProgramArtifact}, + package::Package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledContract, CompiledProgram, - NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; use prettytable::{row, table, Row}; use rayon::prelude::*; use serde::Serialize; -use crate::backends::Backend; use crate::errors::CliError; -use super::{compile_cmd::compile_workspace, NargoConfig}; +use super::{ + compile_cmd::compile_workspace_full, fs::program::read_program_from_file, NargoConfig, +}; /// Provides detailed information on each of a program's functions (each represented by a single circuit) /// @@ -51,11 +48,7 @@ pub(crate) struct InfoCommand { compile_options: CompileOptions, } -pub(crate) fn run( - backend: &Backend, - args: InfoCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: InfoCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -66,79 +59,42 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let compiled_workspace = compile_workspace( - &workspace_file_manager, - &parsed_files, - &workspace, - &args.compile_options, - ); - - let (compiled_programs, compiled_contracts) = report_errors( - compiled_workspace, - &workspace_file_manager, -
args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; + // Compile the full workspace in order to generate any build artifacts. + compile_workspace_full(&workspace, &args.compile_options)?; - let compiled_programs = vecmap(compiled_programs, |program| { - nargo::ops::transform_program(program, args.compile_options.expression_width) - }); - let compiled_contracts = vecmap(compiled_contracts, |contract| { - nargo::ops::transform_contract(contract, args.compile_options.expression_width) - }); + let binary_packages: Vec<(Package, ProgramArtifact)> = workspace + .into_iter() + .filter(|package| package.is_binary()) + .map(|package| -> Result<(Package, ProgramArtifact), CliError> { + let program_artifact_path = workspace.package_build_path(package); + let program = read_program_from_file(program_artifact_path)?; + Ok((package.clone(), program)) + }) + .collect::<Result<_, CliError>>()?; if args.profile_info { - for compiled_program in &compiled_programs { + for (_, compiled_program) in &binary_packages { let debug_artifact = DebugArtifact::from(compiled_program.clone()); - for function_debug in compiled_program.debug.iter() { + for function_debug in compiled_program.debug_symbols.debug_infos.iter() { let span_opcodes = function_debug.count_span_opcodes(); print_span_opcodes(span_opcodes, &debug_artifact); } } - - for compiled_contract in &compiled_contracts { - let debug_artifact = DebugArtifact::from(compiled_contract.clone()); - let functions = &compiled_contract.functions; - for contract_function in functions { - for function_debug in contract_function.debug.iter() { - let span_opcodes = function_debug.count_span_opcodes(); - print_span_opcodes(span_opcodes, &debug_artifact); - } - } - } } - let binary_packages = - workspace.into_iter().filter(|package| package.is_binary()).zip(compiled_programs); - let program_info = binary_packages + .into_iter() .par_bridge() .map(|(package, program)| { count_opcodes_and_gates_in_program( - backend, program, - package, - args.compile_options.expression_width, - ) - }) - .collect::<Result<_, CliError>>()?; - - let contract_info = compiled_contracts - .into_par_iter() - .map(|contract| { - count_opcodes_and_gates_in_contract( - backend, - contract, + &package, args.compile_options.expression_width, ) }) - .collect::<Result<_, CliError>>()?; + .collect(); - let info_report = InfoReport { programs: program_info, contracts: contract_info }; + let info_report = InfoReport { programs: program_info, contracts: Vec::new() }; if args.json { // Expose machine-readable JSON data.
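For orientation, the `--json` report now carries only ACIR opcode counts: `circuit_size` is gone along with the backend calls, and `contracts` always serializes empty. A minimal TypeScript sketch of a consumer of `nargo info --json`, assuming the JSON field names match the Rust struct fields spelled below (this diff does not confirm the serde encoding):

// Hypothetical consumer of `nargo info --json` output; field names assumed.
interface FunctionInfo {
  name: string;
  acir_opcodes: number; // `circuit_size` was removed together with the backend
}
interface ProgramInfo {
  package_name: string;
  expression_width: unknown; // serde encoding of ExpressionWidth assumed opaque here
  functions: FunctionInfo[];
}
interface InfoReport {
  programs: ProgramInfo[];
  contracts: unknown[]; // always empty after this change
}

// Example: sum the ACIR opcodes across every binary package in the report.
function totalAcirOpcodes(report: InfoReport): number {
  return report.programs
    .flatMap((program) => program.functions)
    .reduce((sum, fn) => sum + fn.acir_opcodes, 0);
}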
@@ -156,23 +112,6 @@ pub(crate) fn run( } program_table.printstd(); } - if !info_report.contracts.is_empty() { - let mut contract_table = table!([ - Fm->"Contract", - Fm->"Function", - Fm->"Expression Width", - Fm->"ACIR Opcodes", - Fm->"Backend Circuit Size" - ]); - for contract_info in info_report.contracts { - let contract_rows: Vec<Row> = contract_info.into(); - for row in contract_rows { - contract_table.add_row(row); - } - } - - contract_table.printstd(); - } } Ok(()) @@ -249,7 +188,6 @@ impl From<ProgramInfo> for Vec<Row> { Fc->format!("{}", function.name), format!("{:?}", program_info.expression_width), Fc->format!("{}", function.acir_opcodes), - Fc->format!("{}", function.circuit_size), ] }) } @@ -268,7 +206,6 @@ struct ContractInfo { struct FunctionInfo { name: String, acir_opcodes: usize, - circuit_size: u32, } impl From<ContractInfo> for Vec<Row> { @@ -279,56 +216,26 @@ impl From<ContractInfo> for Vec<Row> { Fc->format!("{}", function.name), format!("{:?}", contract_info.expression_width), Fc->format!("{}", function.acir_opcodes), - Fc->format!("{}", function.circuit_size), ] }) } } fn count_opcodes_and_gates_in_program( - backend: &Backend, - compiled_program: CompiledProgram, + compiled_program: ProgramArtifact, package: &Package, expression_width: ExpressionWidth, -) -> Result<ProgramInfo, CliError> { +) -> ProgramInfo { let functions = compiled_program - .program + .bytecode .functions .into_par_iter() .enumerate() - .map(|(i, function)| -> Result<_, BackendError> { - Ok(FunctionInfo { - name: compiled_program.names[i].clone(), - acir_opcodes: function.opcodes.len(), - // Unconstrained functions do not matter to a backend circuit count so we pass nothing here - circuit_size: backend.get_exact_circuit_size(&Program { - functions: vec![function], - unconstrained_functions: Vec::new(), - })?, - }) - }) - .collect::<Result<_, BackendError>>()?; - - Ok(ProgramInfo { package_name: package.name.to_string(), expression_width, functions }) -} - -fn count_opcodes_and_gates_in_contract( - backend: &Backend, - contract: CompiledContract, - expression_width: ExpressionWidth, -) -> Result<ContractInfo, CliError> { - let functions = contract - .functions - .into_par_iter() - .map(|function| -> Result<_, BackendError> { - Ok(FunctionInfo { - name: function.name, - // TODO(https://github.com/noir-lang/noir/issues/4720) - acir_opcodes: function.bytecode.functions[0].opcodes.len(), - circuit_size: backend.get_exact_circuit_size(&function.bytecode)?, - }) + .map(|(i, function)| FunctionInfo { + name: compiled_program.names[i].clone(), + acir_opcodes: function.opcodes.len(), }) - .collect::<Result<_, BackendError>>()?; + .collect(); - Ok(ContractInfo { name: contract.name, expression_width, functions }) + ProgramInfo { package_name: package.name.to_string(), expression_width, functions } } diff --git a/tooling/nargo_cli/src/cli/mod.rs b/tooling/nargo_cli/src/cli/mod.rs index ad778549ac0..485ccc7abaf 100644 --- a/tooling/nargo_cli/src/cli/mod.rs +++ b/tooling/nargo_cli/src/cli/mod.rs @@ -6,13 +6,9 @@ use std::path::PathBuf; use color_eyre::eyre; -use crate::backends::get_active_backend; - mod fs; -mod backend_cmd; mod check_cmd; -mod codegen_verifier_cmd; mod compile_cmd; mod dap_cmd; mod debug_cmd; @@ -23,9 +19,7 @@ mod info_cmd; mod init_cmd; mod lsp_cmd; mod new_cmd; -mod prove_cmd; mod test_cmd; -mod verify_cmd; const GIT_HASH: &str = env!("GIT_COMMIT"); const IS_DIRTY: &str = env!("GIT_DIRTY"); @@ -60,10 +54,8 @@ pub(crate) struct NargoConfig { #[non_exhaustive] #[derive(Subcommand, Clone, Debug)] enum NargoCommand { - Backend(backend_cmd::BackendCommand), Check(check_cmd::CheckCommand), Fmt(fmt_cmd::FormatCommand), -
CodegenVerifier(codegen_verifier_cmd::CodegenVerifierCommand), #[command(alias = "build")] Compile(compile_cmd::CompileCommand), New(new_cmd::NewCommand), @@ -73,8 +65,6 @@ enum NargoCommand { Export(export_cmd::ExportCommand), #[command(hide = true)] // Hidden while the feature is being built out Debug(debug_cmd::DebugCommand), - Prove(prove_cmd::ProveCommand), - Verify(verify_cmd::VerifyCommand), Test(test_cmd::TestCommand), Info(info_cmd::InfoCommand), Lsp(lsp_cmd::LspCommand), @@ -94,18 +84,11 @@ pub(crate) fn start_cli() -> eyre::Result<()> { // Search through parent directories to find package root if necessary. if !matches!( command, - NargoCommand::New(_) - | NargoCommand::Init(_) - | NargoCommand::Lsp(_) - | NargoCommand::Backend(_) - | NargoCommand::Dap(_) + NargoCommand::New(_) | NargoCommand::Init(_) | NargoCommand::Lsp(_) | NargoCommand::Dap(_) ) { config.program_dir = find_package_root(&config.program_dir)?; } - let active_backend = get_active_backend(); - let backend = crate::backends::Backend::new(active_backend); - match command { NargoCommand::New(args) => new_cmd::run(args, config), NargoCommand::Init(args) => init_cmd::run(args, config), @@ -114,12 +97,8 @@ pub(crate) fn start_cli() -> eyre::Result<()> { NargoCommand::Debug(args) => debug_cmd::run(args, config), NargoCommand::Execute(args) => execute_cmd::run(args, config), NargoCommand::Export(args) => export_cmd::run(args, config), - NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), - NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), NargoCommand::Test(args) => test_cmd::run(args, config), - NargoCommand::Info(args) => info_cmd::run(&backend, args, config), - NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), - NargoCommand::Backend(args) => backend_cmd::run(args), + NargoCommand::Info(args) => info_cmd::run(args, config), NargoCommand::Lsp(args) => lsp_cmd::run(args, config), NargoCommand::Dap(args) => dap_cmd::run(args, config), NargoCommand::Fmt(args) => fmt_cmd::run(args, config), diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs deleted file mode 100644 index 47c71527fd8..00000000000 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ /dev/null @@ -1,154 +0,0 @@ -use clap::Args; -use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; -use nargo::package::Package; -use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; -use noirc_frontend::graph::CrateName; - -use super::fs::{ - inputs::{read_inputs_from_file, write_inputs_to_file}, - proof::save_proof_to_dir, -}; -use super::NargoConfig; -use crate::{backends::Backend, cli::execute_cmd::execute_program, errors::CliError}; - -/// Create proof for this program. The proof is returned as a hex encoded string. 
-#[derive(Debug, Clone, Args)] -#[clap(visible_alias = "p")] -pub(crate) struct ProveCommand { - /// The name of the toml file which contains the inputs for the prover - #[clap(long, short, default_value = PROVER_INPUT_FILE)] - prover_name: String, - - /// The name of the toml file which contains the inputs for the verifier - #[clap(long, short, default_value = VERIFIER_INPUT_FILE)] - verifier_name: String, - - /// Verify proof after proving - #[arg(long)] - verify: bool, - - /// The name of the package to prove - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Prove all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, - - #[clap(flatten)] - compile_options: CompileOptions, - - /// JSON RPC url to solve oracle calls - #[clap(long)] - oracle_resolver: Option, -} - -pub(crate) fn run( - backend: &Backend, - args: ProveCommand, - config: NargoConfig, -) -> Result<(), CliError> { - let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); - let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - )?; - - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); - for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); - - prove_package( - backend, - &workspace, - package, - compiled_program, - &args.prover_name, - &args.verifier_name, - args.verify, - args.oracle_resolver.as_deref(), - )?; - } - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -pub(crate) fn prove_package( - backend: &Backend, - workspace: &Workspace, - package: &Package, - compiled_program: CompiledProgram, - prover_name: &str, - verifier_name: &str, - check_proof: bool, - foreign_call_resolver_url: Option<&str>, -) -> Result<(), CliError> { - // Parse the initial witness values from Prover.toml - let (inputs_map, _) = - read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?; - - let witness_stack = execute_program(&compiled_program, &inputs_map, foreign_call_resolver_url)?; - - // Write public inputs into Verifier.toml - let public_abi = compiled_program.abi.public_abi(); - // Get the entry point witness for the ABI - let main_witness = - &witness_stack.peek().expect("Should have at least one witness on the stack").witness; - let (public_inputs, return_value) = public_abi.decode(main_witness)?; - - write_inputs_to_file( - &public_inputs, - &return_value, - &public_abi, - &package.root_dir, - verifier_name, - Format::Toml, - )?; - - let proof = backend.prove(&compiled_program.program, witness_stack)?; - - if check_proof { - let public_inputs = public_abi.encode(&public_inputs, return_value)?; - let 
valid_proof = backend.verify(&proof, public_inputs, &compiled_program.program)?; - - if !valid_proof { - return Err(CliError::InvalidProof("".into())); - } - } - - save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?; - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/verify_cmd.rs b/tooling/nargo_cli/src/cli/verify_cmd.rs deleted file mode 100644 index a6078f6c1d3..00000000000 --- a/tooling/nargo_cli/src/cli/verify_cmd.rs +++ /dev/null @@ -1,109 +0,0 @@ -use super::fs::{inputs::read_inputs_from_file, load_hex_data}; -use super::NargoConfig; -use crate::{backends::Backend, errors::CliError}; - -use clap::Args; -use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; -use nargo::package::Package; -use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; -use noirc_frontend::graph::CrateName; - -/// Given a proof and a program, verify whether the proof is valid -#[derive(Debug, Clone, Args)] -#[clap(visible_alias = "v")] -pub(crate) struct VerifyCommand { - /// The name of the toml file which contains the inputs for the verifier - #[clap(long, short, default_value = VERIFIER_INPUT_FILE)] - verifier_name: String, - - /// The name of the package verify - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Verify all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, - - #[clap(flatten)] - compile_options: CompileOptions, -} - -pub(crate) fn run( - backend: &Backend, - args: VerifyCommand, - config: NargoConfig, -) -> Result<(), CliError> { - let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); - let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - )?; - - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); - for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); - - verify_package(backend, &workspace, package, compiled_program, &args.verifier_name)?; - } - - Ok(()) -} - -fn verify_package( - backend: &Backend, - workspace: &Workspace, - package: &Package, - compiled_program: CompiledProgram, - verifier_name: &str, -) -> Result<(), CliError> { - // Load public inputs (if any) from `verifier_name`. 
- let public_abi = compiled_program.abi.public_abi(); - let (public_inputs_map, return_value) = - read_inputs_from_file(&package.root_dir, verifier_name, Format::Toml, &public_abi)?; - - let public_inputs = public_abi.encode(&public_inputs_map, return_value)?; - - let proof_path = - workspace.proofs_directory_path().join(package.name.to_string()).with_extension(PROOF_EXT); - - let proof = load_hex_data(&proof_path)?; - - let valid_proof = backend.verify(&proof, public_inputs, &compiled_program.program)?; - - if valid_proof { - Ok(()) - } else { - Err(CliError::InvalidProof(proof_path)) - } -} diff --git a/tooling/nargo_cli/src/errors.rs b/tooling/nargo_cli/src/errors.rs index 40fb7886405..3e0b13a9cbc 100644 --- a/tooling/nargo_cli/src/errors.rs +++ b/tooling/nargo_cli/src/errors.rs @@ -1,5 +1,4 @@ use acvm::acir::native_types::WitnessStackError; -use hex::FromHexError; use nargo::{errors::CompileError, NargoError}; use nargo_toml::ManifestError; use noir_debugger::errors::DapError; @@ -11,8 +10,7 @@ use thiserror::Error; pub(crate) enum FilesystemError { #[error("Error: {} is not a valid path\nRun either `nargo compile` to generate missing build artifacts or `nargo prove` to construct a proof", .0.display())] PathNotValid(PathBuf), - #[error("Error: could not parse hex build artifact (proof, proving and/or verification keys, ACIR checksum) ({0})")] - HexArtifactNotValid(FromHexError), + #[error( " Error: cannot find {0}.toml file.\n Expected location: {1:?} \n Please generate this file at the expected location." )] @@ -37,9 +35,6 @@ pub(crate) enum CliError { #[error("Error: destination {} already exists", .0.display())] DestinationAlreadyExists(PathBuf), - #[error("Failed to verify proof {}", .0.display())] - InvalidProof(PathBuf), - #[error("Invalid package name {0}. Did you mean to use `--name`?")] InvalidPackageName(String), @@ -68,24 +63,4 @@ pub(crate) enum CliError { /// Error from the compilation pipeline #[error(transparent)] CompileError(#[from] CompileError), - - /// Error related to backend selection/installation. - #[error(transparent)] - BackendError(#[from] BackendError), - - /// Error related to communication with backend. - #[error(transparent)] - BackendCommunicationError(#[from] backend_interface::BackendError), -} - -#[derive(Debug, thiserror::Error)] -pub(crate) enum BackendError { - #[error("No backend is installed with the name {0}")] - UnknownBackend(String), - - #[error("The backend {0} is already installed")] - AlreadyInstalled(String), - - #[error("Backend installation failed: {0}")] - InstallationError(#[from] std::io::Error), } diff --git a/tooling/nargo_cli/src/main.rs b/tooling/nargo_cli/src/main.rs index 6e2b7069bc4..a407d467ced 100644 --- a/tooling/nargo_cli/src/main.rs +++ b/tooling/nargo_cli/src/main.rs @@ -7,7 +7,6 @@ //! This name was used because it sounds like `cargo` and //! Noir Package Manager abbreviated is npm, which is already taken. -mod backends; mod cli; mod errors; diff --git a/tooling/nargo_cli/tests/codegen-verifier.rs b/tooling/nargo_cli/tests/codegen-verifier.rs deleted file mode 100644 index f991f72b108..00000000000 --- a/tooling/nargo_cli/tests/codegen-verifier.rs +++ /dev/null @@ -1,37 +0,0 @@ -//! This integration test aims to check that the `nargo codegen-verifier` will successfully create a -//! file containing a verifier for a simple program. 
- -use assert_cmd::prelude::*; -use predicates::prelude::*; -use std::process::Command; - -use assert_fs::prelude::{PathAssert, PathChild}; - -#[test] -fn simple_verifier_codegen() { - let test_dir = assert_fs::TempDir::new().unwrap(); - std::env::set_current_dir(&test_dir).unwrap(); - - // Create trivial program - let project_name = "hello_world"; - let project_dir = test_dir.child(project_name); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("new").arg(project_name); - cmd.assert().success(); - - std::env::set_current_dir(&project_dir).unwrap(); - - // Run `nargo codegen-verifier` - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("codegen-verifier"); - cmd.assert() - .success() - .stdout(predicate::str::contains("Contract successfully created and located at")); - - project_dir - .child("contract") - .child("hello_world") - .child("plonk_vk.sol") - .assert(predicate::path::is_file()); -} diff --git a/tooling/nargo_cli/tests/hello_world.rs b/tooling/nargo_cli/tests/hello_world.rs index 9fcb0c873e1..6b6931542b5 100644 --- a/tooling/nargo_cli/tests/hello_world.rs +++ b/tooling/nargo_cli/tests/hello_world.rs @@ -34,22 +34,11 @@ fn hello_world_example() { .stdout(predicate::str::contains("Constraint system successfully built!")); project_dir.child("Prover.toml").assert(predicate::path::is_file()); - project_dir.child("Verifier.toml").assert(predicate::path::is_file()); - // `nargo prove` + // `nargo execute` project_dir.child("Prover.toml").write_str("x = 1\ny = 2").unwrap(); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("prove"); - cmd.assert().success(); - - project_dir - .child("proofs") - .child(format!("{project_name}.proof")) - .assert(predicate::path::is_file()); - - // `nargo verify p` - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("verify"); + cmd.arg("execute"); cmd.assert().success(); } diff --git a/tooling/nargo_fmt/src/config.rs b/tooling/nargo_fmt/src/config.rs index 2bb5d97c0af..5e38dc7d8b0 100644 --- a/tooling/nargo_fmt/src/config.rs +++ b/tooling/nargo_fmt/src/config.rs @@ -45,7 +45,7 @@ config! { max_width: usize, 100, "Maximum width of each line"; tab_spaces: usize, 4, "Number of spaces per tab"; remove_nested_parens: bool, true, "Remove nested parens"; - error_on_lost_comment: bool, true, "Error if unable to get comments"; + error_on_lost_comment: bool, false, "Error if unable to get comments"; short_array_element_width_threshold: usize, 10, "Width threshold for an array element to be considered short"; array_width: usize, 100, "Maximum width of an array literal before falling back to vertical formatting"; fn_call_width: usize, 60, "Maximum width of the args of a function call before falling back to vertical formatting"; diff --git a/tooling/noir_codegen/src/index.ts b/tooling/noir_codegen/src/index.ts index fbbab07bcfe..d00990f01bc 100644 --- a/tooling/noir_codegen/src/index.ts +++ b/tooling/noir_codegen/src/index.ts @@ -1,66 +1,17 @@ -import { AbiType } from '@noir-lang/noirc_abi'; import { CompiledCircuit } from '@noir-lang/types'; -import { PrimitiveTypesUsed, generateTsInterface, codegenStructDefinitions } from './noir_types.js'; - -// TODO: reenable this. See `abiTypeToTs` for reasoning. -// export type FixedLengthArray = L extends 0 ? never[]: T[] & { length: L }; - -const codegenPrelude = `/* Autogenerated file, do not edit! 
*/ - -/* eslint-disable */ - -import { Noir, InputMap, CompiledCircuit, ForeignCallHandler } from "@noir-lang/noir_js" - -export { ForeignCallHandler } from "@noir-lang/noir_js" -`; - -const codegenFunction = ( - name: string, - compiled_program: CompiledCircuit, - function_signature: { inputs: [string, string][]; returnValue: string | null }, -) => { - const args = function_signature.inputs.map(([name]) => `${name}`).join(', '); - const args_with_types = function_signature.inputs.map(([name, type]) => `${name}: ${type}`).join(', '); - - return `export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)}; - -export async function ${name}(${args_with_types}, foreignCallHandler?: ForeignCallHandler): Promise<${ - function_signature.returnValue - }> { - const program = new Noir(${name}_circuit); - const args: InputMap = { ${args} }; - const { returnValue } = await program.execute(args, foreignCallHandler); - return returnValue as ${function_signature.returnValue}; -} -`; +import { TypingsGenerator } from './utils/typings_generator.js'; + +export const codegen = ( + programs: [string, CompiledCircuit][], + embedArtifact: boolean, + useFixedLengthArrays: boolean, +): string => { + return new TypingsGenerator( + programs.map((program) => ({ + circuitName: program[0], + artifact: embedArtifact ? program[1] : undefined, + abi: structuredClone(program[1].abi), // We'll mutate the ABI types when doing typescript codegen, so we clone it to avoid mutating the artifact. + })), + useFixedLengthArrays, + ).codegen(); }; - -export const codegen = (programs: [string, CompiledCircuit][]): string => { - let results = [codegenPrelude]; - const primitiveTypeMap = new Map(); - const structTypeMap = new Map(); - - const functions: string[] = []; - for (const [name, program] of programs) { - const function_sig = generateTsInterface(program.abi, structTypeMap, primitiveTypeMap); - functions.push(codegenFunction(name, stripUnwantedFields(program), function_sig)); - } - - const structTypeDefinitions: string = codegenStructDefinitions(structTypeMap, primitiveTypeMap); - - // Add the primitive Noir types that do not have a 1-1 mapping to TypeScript. - const primitiveTypeAliases: string[] = []; - for (const value of primitiveTypeMap.values()) { - primitiveTypeAliases.push(`export type ${value.aliasName} = ${value.tsType};`); - } - - results = results.concat(...primitiveTypeAliases, '', structTypeDefinitions, ...functions); - - return results.join('\n'); -}; - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function stripUnwantedFields(value: any): CompiledCircuit { - const { abi, bytecode } = value; - return { abi, bytecode }; -} diff --git a/tooling/noir_codegen/src/main.ts b/tooling/noir_codegen/src/main.ts index 591e7420dba..835b24a9e48 100644 --- a/tooling/noir_codegen/src/main.ts +++ b/tooling/noir_codegen/src/main.ts @@ -24,7 +24,7 @@ function main() { return [program_name, { abi, bytecode }]; }); - const result = codegen(programs); + const result = codegen(programs, !cliConfig.externalArtifact, cliConfig.useFixedLengthArrays); const outputDir = path.resolve(cliConfig.outDir ?? 
'./codegen'); const outputFile = path.join(outputDir, 'index.ts'); diff --git a/tooling/noir_codegen/src/noir_types.ts b/tooling/noir_codegen/src/noir_types.ts deleted file mode 100644 index 0c0e2b7c60f..00000000000 --- a/tooling/noir_codegen/src/noir_types.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { AbiType, Abi } from '@noir-lang/noirc_abi'; - -/** - * Keep track off all of the Noir primitive types that were used. - * Most of these will not have a 1-1 definition in TypeScript, - * so we will need to generate type aliases for them. - * - * We want to generate type aliases - * for specific types that are used in the ABI. - * - * For example: - * - If `Field` is used we want to alias that - * with `number`. - * - If `u32` is used we want to alias that with `number` too. - */ -export type PrimitiveTypesUsed = { - /** - * The name of the type alias that we will generate. - */ - aliasName: string; - /** - * The TypeScript type that we will alias to. - */ - tsType: string; -}; - -/** - * Typescript does not allow us to check for equality of non-primitive types - * easily, so we create a addIfUnique function that will only add an item - * to the map if it is not already there by using JSON.stringify. - * @param item - The item to add to the map. - */ -function addIfUnique(primitiveTypeMap: Map, item: PrimitiveTypesUsed) { - const key = JSON.stringify(item); - if (!primitiveTypeMap.has(key)) { - primitiveTypeMap.set(key, item); - } -} - -/** - * Converts an ABI type to a TypeScript type. - * @param type - The ABI type to convert. - * @returns The typescript code to define the type. - */ -function abiTypeToTs(type: AbiType, primitiveTypeMap: Map): string { - switch (type.kind) { - case 'field': - addIfUnique(primitiveTypeMap, { aliasName: 'Field', tsType: 'string' }); - return 'Field'; - case 'integer': { - const typeName = type.sign === 'signed' ? `i${type.width}` : `u${type.width}`; - // Javascript cannot safely represent the full range of Noir's integer types as numbers. - // `Number.MAX_SAFE_INTEGER == 2**53 - 1` so we disallow passing numbers to types which may exceed this. - // 52 has been chosen as the cutoff rather than 53 for safety. - const tsType = type.width <= 52 ? `string | number` : `string`; - - addIfUnique(primitiveTypeMap, { aliasName: typeName, tsType }); - return typeName; - } - case 'boolean': - return `boolean`; - case 'array': - // We can't force the usage of fixed length arrays as this currently throws errors in TS. - // The array would need to be `as const` to support this whereas that's unlikely to happen in user code. - // return `FixedLengthArray<${abiTypeToTs(type.type, primitiveTypeMap)}, ${type.length}>`; - return `${abiTypeToTs(type.type, primitiveTypeMap)}[]`; - case 'string': - // We could enforce that literals are the correct length but not generally. - // This would run into similar problems to above. - return `string`; - case 'struct': - return getLastComponentOfPath(type.path); - case 'tuple': { - const field_types = type.fields.map((field) => abiTypeToTs(field, primitiveTypeMap)); - return `[${field_types.join(', ')}]`; - } - default: - throw new Error(`Unknown ABI type ${JSON.stringify(type)}`); - } -} - -/** - * Returns the last component of a path, e.g. "foo::bar::baz" -\> "baz" - * Note: that if we have a path such as "Baz", we will return "Baz". - * - * Since these paths corresponds to structs, we can assume that we - * cannot have "foo::bar::". 
- * - * We also make the assumption that since these paths are coming from - * Noir, then we will not have two paths that look like this: - * - foo::bar::Baz - * - cat::dog::Baz - * ie the last component of the path (struct name) is enough to uniquely identify - * the whole path. - * - * TODO: We should double check this assumption when we use type aliases, - * I expect that `foo::bar::Baz as Dog` would effectively give `foo::bar::Dog` - * @param str - The path to get the last component of. - * @returns The last component of the path. - */ -function getLastComponentOfPath(str: string): string { - const parts = str.split('::'); - const lastPart = parts[parts.length - 1]; - return lastPart; -} - -/** - * Generates TypeScript interfaces for the structs used in the ABI. - * @param type - The ABI type to generate the interface for. - * @param output - The set of structs that we have already generated bindings for. - * @returns The TypeScript code to define the struct. - */ -function generateStructInterfaces( - type: AbiType, - structsEncountered: Map, - primitiveTypeMap: Map, -) { - // Edge case to handle the array of structs case. - if ( - type.kind === 'array' && - type.type.kind === 'struct' && - !structsEncountered.has(getLastComponentOfPath(type.type.path)) - ) { - generateStructInterfaces(type.type, structsEncountered, primitiveTypeMap); - } - if (type.kind !== 'struct') return; - - const structName = getLastComponentOfPath(type.path); - if (!structsEncountered.has(structName)) { - for (const field of type.fields) { - generateStructInterfaces(field.type, structsEncountered, primitiveTypeMap); - } - structsEncountered.set(structName, type.fields); - } -} - -/** - * Generates a TypeScript interface for the ABI. - * @param abiObj - The ABI to generate the interface for. - * @returns The TypeScript code to define the interface. - */ -export function generateTsInterface( - abiObj: Abi, - structsEncountered: Map, - primitiveTypeMap: Map, -): { inputs: [string, string][]; returnValue: string | null } { - // Define structs for composite types - for (const param of abiObj.parameters) { - generateStructInterfaces(param.type, structsEncountered, primitiveTypeMap); - } - - // Generating Return type, if it exists - if (abiObj.return_type != null) { - generateStructInterfaces(abiObj.return_type.abi_type, structsEncountered, primitiveTypeMap); - } - - return getTsFunctionSignature(abiObj, primitiveTypeMap); -} - -export function codegenStructDefinitions( - structsEncountered: Map, - primitiveTypeMap: Map, -): string { - let codeGeneratedStruct = ''; - - for (const [structName, structFields] of structsEncountered) { - codeGeneratedStruct += `export type ${structName} = {\n`; - for (const field of structFields) { - codeGeneratedStruct += ` ${field.name}: ${abiTypeToTs(field.type, primitiveTypeMap)};\n`; - } - codeGeneratedStruct += `};\n\n`; - } - - return codeGeneratedStruct; -} - -function getTsFunctionSignature( - abi: Abi, - primitiveTypeMap: Map, -): { inputs: [string, string][]; returnValue: string | null } { - const inputs: [string, string][] = abi.parameters.map((param) => [ - param.name, - abiTypeToTs(param.type, primitiveTypeMap), - ]); - const returnValue = abi.return_type ? 
abiTypeToTs(abi.return_type.abi_type, primitiveTypeMap) : null; - return { inputs, returnValue }; -} diff --git a/tooling/noir_codegen/src/parseArgs.ts b/tooling/noir_codegen/src/parseArgs.ts index 58468c1b8f8..83b6c8bcdb2 100644 --- a/tooling/noir_codegen/src/parseArgs.ts +++ b/tooling/noir_codegen/src/parseArgs.ts @@ -6,6 +6,8 @@ export interface ParsedArgs { files: string[]; outDir?: string | undefined; inputDir?: string | undefined; + externalArtifact: boolean; + useFixedLengthArrays: boolean; } export function parseArgs(): ParsedArgs { @@ -27,6 +29,17 @@ export function parseArgs(): ParsedArgs { 'Directory containing program artifact files. Inferred as lowest common path of all files if not specified.', }, help: { type: Boolean, defaultValue: false, alias: 'h', description: 'Prints this message.' }, + 'external-artifact': { + type: Boolean, + defaultValue: false, + description: + 'Does not embed the circuit artifact in the code, instead requiring passing the circuit artifact as an argument to the generated functions.', + }, + 'fixed-length-arrays': { + type: Boolean, + defaultValue: false, + description: 'Use fixed-length arrays for inputs and outputs.', + }, }, { helpArg: 'help', @@ -53,6 +66,8 @@ export function parseArgs(): ParsedArgs { files: rawOptions.glob, outDir: rawOptions['out-dir'], inputDir: rawOptions['input-dir'], + externalArtifact: rawOptions['external-artifact'], + useFixedLengthArrays: rawOptions['fixed-length-arrays'], }; } @@ -61,4 +76,6 @@ interface CommandLineArgs { 'out-dir'?: string; 'input-dir'?: string; help: boolean; + 'external-artifact': boolean; + 'fixed-length-arrays': boolean; } diff --git a/tooling/noir_codegen/src/utils/abi_type_with_generics.ts b/tooling/noir_codegen/src/utils/abi_type_with_generics.ts new file mode 100644 index 00000000000..844e116f444 --- /dev/null +++ b/tooling/noir_codegen/src/utils/abi_type_with_generics.ts @@ -0,0 +1,139 @@ +import { AbiType } from '@noir-lang/noirc_abi'; + +/** + * Represents a binding to a generic. + */ +export class BindingId { + constructor( + public id: number, + public isNumeric: boolean, + ) {} +} + +export type StructType = { + path: string; + fields: { name: string; type: AbiTypeWithGenerics }[]; + /** The generics of the struct, bound to the fields */ + generics: BindingId[]; +}; + +export type StringType = { + kind: 'string'; + length: number | BindingId | null; +}; + +export type Constant = { + kind: 'constant'; + value: number; +}; + +export type ArrayType = { + kind: 'array'; + length: number | BindingId | null; + type: AbiTypeWithGenerics; +}; + +export type Tuple = { + kind: 'tuple'; + fields: AbiTypeWithGenerics[]; +}; + +export type Struct = { + kind: 'struct'; + structType: StructType; + /** The arguments are the concrete instantiation of the generics in the struct type. */ + args: AbiTypeWithGenerics[]; +}; + +export type AbiTypeWithGenerics = + | { kind: 'field' } + | { kind: 'boolean' } + | { kind: 'integer'; sign: string; width: number } + | { kind: 'binding'; id: BindingId } + | { kind: 'constant'; value: number } + | StringType + | ArrayType + | Tuple + | Struct; + +/** + * Maps an ABI type to an ABI type with generics. + * This performs pure type conversion, and does not generate any bindings. 
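+ * For example, { kind: 'array', length: 3, type: { kind: 'field' } } maps to
+ * an array of the same length with its element type mapped, while a struct
+ * maps to a struct whose generics and args start out empty and are filled in
+ * later by the demonomorphizer.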
+ */ +export function mapAbiTypeToAbiTypeWithGenerics(abiType: AbiType): AbiTypeWithGenerics { + switch (abiType.kind) { + case 'field': + case 'boolean': + case 'string': + case 'integer': + return abiType; + case 'array': + return { + kind: 'array', + length: abiType.length, + type: mapAbiTypeToAbiTypeWithGenerics(abiType.type), + }; + case 'struct': { + const structType = { + path: abiType.path, + fields: abiType.fields.map((field) => ({ + name: field.name, + type: mapAbiTypeToAbiTypeWithGenerics(field.type), + })), + generics: [], + }; + return { + kind: 'struct', + structType, + args: [], + }; + } + case 'tuple': + return { + kind: 'tuple', + fields: abiType.fields.map(mapAbiTypeToAbiTypeWithGenerics), + }; + default: { + const exhaustiveCheck: never = abiType; + throw new Error(`Unhandled abi type: ${exhaustiveCheck}`); + } + } +} + +/** + * Finds the structs in an ABI type. + * This won't explore nested structs. + */ +export function findStructsInType(abiType: AbiTypeWithGenerics): Struct[] { + switch (abiType.kind) { + case 'field': + case 'boolean': + case 'string': + case 'integer': + return []; + case 'array': + return findStructsInType(abiType.type); + case 'tuple': + return abiType.fields.flatMap(findStructsInType); + case 'struct': + return [abiType]; + default: { + return []; + } + } +} + +/** + * Finds all the structs in an ABI type, including nested structs. + */ +export function findAllStructsInType(abiType: AbiTypeWithGenerics): Struct[] { + let allStructs: Struct[] = []; + let lastStructs = findStructsInType(abiType); + while (lastStructs.length > 0) { + allStructs = allStructs.concat(lastStructs); + lastStructs = lastStructs.flatMap((struct) => + struct.structType.fields.flatMap((field) => findStructsInType(field.type)), + ); + } + return allStructs; +} diff --git a/tooling/noir_codegen/src/utils/demonomorphizer.ts b/tooling/noir_codegen/src/utils/demonomorphizer.ts new file mode 100644 index 00000000000..2b33b574557 --- /dev/null +++ b/tooling/noir_codegen/src/utils/demonomorphizer.ts @@ -0,0 +1,284 @@ +import { + type AbiTypeWithGenerics, + type ArrayType, + BindingId, + type Constant, + type StringType, + type Struct, + type StructType, + type Tuple, + findAllStructsInType, + findStructsInType, +} from './abi_type_with_generics.js'; + +export interface DemonomorphizerConfig { + leaveArrayLengthsUnbounded: boolean; + leaveStringLengthsUnbounded: boolean; +} + +/** + * Demonomorphizes a list of ABI types adding generics to structs. + * Since monomorphization of the generics destroys information, this process is not guaranteed to return the original structure. + * However, it should successfully unify all struct types that share the same name and field names. + */ +export class Demonomorphizer { + private variantsMap: Map<string, Struct[]>; + private visitedStructs: Map<string, StructType>; + private lastBindingId = 0; + + /** + * Demonomorphizes the passed in ABI types, mutating them. + */ + public static demonomorphize(abiTypes: AbiTypeWithGenerics[], config: DemonomorphizerConfig) { + new Demonomorphizer(abiTypes, config); + } + + private constructor( + private types: AbiTypeWithGenerics[], + private config: DemonomorphizerConfig, + ) { + this.variantsMap = new Map(); + this.fillVariantsMap(); + + this.visitedStructs = new Map(); + this.demonomorphizeStructs(); + } + + /** + * Finds all the variants of the structs in the types. + * A variant is every use of a struct with the same name and fields.
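+ * For example, A { x: u32 } and A { x: Field } are two variants of the
+ * struct id A(x): they share a name and field names, so they end up grouped
+ * under the same key.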
+ */ + private fillVariantsMap() { + const allStructs = this.types.flatMap(findAllStructsInType); + for (const struct of allStructs) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + const variants = this.variantsMap.get(id) ?? []; + variants.push(struct); + this.variantsMap.set(id, variants); + } + } + + private demonomorphizeStructs() { + for (const type of this.types) { + const topLevelStructs = findStructsInType(type); + for (const struct of topLevelStructs) { + this.demonomorphizeStruct(struct); + } + } + } + + /** + * Demonomorphizes a struct, by demonomorphizing its dependencies first. + * Then it'll unify the types of the variants generating a unique generic type. + * It'll also generate args that instantiate the generic type with the concrete arguments for each variant. + */ + private demonomorphizeStruct(struct: Struct) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + if (this.visitedStructs.has(id)) { + return; + } + const dependencies = struct.structType.fields.flatMap((field) => findStructsInType(field.type)); + for (const dependency of dependencies) { + this.demonomorphizeStruct(dependency); + } + if (this.visitedStructs.has(id)) { + throw new Error('Circular dependency detected'); + } + + const variants = this.variantsMap.get(id)!; + const mappedStructType = struct.structType; + + for (let i = 0; i < struct.structType.fields.length; i++) { + const variantTypes = variants.map((variant) => variant.structType.fields[i].type); + const mappedType = this.unifyTypes(variantTypes, mappedStructType.generics, variants); + mappedStructType.fields[i].type = mappedType; + } + + // Mutate variants setting the new struct type + variants.forEach((variant) => (variant.structType = mappedStructType)); + + this.visitedStructs.set(id, mappedStructType); + } + + /** + * Tries to unify the types of a set of variants recursively. + * Unification will imply replacing some properties with bindings and pushing bindings to the generics of the struct. 
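+ * For example, unifying the field types u32 and Field produces a fresh
+ * binding T: T is pushed to the struct's generics, and u32 and Field are
+ * recorded as the concrete args of their respective variants.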
+ */ + private unifyTypes( + types: AbiTypeWithGenerics[], + generics: BindingId[], // Mutates generics adding new bindings + variants: Struct[], // mutates variants adding different args to the variants + ): AbiTypeWithGenerics { + const kinds = new Set(types.map((type) => type.kind)); + if (kinds.size > 1) { + return this.buildBindingAndPushToVariants(types, generics, variants); + } + switch (types[0].kind) { + case 'field': + case 'boolean': + case 'binding': + return types[0]; + case 'integer': { + if (allDeepEqual(types)) { + return types[0]; + } else { + return this.buildBindingAndPushToVariants(types, generics, variants); + } + } + case 'string': { + const strings = types as StringType[]; + const unifiedStringType = strings[0]; + if (strings.every((string) => string.length === unifiedStringType.length)) { + return unifiedStringType; + } else if (!this.config.leaveStringLengthsUnbounded) { + unifiedStringType.length = this.buildNumericBindingAndPushToVariants( + strings.map((string) => { + if (typeof string.length !== 'number') { + throw new Error('Trying to unify strings with bindings'); + } + return string.length; + }), + generics, + variants, + ); + return unifiedStringType; + } else { + unifiedStringType.length = null; + return unifiedStringType; + } + } + case 'array': { + const arrays = types as ArrayType[]; + const unifiedArrayType: ArrayType = arrays[0]; + if (!arrays.every((array) => array.length === unifiedArrayType.length)) { + if (!this.config.leaveArrayLengthsUnbounded) { + unifiedArrayType.length = this.buildNumericBindingAndPushToVariants( + arrays.map((array) => { + if (typeof array.length !== 'number') { + throw new Error('Trying to unify arrays with bindings'); + } + return array.length; + }), + generics, + variants, + ); + } else { + unifiedArrayType.length = null; + } + } + + unifiedArrayType.type = this.unifyTypes( + arrays.map((array) => array.type), + generics, + variants, + ); + return unifiedArrayType; + } + case 'tuple': { + const tuples = types as Tuple[]; + const unifiedTupleType: Tuple = tuples[0]; + for (let i = 0; i < unifiedTupleType.fields.length; i++) { + unifiedTupleType.fields[i] = this.unifyTypes( + tuples.map((tuple) => tuple.fields[i]), + generics, + variants, + ); + } + return unifiedTupleType; + } + case 'struct': { + const structs = types as Struct[]; + const ids = new Set(structs.map((struct) => Demonomorphizer.buildIdForStruct(struct.structType))); + if (ids.size > 1) { + // If the types are different structs, we can only unify them by creating a new binding. + // For example, if we have a struct A { x: u32 } and a struct A { x: Field }, the only possible unification is A { x: T } + return this.buildBindingAndPushToVariants(types, generics, variants); + } else { + // If the types are the same struct, we must unify the arguments to the struct. + // For example, if we have A<u32> and A<Field>, we need to unify to A<T> and push T to the generics of the struct type.
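+ // The args are then unified position by position below; any mismatch in an
+ // arg collapses into a fresh binding pushed to the enclosing struct's generics.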
+
+  /**
+   * We consider a struct to be the same if it has the same name and field names.
+   * Structs with the same id will be unified into a single type by the demonomorphizer.
+   */
+  public static buildIdForStruct(struct: StructType): string {
+    const name = struct.path.split('::').pop()!;
+    const fields = struct.fields.map((field) => field.name).join(',');
+    return `${name}(${fields})`;
+  }
+
+  private buildBindingAndPushToVariants(
+    concreteTypes: AbiTypeWithGenerics[],
+    generics: BindingId[],
+    variants: Struct[],
+    isNumeric = false,
+  ): AbiTypeWithGenerics {
+    const bindingId = new BindingId(this.lastBindingId++, isNumeric);
+
+    for (let i = 0; i < variants.length; i++) {
+      const variant = variants[i];
+      const concreteType = concreteTypes[i];
+      variant.args.push(concreteType);
+    }
+
+    generics.push(bindingId);
+    return { kind: 'binding', id: bindingId };
+  }
+
+  private buildNumericBindingAndPushToVariants(
+    concreteNumbers: number[],
+    generics: BindingId[],
+    variants: Struct[],
+  ): BindingId {
+    const bindingId = new BindingId(this.lastBindingId++, true);
+
+    for (let i = 0; i < variants.length; i++) {
+      const variant = variants[i];
+      variant.args.push({ kind: 'constant', value: concreteNumbers[i] });
+    }
+
+    generics.push(bindingId);
+    return bindingId;
+  }
+}
+
+function allDeepEqual<T>(arr: T[]): boolean {
+  if (arr.length === 0) {
+    return true;
+  }
+  const first = JSON.stringify(arr[0]);
+  for (let i = 0; i < arr.length; i++) {
+    if (JSON.stringify(arr[i]) !== first) {
+      return false;
+    }
+  }
+  return true;
+}
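Putting the pieces together, here is a hedged end-to-end sketch of driving the demonomorphizer directly. The static `demonomorphize` entry point and the config keys are the ones used by the typings generator below; the `my_lib::Foo` structs and the exact `AbiType` literal shapes are illustrative:

```typescript
import { mapAbiTypeToAbiTypeWithGenerics } from './abi_type_with_generics.js';
import { Demonomorphizer } from './demonomorphizer.js';

// Two monomorphizations of the "same" struct (same name `Foo`, same field
// names) as they could appear across circuit ABIs:
const fooU32 = mapAbiTypeToAbiTypeWithGenerics({
  kind: 'struct',
  path: 'my_lib::Foo',
  fields: [{ name: 'x', type: { kind: 'integer', sign: 'unsigned', width: 32 } }],
});
const fooField = mapAbiTypeToAbiTypeWithGenerics({
  kind: 'struct',
  path: 'my_lib::Foo',
  fields: [{ name: 'x', type: { kind: 'field' } }],
});

// Mutates the types in place: both usages now share a single generic struct
// type Foo<T> (one fresh binding), and each usage records its concrete
// instantiation in `args`, i.e. Foo<u32> and Foo<Field> respectively.
Demonomorphizer.demonomorphize([fooU32, fooField], {
  leaveArrayLengthsUnbounded: false,
  leaveStringLengthsUnbounded: true,
});
```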
diff --git a/tooling/noir_codegen/src/utils/typings_generator.ts b/tooling/noir_codegen/src/utils/typings_generator.ts
new file mode 100644
index 00000000000..36d2de140f0
--- /dev/null
+++ b/tooling/noir_codegen/src/utils/typings_generator.ts
@@ -0,0 +1,324 @@
+import { CompiledCircuit } from '@noir-lang/types';
+import {
+  AbiTypeWithGenerics,
+  BindingId,
+  StructType,
+  findAllStructsInType,
+  mapAbiTypeToAbiTypeWithGenerics,
+} from './abi_type_with_generics.js';
+import { Demonomorphizer } from './demonomorphizer.js';
+import { Abi } from '@noir-lang/noirc_abi';
+
+const codegenPrelude = `/* Autogenerated file, do not edit! */
+
+/* eslint-disable */
+
+import { Noir, InputMap, CompiledCircuit, ForeignCallHandler } from "@noir-lang/noir_js"
+
+export { ForeignCallHandler } from "@noir-lang/noir_js"
+`;
+/**
+ * Keep track of all of the Noir primitive types that were used.
+ * Most of these will not have a 1-1 definition in TypeScript,
+ * so we will need to generate type aliases for them.
+ *
+ * We want to generate type aliases
+ * for specific types that are used in the ABI.
+ *
+ * For example:
+ * - If `Field` is used we want to alias that
+ * with `string`.
+ * - If `u32` is used we want to alias that with `string` too.
+ */
+type PrimitiveTypesUsed = {
+  /**
+   * The name of the type alias that we will generate.
+   */
+  aliasName: string;
+  /**
+   * The TypeScript type that we will alias to.
+   */
+  tsType: string;
+};
+
+/**
+ * Returns the last component of a path, e.g. "foo::bar::baz" -> "baz".
+ * Note that if we have a path such as "Baz", we will return "Baz".
+ *
+ * Since these paths correspond to structs, we can assume that we
+ * cannot have "foo::bar::".
+ *
+ * We also make the assumption that since these paths are coming from
+ * Noir, we will not have two paths that look like this:
+ * - foo::bar::Baz
+ * - cat::dog::Baz
+ * i.e. the last component of the path (the struct name) is enough to uniquely identify
+ * the whole path.
+ *
+ * TODO: We should double check this assumption when we use type aliases;
+ * I expect that `foo::bar::Baz as Dog` would effectively give `foo::bar::Dog`.
+ * @param str - The path to get the last component of.
+ * @returns The last component of the path.
+ */
+function getLastComponentOfPath(str: string): string {
+  const parts = str.split('::');
+  const lastPart = parts[parts.length - 1];
+  return lastPart;
+}
+
+/**
+ * Replaces a numeric binding with the corresponding generic's name, or with the actual value.
+ */
+function replaceNumericBinding(id: number | BindingId, genericsNameMap: Map<number, string>): string {
+  if (typeof id === 'number') {
+    return id.toString();
+  } else {
+    return genericsNameMap.get(id.id) ?? 'unknown';
+  }
+}
+
+export class TypingsGenerator {
+  /** All the types in the ABIs */
+  private allTypes: AbiTypeWithGenerics[] = [];
+  /** The demonomorphized ABIs of the circuits */
+  private demonomorphizedAbis: {
+    circuitName: string;
+    params: { name: string; type: AbiTypeWithGenerics }[];
+    returnType?: AbiTypeWithGenerics;
+    artifact?: CompiledCircuit;
+  }[] = [];
+  /** Maps struct id to TS name, for structs that share a name but have different field sets */
+  private structIdToTsName = new Map<string, string>();
+  /** Collects all the primitives used in the types, so they can be added to the codegen */
+  private primitiveTypesUsed = new Map<string, PrimitiveTypesUsed>();
+
+  constructor(
+    circuits: { abi: Abi; circuitName: string; artifact?: CompiledCircuit }[],
+    private useFixedLengthArrays: boolean,
+  ) {
+    // Map all the types used in the ABIs to types with generics, then demonomorphize them
+    for (const { abi, circuitName, artifact } of circuits) {
+      const params = abi.parameters.map((param) => {
+        const type = mapAbiTypeToAbiTypeWithGenerics(param.type);
+        this.allTypes.push(type);
+        return { name: param.name, type };
+      });
+      if (abi.return_type) {
+        const returnType = mapAbiTypeToAbiTypeWithGenerics(abi.return_type.abi_type);
+        this.allTypes.push(returnType);
+        this.demonomorphizedAbis.push({ circuitName, params, returnType, artifact });
+      } else {
+        this.demonomorphizedAbis.push({ circuitName, params, artifact });
+      }
+    }
+    // Demonomorphize the types
+    Demonomorphizer.demonomorphize(this.allTypes, {
+      leaveArrayLengthsUnbounded: !useFixedLengthArrays,
+      leaveStringLengthsUnbounded: true,
+    });
+  }
+
+  public codegen(): string {
+    this.primitiveTypesUsed = new Map();
+    const structsCode = this.codegenAllStructs();
+    const interfacesCode = this.codegenAllInterfaces();
+    const primitivesCode = this.codegenAllPrimitives();
+
+    return `
+${codegenPrelude}
+${primitivesCode}
+${structsCode}
+${interfacesCode}`;
+  }
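A minimal usage sketch of the generator: the `Main` circuit and its ABI literal below are hypothetical (hence the loose double assertion, since the illustrative literal may omit fields of the real `Abi` type), while the constructor shape and `codegen()` are as defined above:

```typescript
import { TypingsGenerator } from './typings_generator.js';
import { Abi } from '@noir-lang/noirc_abi';

// Hypothetical ABI for a circuit `main(x: Field) -> Field`; the field names
// follow the usage above (abi.parameters[i].name/.type, abi.return_type.abi_type).
const abi = {
  parameters: [{ name: 'x', type: { kind: 'field' }, visibility: 'private' }],
  return_type: { abi_type: { kind: 'field' }, visibility: 'public' },
} as unknown as Abi;

// true => arrays are typed as FixedLengthArray<T, N> instead of T[].
const generator = new TypingsGenerator([{ abi, circuitName: 'Main' }], true);
const source = generator.codegen();
// `source` now contains the prelude, `export type Field = string;`,
// MainInputType/MainReturnType definitions, and an async `Main(...)` wrapper.
```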
+
+  private codegenAllStructs(): string {
+    const allStructs = this.allTypes.flatMap(findAllStructsInType);
+    // First, deduplicate the structs used
+    const structTypesToExport = new Map<string, StructType>();
+    for (const struct of allStructs) {
+      const id = Demonomorphizer.buildIdForStruct(struct.structType);
+      if (structTypesToExport.has(id)) {
+        continue;
+      }
+      structTypesToExport.set(id, struct.structType);
+    }
+
+    // Then, we have to consider the case where we have structs with the same name but different fields.
+    // For those, we'll naively append a number to the name.
+    const idsPerName = new Map<string, string[]>();
+    for (const [id, structType] of structTypesToExport.entries()) {
+      const name = getLastComponentOfPath(structType.path);
+      const ids = idsPerName.get(name) ?? [];
+      ids.push(id);
+      idsPerName.set(name, ids);
+    }
+
+    this.structIdToTsName = new Map<string, string>();
+    for (const [name, ids] of idsPerName.entries()) {
+      if (ids.length !== 1) {
+        ids.forEach((id, index) => {
+          this.structIdToTsName.set(id, `${name}${index + 1}`);
+        });
+      }
+    }
+    // Now we can just generate the code for the structs
+    let resultCode = '';
+
+    for (const structType of structTypesToExport.values()) {
+      resultCode += this.codegenStructType(structType);
+    }
+
+    return resultCode;
+  }
+
+  private getStructName(structType: StructType): string {
+    return (
+      this.structIdToTsName.get(Demonomorphizer.buildIdForStruct(structType)) || getLastComponentOfPath(structType.path)
+    );
+  }
+
+  private codegenStructType(structType: StructType): string {
+    // Generate names for the generic bindings.
+    const genericsNameMap = new Map<number, string>();
+    structType.generics.forEach((generic, index) => {
+      genericsNameMap.set(generic.id, String.fromCharCode('A'.charCodeAt(0) + index));
+    });
+
+    const name = this.getStructName(structType);
+    const generics = structType.generics.length
+      ? `<${structType.generics
+          .map((generic) => `${genericsNameMap.get(generic.id)}${generic.isNumeric ? ' extends number' : ''}`)
+          .join(', ')}>`
+      : '';
+
+    let resultCode = `export type ${name}${generics} = {\n`;
+
+    for (const field of structType.fields) {
+      resultCode += `  ${field.name}: ${this.codegenType(field.type, genericsNameMap)};\n`;
+    }
+
+    resultCode += '}\n\n';
+
+    return resultCode;
+  }
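To illustrate what `codegenStructType` emits, here is a plausible output fragment for a hypothetical struct `Foo` with one type binding and one numeric binding. Bindings are named `A`, `B`, ... in generics order, numeric ones get `extends number`, and the aliases are included so the fragment stands alone:

```typescript
// Aliases as codegenAllPrimitives would emit them:
export type u32 = string;
export type FixedLengthArray<T, L extends number> = L extends 0 ? never[] : T[] & { length: L };

// The demonomorphized struct itself:
export type Foo<A, B extends number> = {
  items: FixedLengthArray<A, B>;
  count: u32;
};
```

If two exported structs shared the name `Foo` but had different field sets, they would instead be emitted as `Foo1` and `Foo2` via the `structIdToTsName` map above.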
+  private codegenType(type: AbiTypeWithGenerics, genericsNameMap: Map<number, string>): string {
+    switch (type.kind) {
+      case 'field':
+        this.addIfUnique({ aliasName: 'Field', tsType: 'string' });
+        return 'Field';
+      case 'boolean':
+        return 'boolean';
+      case 'integer': {
+        const typeName = type.sign === 'signed' ? `i${type.width}` : `u${type.width}`;
+        // Even though noir accepts numbers or strings for integers, it always returns strings.
+        // So we must use string as the type here.
+        this.addIfUnique({ aliasName: typeName, tsType: 'string' });
+        return typeName;
+      }
+      case 'binding':
+        return genericsNameMap.get(type.id.id) ?? 'unknown';
+      case 'constant':
+        return type.value.toString();
+      case 'string':
+        return `string`;
+      case 'array':
+        if (this.useFixedLengthArrays) {
+          if (type.length === null) {
+            throw new Error('Got unbounded array with fixed length arrays enabled');
+          }
+          return `FixedLengthArray<${this.codegenType(type.type, genericsNameMap)}, ${replaceNumericBinding(
+            type.length,
+            genericsNameMap,
+          )}>`;
+        } else {
+          return `${this.codegenType(type.type, genericsNameMap)}[]`;
+        }
+      case 'tuple': {
+        const fieldTypes = type.fields.map((field) => this.codegenType(field, genericsNameMap));
+        return `[${fieldTypes.join(', ')}]`;
+      }
+      case 'struct': {
+        const name = this.getStructName(type.structType);
+        if (type.args.length) {
+          const args = type.args.map((arg) => this.codegenType(arg, genericsNameMap)).join(', ');
+          return `${name}<${args}>`;
+        } else {
+          return name;
+        }
+      }
+    }
+  }
+
+  /**
+   * TypeScript does not allow us to check for equality of non-primitive types
+   * easily, so we create an addIfUnique function that will only add an item
+   * to the map if it is not already there, by using JSON.stringify.
+   * @param item - The item to add to the map.
+   */
+  private addIfUnique(item: PrimitiveTypesUsed) {
+    const key = JSON.stringify(item);
+    if (!this.primitiveTypesUsed.has(key)) {
+      this.primitiveTypesUsed.set(key, item);
+    }
+  }
+
+  /**
+   * Codegen all the interfaces for the circuits.
+   * For a circuit named Foo, we'll codegen FooInputType and FooReturnType.
+   */
+  private codegenAllInterfaces(): string {
+    let resultCode = '';
+    for (const { circuitName, params, returnType, artifact } of this.demonomorphizedAbis) {
+      const functionSignature = {
+        inputs: params.map((param): [string, string] => [param.name, this.codegenType(param.type, new Map())]),
+        returnValue: returnType ? this.codegenType(returnType, new Map()) : null,
+      };
+      resultCode += this.codegenStructType({
+        path: `${circuitName}InputType`,
+        fields: params,
+        generics: [],
+      });
+
+      if (returnType) {
+        resultCode += `export type ${circuitName}ReturnType = ${this.codegenType(returnType, new Map())};\n`;
+      }
+
+      resultCode += codegenFunction(circuitName, functionSignature, artifact);
+    }
+    return resultCode;
+  }
+
+  private codegenAllPrimitives(): string {
+    let primitiveTypeAliases = this.useFixedLengthArrays
+      ? 'export type FixedLengthArray<T, L extends number> = L extends 0 ? never[]: T[] & { length: L }\n'
+      : '';
+    for (const [, value] of this.primitiveTypesUsed) {
+      primitiveTypeAliases += `export type ${value.aliasName} = ${value.tsType};\n`;
+    }
+    return primitiveTypeAliases;
+  }
+}
+
+const codegenFunction = (
+  name: string,
+  function_signature: { inputs: [string, string][]; returnValue: string | null },
+  compiled_program?: CompiledCircuit,
+): string => {
+  const args = function_signature.inputs.map(([name]) => `${name}`).join(', ');
+  const args_with_types = function_signature.inputs.map(([name, type]) => `${name}: ${type}`).join(', ');
+
+  const artifact = compiled_program
+    ? `export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)};`
+    : '';
+
+  return `${artifact}
+
+export async function ${name}(${args_with_types}${compiled_program ? '' : `, ${name}_circuit: CompiledCircuit`}, foreignCallHandler?: ForeignCallHandler): Promise<${function_signature.returnValue}> {
+  const program = new Noir(${name}_circuit);
+  const args: InputMap = { ${args} };
+  const { returnValue } = await program.execute(args, foreignCallHandler);
+  return returnValue as ${function_signature.returnValue};
+}
+`;
+};
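For reference, a sketch of the wrapper that `codegenFunction` emits when no artifact is embedded (so the caller supplies the circuit as an extra parameter). Here `main`, `Field`, and `MainReturnType` are illustrative stand-ins for generated names; the imports are the ones from the generated prelude:

```typescript
import { Noir, InputMap, CompiledCircuit, ForeignCallHandler } from '@noir-lang/noir_js';

// Illustrative aliases standing in for the generated ones:
type Field = string;
type MainReturnType = Field;

export async function main(
  x: Field,
  main_circuit: CompiledCircuit,
  foreignCallHandler?: ForeignCallHandler,
): Promise<MainReturnType> {
  const program = new Noir(main_circuit);
  const args: InputMap = { x };
  const { returnValue } = await program.execute(args, foreignCallHandler);
  return returnValue as MainReturnType;
}
```

When an artifact is embedded, `codegenFunction` instead inlines `export const main_circuit: CompiledCircuit = ...` and drops the extra parameter.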
diff --git a/tooling/noir_codegen/test/index.test.ts b/tooling/noir_codegen/test/index.test.ts
index 03fb680a537..afc7769ed9e 100644
--- a/tooling/noir_codegen/test/index.test.ts
+++ b/tooling/noir_codegen/test/index.test.ts
@@ -1,18 +1,18 @@
 import { expect } from 'chai';
 // eslint-disable-next-line @typescript-eslint/ban-ts-comment
 // @ts-ignore File is codegenned at test time.
-import { exported_function_foo, MyStruct, u64, ForeignCallHandler } from './codegen/index.js';
+import { exported_function_foo, MyStruct, u64, u32, ForeignCallHandler } from './codegen/index.js';
 
 it('codegens a callable function', async () => {
-  const my_struct = { foo: true, bar: ['12345', '12345', '12345'], baz: '0x00' };
+  const my_struct = { foo: true, bar: ['123', '123', '123', '123'], baz: '0x00' };
 
-  const [sum, constant, struct]: [u64, u64, MyStruct] = await exported_function_foo(
+  const [sum, constant, struct]: [u64, u32, MyStruct] = await exported_function_foo(
     '2',
     '3',
-    [0, 0, 0, 0, 0],
+    ['0x00', '0x00', '0x00', '0x00', '0x00'],
     {
       foo: my_struct,
-      bar: [my_struct, my_struct, my_struct],
+      bar: [my_struct, my_struct],
       baz: '64',
     },
     '12345',
@@ -35,15 +35,15 @@ it('allows passing a custom foreign call handler', async () => {
     return [];
   };
 
-  const my_struct = { foo: true, bar: ['12345', '12345', '12345'], baz: '0x00' };
+  const my_struct = { foo: true, bar: ['123', '123', '123', '123'], baz: '0x00' };
 
-  const [sum, constant, struct]: [u64, u64, MyStruct] = await exported_function_foo(
+  const [sum, constant, struct]: [u64, u32, MyStruct] = await exported_function_foo(
     '2',
     '3',
-    [0, 0, 0, 0, 0],
+    ['0x00', '0x00', '0x00', '0x00', '0x00'],
     {
       foo: my_struct,
-      bar: [my_struct, my_struct, my_struct],
+      bar: [my_struct, my_struct],
       baz: '64',
     },
     '12345',
diff --git a/tooling/noir_codegen/test/test_lib/src/lib.nr b/tooling/noir_codegen/test/test_lib/src/lib.nr
index 23607c6f65f..4915b0a2c49 100644
--- a/tooling/noir_codegen/test/test_lib/src/lib.nr
+++ b/tooling/noir_codegen/test/test_lib/src/lib.nr
@@ -1,17 +1,23 @@
-struct MyStruct {
+struct MyStruct<STR_LEN, BAR_SIZE> {
     foo: bool,
-    bar: [str<5>; 3],
+    bar: [str<STR_LEN>; BAR_SIZE],
     baz: Field
 }
 
-struct NestedStruct {
-    foo: MyStruct,
-    bar: [MyStruct; 3],
-    baz: u64
+struct NestedStruct<BAR_SIZE, FOO_STR_LEN, FOO_BAR_SIZE, BAZ_TYP> {
+    foo: MyStruct<FOO_STR_LEN, FOO_BAR_SIZE>,
+    bar: [MyStruct<FOO_STR_LEN, FOO_BAR_SIZE>; BAR_SIZE],
+    baz: BAZ_TYP
 }
 
 #[export]
-fn exported_function_foo(x: u64, y: u64, array: [u8; 5], my_struct: NestedStruct, string: str<5>) -> (u64, u64, MyStruct) {
+fn exported_function_foo(
+    x: u64,
+    y: u64,
+    array: [u8; 5],
+    my_struct: NestedStruct<2,3,4, Field>,
+    string: str<5>
+) -> (u64, u32, MyStruct<3, 4>) {
     assert(array.len() == 5);
     assert(my_struct.foo.foo);
     assert(string == "12345");
@@ -22,6 +28,6 @@ fn exported_function_foo(x: u64, y: u64, array: [u8; 5], my_struct: NestedStruc
 }
 
 #[export]
-fn exported_function_bar(my_struct: NestedStruct) -> (u64) {
+fn exported_function_bar(my_struct: NestedStruct<1,2,3, u64>) -> (u64) {
     my_struct.baz
 }
diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json
index 6adf8749aba..10fd14a0090 100644
--- a/tooling/noir_js_backend_barretenberg/package.json
+++ b/tooling/noir_js_backend_barretenberg/package.json
@@ -42,7 +42,7 @@
     "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0"
   },
   "dependencies": {
-    "@aztec/bb.js": "0.38.0",
+    "@aztec/bb.js": "0.41.0",
     "@noir-lang/types": "workspace:*",
     "fflate": "^0.8.0"
   },
diff --git a/yarn.lock b/yarn.lock
index 85966ce3392..8fb574afa30 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -221,9 +221,9 @@ __metadata:
   languageName: node
   linkType: hard
 
-"@aztec/bb.js@npm:0.38.0":
-  version: 0.38.0
-  resolution: "@aztec/bb.js@npm:0.38.0"
+"@aztec/bb.js@npm:0.41.0":
+  version: 0.41.0
+  resolution: "@aztec/bb.js@npm:0.41.0"
   dependencies:
     comlink: ^4.4.1
     commander: ^10.0.1
@@ -231,7 +231,7 @@ __metadata:
     tslib: ^2.4.0
   bin:
     bb.js: dest/node/main.js
-  checksum: 5ebc2850f37993db1d0fe4306ec612e9df14c5d227e1451f1b2f96e63e61c64225c46b32d1e1d2a1a0c37795e50b2875362520e9eb49324312516ec9fd6de2c7
+  checksum: e5e0095eaff3de45726366726337b131bb6ff7cf2cb53be705572c7d6715dae4c948bf86a03cfad68bc98c0c2d83e64cbe3723cc72260c8dbfa262af8cb81f9b
   languageName: node
   linkType: hard
 
@@ -4396,7 +4396,7 @@ __metadata:
   version: 0.0.0-use.local
   resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg"
   dependencies:
-    "@aztec/bb.js": 0.38.0
+    "@aztec/bb.js": 0.41.0
     "@noir-lang/types": "workspace:*"
    "@types/node": ^20.6.2
    "@types/prettier": ^3