From 0cee24fab496682a3e3bfc01e16ab9523dd75b33 Mon Sep 17 00:00:00 2001 From: Ignacio Amigo Date: Wed, 11 Dec 2024 19:09:46 -0300 Subject: [PATCH] feat: Introduce ComponentPackage --- CHANGELOG.md | 2 + Cargo.lock | 217 +++++++------- bin/bench-tx/src/main.rs | 8 +- bin/tx-prover/Cargo.toml | 1 + bin/tx-prover/README.md | 8 + bin/tx-prover/src/api/mod.rs | 5 - bin/tx-prover/src/commands/mod.rs | 31 +- bin/tx-prover/src/commands/proxy.rs | 33 ++- bin/tx-prover/src/commands/update_workers.rs | 6 +- bin/tx-prover/src/commands/worker.rs | 13 + bin/tx-prover/src/main.rs | 5 +- bin/tx-prover/src/proxy/mod.rs | 242 ++++++++++------ bin/tx-prover/src/proxy/worker.rs | 85 ++++++ bin/tx-prover/src/utils.rs | 22 ++ miden-tx/src/testing/tx_context/mod.rs | 28 +- miden-tx/src/tests/kernel_tests/test_tx.rs | 4 +- miden-tx/src/tests/mod.rs | 14 +- objects/Cargo.toml | 5 +- objects/src/accounts/mod.rs | 4 + objects/src/accounts/package/mod.rs | 246 ++++++++++++++++ objects/src/accounts/package/storage_entry.rs | 268 ++++++++++++++++++ .../accounts/package/storage_entry/word.rs | 174 ++++++++++++ 22 files changed, 1176 insertions(+), 245 deletions(-) create mode 100644 bin/tx-prover/src/proxy/worker.rs create mode 100644 objects/src/accounts/package/mod.rs create mode 100644 objects/src/accounts/package/storage_entry.rs create mode 100644 objects/src/accounts/package/storage_entry/word.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index f29fec3ef..4c23a4a42 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,8 @@ ### Changes +- Introduced `ComponentPackage` and `ComponentConfig` with TOML serialization (#1015). +- Added health check endpoints to the prover service (#1006). - Implemented serialization for `AccountHeader` (#996). - Updated Pingora crates to 0.4 and added polling time to the configuration file (#997). - Added support for `miden-tx-prover` proxy to update workers on a running proxy (#989). 
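For orientation while reviewing, here is a rough sketch of what a serialized component configuration might look like, based on the `ComponentConfig` fields, the `AccountType` (de)serialization, and the `StorageEntry` definitions added under `objects/src/accounts/package/` in this patch. The top-level keys mirror the derived field names; the component name, the `[[storage]]` table layout, and the hex word encoding are illustrative assumptions, since they depend on the custom `StorageEntry`/`WordRepresentation` serializers.

```toml
# Hypothetical component configuration sketch.
# Top-level keys follow the ComponentConfig fields in this patch; the storage
# entry layout and word encoding below are assumptions, not taken from the code.
name = "example_component"
description = "An example component used to illustrate the TOML layout"
version = "0.1.0"
targets = ["RegularAccountUpdatableCode"]

# A single-word value slot (see StorageEntry::Value).
[[storage]]
name = "slot0"
description = "Initial value for slot 0"
slot = 0
value = "0x0000000000000000000000000000000000000000000000000000000000000000"
```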
diff --git a/Cargo.lock b/Cargo.lock index 8cf63fd0b..a2e70c8c4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -117,9 +117,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.93" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" +checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" [[package]] name = "arc-swap" @@ -229,7 +229,7 @@ dependencies = [ "axum-core", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "hyper 1.5.1", @@ -247,7 +247,7 @@ dependencies = [ "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", - "tower 0.5.1", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", @@ -262,7 +262,7 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "mime", @@ -427,9 +427,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.2" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f34d93e62b03caf570cccc334cbc6c2fceca82f39211051345108adcba3eebdc" +checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d" dependencies = [ "jobserver", "libc", @@ -444,9 +444,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "num-traits", ] @@ -497,9 +497,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.21" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" +checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" dependencies = [ "clap_builder", "clap_derive 4.5.18", @@ -507,13 +507,13 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.21" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" +checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" dependencies = [ "anstream", "anstyle", - "clap_lex 0.7.3", + "clap_lex 0.7.4", "strsim 0.11.1", ] @@ -553,9 +553,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "cmake" @@ -621,7 +621,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.5.21", + "clap 4.5.23", "criterion-plot", "is-terminal", "itertools 0.10.5", @@ -822,9 +822,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "figment" @@ -990,9 +990,9 @@ dependencies = [ [[package]] name = "generator" -version = "0.8.3" 
+version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb949699c3e4df3a183b1d2142cb24277057055ed23c68ed58894f76c517223" +checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd" dependencies = [ "cfg-if", "libc", @@ -1066,7 +1066,7 @@ dependencies = [ "fnv", "futures-core", "futures-sink", - "http 1.1.0", + "http 1.2.0", "indexmap 2.7.0", "slab", "tokio", @@ -1158,9 +1158,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -1185,7 +1185,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -1196,7 +1196,7 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "pin-project-lite", ] @@ -1247,7 +1247,7 @@ dependencies = [ "futures-channel", "futures-util", "h2 0.4.7", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "httparse", "httpdate", @@ -1293,7 +1293,7 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "hyper 1.5.1", "pin-project-lite", @@ -1547,9 +1547,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.74" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a865e038f7f6ed956f788f0d7d60c541fff74c7bd74272c5d4cf15c63743e705" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" dependencies = [ "once_cell", "wasm-bindgen", @@ -1599,9 +1599,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.167" +version = "0.2.168" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" +checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" [[package]] name = "libm" @@ -1665,18 +1665,18 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "logos" -version = "0.14.3" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6aa86787fd2da255f97a4425799c8d1fd39951f5798a1192fc1b956581f605" +checksum = "7251356ef8cb7aec833ddf598c6cb24d17b689d20b993f9d11a3d764e34e6458" dependencies = [ "logos-derive", ] [[package]] name = "logos-codegen" -version = "0.14.3" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f3303189202bb8a052bcd93d66b6c03e6fe70d9c7c47c0ea5e974955e54c876" +checksum = "59f80069600c0d66734f5ff52cc42f2dabd6b29d205f333d61fd7832e9e9963f" dependencies = [ "beef", "fnv", @@ -1684,15 +1684,14 @@ dependencies = [ "proc-macro2", "quote", "regex-syntax 0.8.5", - "rustc_version 0.4.1", "syn 2.0.90", ] [[package]] name = "logos-derive" -version = "0.14.3" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "774a1c225576486e4fdf40b74646f672c542ca3608160d348749693ae9d456e6" +checksum = "24fb722b06a9dc12adb0963ed585f19fc61dc5413e6a9be9422ef92c091e731d" dependencies = [ 
"logos-codegen", ] @@ -1856,7 +1855,7 @@ dependencies = [ "miden-processor", "miden-stdlib", "regex", - "thiserror 2.0.3", + "thiserror 2.0.6", "walkdir", ] @@ -1918,8 +1917,11 @@ dependencies = [ "miden-verifier", "rand", "rstest", + "semver 1.0.23", + "serde", "tempfile", - "thiserror 2.0.3", + "thiserror 2.0.6", + "toml", "winter-rand-utils", ] @@ -1992,7 +1994,7 @@ dependencies = [ "miden-verifier", "rand", "rand_chacha", - "thiserror 2.0.3", + "thiserror 2.0.6", "winter-maybe-async", ] @@ -2002,7 +2004,7 @@ version = "0.7.0" dependencies = [ "async-trait", "axum", - "clap 4.5.21", + "clap 4.5.23", "figment", "getrandom", "miden-lib", @@ -2025,6 +2027,7 @@ dependencies = [ "toml", "tonic", "tonic-build", + "tonic-health", "tonic-web", "tonic-web-wasm-client", "tracing", @@ -2454,7 +2457,7 @@ dependencies = [ "blake2", "bytes", "hex", - "http 1.1.0", + "http 1.2.0", "httparse", "httpdate", "indexmap 1.9.3", @@ -2494,7 +2497,7 @@ dependencies = [ "flate2", "futures", "h2 0.4.7", - "http 1.1.0", + "http 1.2.0", "httparse", "httpdate", "libc", @@ -2540,7 +2543,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bcb3f62d852da015e76ced56e93e6d52941679a9825281c90f2897841129e59d" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", "httparse", "pingora-error", "pingora-http", @@ -2556,7 +2559,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70202f126056f366549afc804741e12dd9f419cfc79a0063ab15653007a0f4c6" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", "pingora-error", ] @@ -2589,7 +2592,7 @@ dependencies = [ "derivative", "fnv", "futures", - "http 1.1.0", + "http 1.2.0", "log", "pingora-core", "pingora-error", @@ -2638,7 +2641,7 @@ dependencies = [ "clap 3.2.25", "futures", "h2 0.4.7", - "http 1.1.0", + "http 1.2.0", "log", "once_cell", "pingora-cache", @@ -2778,9 +2781,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" +checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" dependencies = [ "bytes", "prost-derive", @@ -2788,11 +2791,10 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15" +checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" dependencies = [ - "bytes", "heck 0.5.0", "itertools 0.13.0", "log", @@ -2809,9 +2811,9 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" +checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" dependencies = [ "anyhow", "itertools 0.13.0", @@ -2822,9 +2824,9 @@ dependencies = [ [[package]] name = "prost-reflect" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b7535b02f0e5efe3e1dbfcb428be152226ed0c66cad9541f2274c8ba8d4cd40" +checksum = "20ae544fca2892fd4b7e9ff26cba1090cedf1d4d95c2aded1af15d2f93f270b8" dependencies = [ "logos", "miette", @@ -2835,9 +2837,9 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.13.3" +version = "0.13.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670" +checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" dependencies = [ "prost", ] @@ -2936,9 +2938,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ "bitflags 2.6.0", ] @@ -3132,15 +3134,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.41" +version = "0.38.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" +checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" dependencies = [ "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3258,6 +3260,9 @@ name = "semver" version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +dependencies = [ + "serde", +] [[package]] name = "semver-parser" @@ -3267,18 +3272,18 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.215" +version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" +checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.215" +version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" +checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" dependencies = [ "proc-macro2", "quote", @@ -3676,11 +3681,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.3" +version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa" +checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47" dependencies = [ - "thiserror-impl 2.0.3", + "thiserror-impl 2.0.6", ] [[package]] @@ -3696,9 +3701,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.3" +version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" +checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312" dependencies = [ "proc-macro2", "quote", @@ -3795,9 +3800,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -3819,9 +3824,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" 
+checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -3876,7 +3881,7 @@ dependencies = [ "base64 0.22.1", "bytes", "h2 0.4.7", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "hyper 1.5.1", @@ -3908,6 +3913,19 @@ dependencies = [ "syn 2.0.90", ] +[[package]] +name = "tonic-health" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1eaf34ddb812120f5c601162d5429933c9b527d901ab0e7f930d3147e33a09b2" +dependencies = [ + "async-stream", + "prost", + "tokio", + "tokio-stream", + "tonic", +] + [[package]] name = "tonic-web" version = "0.12.3" @@ -3916,7 +3934,7 @@ checksum = "5299dd20801ad736dccb4a5ea0da7376e59cd98f213bf1c3d478cf53f4834b58" dependencies = [ "base64 0.22.1", "bytes", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "pin-project", @@ -3938,7 +3956,7 @@ dependencies = [ "byteorder", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "httparse", @@ -3975,14 +3993,14 @@ dependencies = [ [[package]] name = "tower" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper 0.1.2", + "sync_wrapper 1.0.2", "tokio", "tower-layer", "tower-service", @@ -3997,7 +4015,7 @@ checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" dependencies = [ "bitflags 2.6.0", "bytes", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "pin-project-lite", @@ -4297,9 +4315,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.97" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d15e63b4482863c109d70a7b8706c1e364eb6ea449b201a76c5b89cedcec2d5c" +checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" dependencies = [ "cfg-if", "once_cell", @@ -4308,13 +4326,12 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.97" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d36ef12e3aaca16ddd3f67922bc63e48e953f126de60bd33ccc0101ef9998cd" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", "syn 2.0.90", @@ -4323,9 +4340,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.47" +version = "0.4.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dfaf8f50e5f293737ee323940c7d8b08a66a95a419223d9f41610ca08b0833d" +checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" dependencies = [ "cfg-if", "js-sys", @@ -4336,9 +4353,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.97" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "705440e08b42d3e4b36de7d66c944be628d579796b8090bfa3471478a2260051" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4346,9 +4363,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.97" +version = 
"0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98c9ae5a76e46f4deecd0f0255cc223cfa18dc9b261213b8aa0c7b36f61b3f1d" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ "proc-macro2", "quote", @@ -4359,9 +4376,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.97" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ee99da9c5ba11bd675621338ef6fa52296b76b83305e9b6e5c77d4c286d6d49" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" [[package]] name = "wasm-streams" @@ -4378,9 +4395,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.74" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a98bc3c33f0fe7e59ad7cd041b89034fa82a7c2d4365ca538dda6cdaf513863c" +checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/bin/bench-tx/src/main.rs b/bin/bench-tx/src/main.rs index aeba0c6b2..f326c1acd 100644 --- a/bin/bench-tx/src/main.rs +++ b/bin/bench-tx/src/main.rs @@ -3,7 +3,6 @@ use std::{ fs::{read_to_string, write, File}, io::Write, path::Path, - sync::Arc, }; use miden_lib::{notes::create_p2id_note, transaction::TransactionKernel}; @@ -76,7 +75,7 @@ pub fn benchmark_default_tx() -> Result { .collect::>(); let executor: TransactionExecutor = - TransactionExecutor::new(Arc::new(tx_context.clone()), None).with_tracing(); + TransactionExecutor::new(tx_context.get_data_store(), None).with_tracing(); let executed_transaction = executor .execute_transaction(account_id, block_ref, ¬e_ids, tx_context.tx_args().clone()) .map_err(|e| e.to_string())?; @@ -118,9 +117,8 @@ pub fn benchmark_p2id() -> Result { .input_notes(vec![note.clone()]) .build(); - let executor = - TransactionExecutor::new(Arc::new(tx_context.clone()), Some(falcon_auth.clone())) - .with_tracing(); + let executor = TransactionExecutor::new(tx_context.get_data_store(), Some(falcon_auth.clone())) + .with_tracing(); let block_ref = tx_context.tx_inputs().block_header().block_num(); let note_ids = tx_context diff --git a/bin/tx-prover/Cargo.toml b/bin/tx-prover/Cargo.toml index ae37e36c7..36b2b6182 100644 --- a/bin/tx-prover/Cargo.toml +++ b/bin/tx-prover/Cargo.toml @@ -54,6 +54,7 @@ serde_qs = { version = "0.13" } tokio = { version = "1.38", optional = true, features = ["full"] } tokio-stream = { version = "0.1", optional = true, features = [ "net" ]} toml = { version = "0.8" } +tonic-health = { version = "0.12" } tonic-web = { version = "0.12", optional = true } tracing = { version = "0.1", optional = true } tracing-subscriber = { version = "0.3", features = ["fmt", "json", "env-filter"], optional = true } diff --git a/bin/tx-prover/README.md b/bin/tx-prover/README.md index d9c58e273..ac3282484 100644 --- a/bin/tx-prover/README.md +++ b/bin/tx-prover/README.md @@ -59,6 +59,8 @@ max_queue_items = 10 max_retries_per_request = 1 # Maximum amount of requests that a given IP address can make per second max_req_per_sec = 5 +# Interval to check the health of the workers +health_check_interval_secs = 1 [[workers]] host = "0.0.0.0" @@ -102,6 +104,12 @@ This changes will be persisted to the configuration file. Note that, in order to update the workers, the proxy must be running in the same computer as the command is being executed because it will check if the client address is localhost to avoid any security issues. 
+### Health check + +The worker service implements the [gRPC Health Check](https://grpc.io/docs/guides/health-checking/) standard, and includes the methods described in this [official proto file](https://github.com/grpc/grpc-proto/blob/master/grpc/health/v1/health.proto). + +The proxy service uses this health check to determine if a worker is available to receive requests. If a worker is not available, it will be removed from the set of workers that the proxy can use to send requests, and will persist this change in the configuration file. + ## Logging Both the worker and the proxy will use the `info` log level by default, but it can be changed by setting the `RUST_LOG` environment variable. diff --git a/bin/tx-prover/src/api/mod.rs b/bin/tx-prover/src/api/mod.rs index 3ac92d5b0..c71fb344d 100644 --- a/bin/tx-prover/src/api/mod.rs +++ b/bin/tx-prover/src/api/mod.rs @@ -28,11 +28,6 @@ pub struct ProverRpcApi { local_prover: Mutex, } -// We need to implement Send and Sync for the generated code to be able to use the prover in the -// shared context. -unsafe impl Send for ProverRpcApi {} -unsafe impl Sync for ProverRpcApi {} - #[async_trait::async_trait] impl ProverApi for ProverRpcApi { async fn prove_transaction( diff --git a/bin/tx-prover/src/commands/mod.rs b/bin/tx-prover/src/commands/mod.rs index e188006bf..746dd6c96 100644 --- a/bin/tx-prover/src/commands/mod.rs +++ b/bin/tx-prover/src/commands/mod.rs @@ -9,6 +9,7 @@ use init::Init; use miden_tx_prover::PROVER_SERVICE_CONFIG_FILE_NAME; use proxy::StartProxy; use serde::{Deserialize, Serialize}; +use tracing::debug; use update_workers::{AddWorkers, RemoveWorkers, UpdateWorkers}; use worker::StartWorker; @@ -41,6 +42,8 @@ pub struct ProxyConfig { pub max_req_per_sec: isize, /// Time in milliseconds to poll available workers. pub available_workers_polling_time_ms: u64, + /// Health check interval in seconds. + pub health_check_interval_secs: u64, } impl Default for ProxyConfig { @@ -55,6 +58,7 @@ impl Default for ProxyConfig { max_retries_per_request: 1, max_req_per_sec: 5, available_workers_polling_time_ms: 20, + health_check_interval_secs: 1, } } } @@ -96,10 +100,19 @@ impl ProxyConfig { .write(config_as_toml_string.as_bytes()) .map_err(|err| format!("error writing to file: {err}"))?; - println!("Config updated successfully"); + debug!("Config updated successfully"); Ok(()) } + + /// Updates the workers in the configuration with the new list. + pub(crate) fn set_workers(workers: Vec) -> Result<(), String> { + let mut proxy_config = Self::load_config_from_file()?; + + proxy_config.workers = workers; + + proxy_config.save_to_config_file() + } } /// Configuration for a worker @@ -137,7 +150,7 @@ pub enum Command { /// values. The file will be named as defined in the /// [miden_tx_prover::PROVER_SERVICE_CONFIG_FILE_NAME] constant. Init(Init), - /// Starts the workers defined in the config file. + /// Starts the workers with the configuration defined in the command. StartWorker(StartWorker), /// Starts the proxy defined in the config file. 
StartProxy(StartProxy), @@ -155,26 +168,22 @@ pub enum Command { /// CLI entry point impl Cli { - pub fn execute(&self) -> Result<(), String> { + pub async fn execute(&self) -> Result<(), String> { match &self.action { // For the `StartWorker` command, we need to create a new runtime and run the worker - Command::StartWorker(worker_init) => { - let rt = tokio::runtime::Runtime::new() - .map_err(|e| format!("Failed to create runtime: {:?}", e))?; - rt.block_on(worker_init.execute()) - }, - Command::StartProxy(proxy_init) => proxy_init.execute(), + Command::StartWorker(worker_init) => worker_init.execute().await, + Command::StartProxy(proxy_init) => proxy_init.execute().await, Command::Init(init) => { // Init does not require async, so run directly init.execute() }, Command::AddWorkers(update_workers) => { let update_workers: UpdateWorkers = update_workers.clone().into(); - update_workers.execute() + update_workers.execute().await }, Command::RemoveWorkers(update_workers) => { let update_workers: UpdateWorkers = update_workers.clone().into(); - update_workers.execute() + update_workers.execute().await }, } } diff --git a/bin/tx-prover/src/commands/proxy.rs b/bin/tx-prover/src/commands/proxy.rs index 834b9cc8f..f18c5d998 100644 --- a/bin/tx-prover/src/commands/proxy.rs +++ b/bin/tx-prover/src/commands/proxy.rs @@ -1,8 +1,13 @@ use clap::Parser; -use pingora::{apps::HttpServerOptions, lb::Backend, prelude::Opt, server::Server}; +use pingora::{ + apps::HttpServerOptions, + lb::Backend, + prelude::{background_service, Opt}, + server::Server, +}; use pingora_proxy::http_proxy_service; -use crate::proxy::LoadBalancer; +use crate::proxy::{LoadBalancer, LoadBalancerState}; /// Starts the proxy defined in the config file. #[derive(Debug, Parser)] @@ -13,8 +18,8 @@ impl StartProxy { /// /// This method will first read the config file to get the list of workers to start. It will /// then start a proxy with each worker as a backend. 
- pub fn execute(&self) -> Result<(), String> { - let mut server = Server::new(Some(Opt::default())).expect("Failed to create server"); + pub async fn execute(&self) -> Result<(), String> { + let mut server = Server::new(Some(Opt::default())).map_err(|err| err.to_string())?; server.bootstrap(); let proxy_config = super::ProxyConfig::load_config_from_file()?; @@ -23,25 +28,33 @@ impl StartProxy { .workers .iter() .map(|worker| format!("{}:{}", worker.host, worker.port)) - .map(|worker| Backend::new(&worker).expect("Failed to create backend")) - .collect::>(); + .map(|worker| Backend::new(&worker).map_err(|err| err.to_string())) + .collect::, String>>()?; - let worker_lb = LoadBalancer::new(workers, &proxy_config); + let worker_lb = LoadBalancerState::new(workers, &proxy_config).await?; + + let health_check_service = background_service("health_check", worker_lb); + let worker_lb = health_check_service.task(); // Set up the load balancer - let mut lb = http_proxy_service(&server.configuration, worker_lb); + let mut lb = http_proxy_service(&server.configuration, LoadBalancer(worker_lb)); let proxy_host = proxy_config.host; let proxy_port = proxy_config.port.to_string(); lb.add_tcp(format!("{}:{}", proxy_host, proxy_port).as_str()); - let logic = lb.app_logic_mut().expect("No app logic found"); + let logic = lb.app_logic_mut().ok_or("Failed to get app logic")?; let mut http_server_options = HttpServerOptions::default(); // Enable HTTP/2 for plaintext http_server_options.h2c = true; logic.server_options = Some(http_server_options); + server.add_service(health_check_service); server.add_service(lb); - server.run_forever(); + tokio::task::spawn_blocking(|| server.run_forever()) + .await + .map_err(|err| err.to_string())?; + + Ok(()) } } diff --git a/bin/tx-prover/src/commands/update_workers.rs b/bin/tx-prover/src/commands/update_workers.rs index d3a92f2dd..6bde5a532 100644 --- a/bin/tx-prover/src/commands/update_workers.rs +++ b/bin/tx-prover/src/commands/update_workers.rs @@ -57,10 +57,8 @@ impl UpdateWorkers { /// - If the request fails. /// - If the status code is not successful. /// - If the X-Worker-Count header is missing. - pub fn execute(&self) -> Result<(), String> { + pub async fn execute(&self) -> Result<(), String> { // Define a runtime - let rt = tokio::runtime::Runtime::new() - .map_err(|e| format!("Failed to create runtime: {:?}", e))?; let query_params = serde_qs::to_string(&self).map_err(|err| err.to_string())?; @@ -79,7 +77,7 @@ impl UpdateWorkers { .map_err(|err| err.to_string())?; // Make the request - let response = rt.block_on(client.get(url).send()).map_err(|err| err.to_string())?; + let response = client.get(url).send().await.map_err(|err| err.to_string())?; // Check status code if !response.status().is_success() { diff --git a/bin/tx-prover/src/commands/worker.rs b/bin/tx-prover/src/commands/worker.rs index 4bff717e9..eeb965d8b 100644 --- a/bin/tx-prover/src/commands/worker.rs +++ b/bin/tx-prover/src/commands/worker.rs @@ -1,6 +1,8 @@ use clap::Parser; +use miden_tx_prover::generated::api_server::ApiServer; use tokio::net::TcpListener; use tokio_stream::wrappers::TcpListenerStream; +use tonic_health::server::health_reporter; use tracing::info; use crate::api::RpcListener; @@ -22,6 +24,10 @@ impl StartWorker { /// This method receives the host and port from the CLI and starts a worker on that address. /// In case that one of the parameters is not provided, it will default to `0.0.0.0` for the /// host and `50051` for the port. 
+ /// + /// The worker includes a health reporter that will mark the service as serving, following the + /// [gRPC health checking protocol]( + /// https://github.com/grpc/grpc-proto/blob/master/grpc/health/v1/health.proto). pub async fn execute(&self) -> Result<(), String> { let worker_addr = format!("{}:{}", self.host, self.port); let rpc = @@ -32,9 +38,16 @@ impl StartWorker { rpc.listener.local_addr().map_err(|err| err.to_string())? ); + // Create a health reporter + let (mut health_reporter, health_service) = health_reporter(); + + // Mark the service as serving + health_reporter.set_serving::>().await; + tonic::transport::Server::builder() .accept_http1(true) .add_service(tonic_web::enable(rpc.api_service)) + .add_service(health_service) .serve_with_incoming(TcpListenerStream::new(rpc.listener)) .await .map_err(|err| err.to_string())?; diff --git a/bin/tx-prover/src/main.rs b/bin/tx-prover/src/main.rs index 825192823..278ae2570 100644 --- a/bin/tx-prover/src/main.rs +++ b/bin/tx-prover/src/main.rs @@ -5,7 +5,8 @@ mod utils; use commands::Cli; use utils::setup_tracing; -fn main() -> Result<(), String> { +#[tokio::main] +async fn main() -> Result<(), String> { use clap::Parser; setup_tracing(); @@ -14,7 +15,7 @@ fn main() -> Result<(), String> { let cli = Cli::parse(); // execute cli action - cli.execute() + cli.execute().await } // TESTS diff --git a/bin/tx-prover/src/proxy/mod.rs b/bin/tx-prover/src/proxy/mod.rs index dfa749efe..fd18188d5 100644 --- a/bin/tx-prover/src/proxy/mod.rs +++ b/bin/tx-prover/src/proxy/mod.rs @@ -1,23 +1,26 @@ -use std::{collections::VecDeque, sync::Arc, time::Duration}; +use std::{collections::VecDeque, future::Future, pin::Pin, sync::Arc, time::Duration}; use async_trait::async_trait; use once_cell::sync::Lazy; use pingora::{ lb::Backend, prelude::*, + server::ShutdownWatch, + services::background::BackgroundService, upstreams::peer::{Peer, ALPN}, }; use pingora_core::{upstreams::peer::HttpPeer, Result}; use pingora_limits::rate::Rate; use pingora_proxy::{ProxyHttp, Session}; -use tokio::sync::RwLock; -use tracing::{error, info}; +use tokio::{sync::RwLock, time::sleep}; +use tracing::{error, info, warn}; use uuid::Uuid; +use worker::Worker; use crate::{ commands::{ update_workers::{Action, UpdateWorkers}, - ProxyConfig, WorkerConfig, + ProxyConfig, }, utils::{ create_queue_full_response, create_response_with_error_message, @@ -25,40 +28,16 @@ use crate::{ }, }; +mod worker; + /// Localhost address const LOCALHOST_ADDR: &str = "127.0.0.1"; -// WORKER -// ================================================================================================ - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Worker { - worker: Backend, - is_available: bool, -} - -impl Worker { - pub fn new(worker: Backend) -> Self { - Self { worker, is_available: true } - } -} - -impl TryInto for &Worker { - type Error = String; - - fn try_into(self) -> std::result::Result { - self.worker - .as_inet() - .ok_or_else(|| "Failed to get worker address".to_string()) - .map(|worker_addr| WorkerConfig::new(&worker_addr.ip().to_string(), worker_addr.port())) - } -} - // LOAD BALANCER // ================================================================================================ /// Load balancer that uses a round robin strategy -pub struct LoadBalancer { +pub struct LoadBalancerState { workers: Arc>>, timeout_secs: Duration, connection_timeout_secs: Duration, @@ -66,24 +45,36 @@ pub struct LoadBalancer { max_retries_per_request: usize, max_req_per_sec: isize, 
available_workers_polling_time: Duration, + health_check_frequency: Duration, } -impl LoadBalancer { +impl LoadBalancerState { /// Create a new load balancer - pub fn new(workers: Vec, config: &ProxyConfig) -> Self { - let workers: Vec = workers.into_iter().map(Worker::new).collect(); + pub async fn new( + initial_workers: Vec, + config: &ProxyConfig, + ) -> core::result::Result { + let mut workers: Vec = Vec::with_capacity(initial_workers.len()); - Self { - workers: Arc::new(RwLock::new(workers.clone())), - timeout_secs: Duration::from_secs(config.timeout_secs), - connection_timeout_secs: Duration::from_secs(config.connection_timeout_secs), + let connection_timeout = Duration::from_secs(config.connection_timeout_secs); + let total_timeout = Duration::from_secs(config.timeout_secs); + + for worker in initial_workers { + workers.push(Worker::new(worker, connection_timeout, total_timeout).await?); + } + + Ok(Self { + workers: Arc::new(RwLock::new(workers)), + timeout_secs: total_timeout, + connection_timeout_secs: connection_timeout, max_queue_items: config.max_queue_items, max_retries_per_request: config.max_retries_per_request, max_req_per_sec: config.max_req_per_sec, available_workers_polling_time: Duration::from_millis( config.available_workers_polling_time_ms, ), - } + health_check_frequency: Duration::from_secs(config.health_check_interval_secs), + }) } /// Gets an available worker and marks it as unavailable. @@ -91,8 +82,8 @@ impl LoadBalancer { /// If no worker is available, it will return None. pub async fn pop_available_worker(&self) -> Option { let mut available_workers = self.workers.write().await; - available_workers.iter_mut().find(|w| w.is_available).map(|w| { - w.is_available = false; + available_workers.iter_mut().find(|w| w.is_available()).map(|w| { + w.set_availability(false); w.clone() }) } @@ -100,10 +91,10 @@ impl LoadBalancer { /// Marks the given worker as available. /// /// If the worker is not in the list, it won't be added. - pub async fn add_available_worker(&self, worker: Backend) { + pub async fn add_available_worker(&self, worker: Worker) { let mut available_workers = self.workers.write().await; - if let Some(w) = available_workers.iter_mut().find(|w| w.worker == worker) { - w.is_available = true; + if let Some(w) = available_workers.iter_mut().find(|w| *w == &worker) { + w.set_availability(true); } } @@ -130,45 +121,41 @@ impl LoadBalancer { update_workers: UpdateWorkers, ) -> std::result::Result<(), String> { let mut workers = self.workers.write().await; - info!("Current workers: {:?}", workers); - let workers_to_update: Vec = update_workers + let workers_to_update: Vec = update_workers .workers .iter() .map(|worker| Backend::new(worker)) .collect::, _>>() - .map_err(|err| format!("Failed to create backend: {}", err))? 
- .into_iter() - .map(Worker::new) - .collect(); + .map_err(|err| format!("Failed to create backend: {}", err))?; + + let mut native_workers = Vec::new(); + + for worker in workers_to_update { + native_workers + .push(Worker::new(worker, self.connection_timeout_secs, self.timeout_secs).await?); + } match update_workers.action { Action::Add => { - for worker in workers_to_update { - if !workers.iter().any(|w| w.worker == worker.worker) { + for worker in native_workers { + if !workers.iter().any(|w| w == &worker) { workers.push(worker); } } }, Action::Remove => { - for worker in workers_to_update { - workers.retain(|w| w.worker != worker.worker); + for worker in native_workers { + workers.retain(|w| w != &worker); } }, } - let mut configuration = ProxyConfig::load_config_from_file() - .map_err(|err| format!("Failed to load configuration: {}", err))?; - let new_list_of_workers = workers.iter().map(|worker| worker.try_into()).collect::, _>>()?; - configuration.workers = new_list_of_workers; - - configuration - .save_to_config_file() - .map_err(|err| format!("Failed to save configuration: {}", err))?; + ProxyConfig::set_workers(new_list_of_workers)?; info!("Workers updated: {:?}", workers); @@ -239,6 +226,27 @@ impl LoadBalancer { let workers_count = self.num_workers().await; Some(create_workers_updated_response(session, workers_count).await) } + + /// Check the health of the workers and returns a list of healthy workers. + /// + /// Performs a health check on each worker using the gRPC health check protocol. If a worker + /// is not healthy, it won't be included in the list of healthy workers. + async fn check_workers_health( + &self, + workers: impl Iterator, + ) -> Vec { + let mut healthy_workers = Vec::new(); + + for worker in workers { + if worker.is_healthy().await { + healthy_workers.push(worker.clone()); + } else { + warn!("Worker {} is not healthy", worker.address()); + } + } + + healthy_workers + } } /// Rate limiter @@ -298,7 +306,7 @@ pub struct RequestContext { /// Unique ID for the request request_id: Uuid, /// Worker that will process the request - worker: Option, + worker: Option, } impl RequestContext { @@ -312,11 +320,21 @@ impl RequestContext { } /// Set the worker that will process the request - fn set_worker(&mut self, worker: Backend) { + fn set_worker(&mut self, worker: Worker) { self.worker = Some(worker); } } +// LOAD BALANCER WRAPPER +// ================================================================================================ + +/// Wrapper around the load balancer that implements the ProxyHttp trait +/// +/// This wrapper is used to implement the ProxyHttp trait for Arc. +/// This is necessary because we want to share the load balancer between the proxy server and the +/// health check background service. +pub struct LoadBalancer(pub Arc); + /// Implements load-balancing of incoming requests across a pool of workers. 
/// /// At the backend-level, a request lifecycle works as follows: @@ -369,7 +387,7 @@ impl ProxyHttp for LoadBalancer { // Special handling for localhost if client_addr.contains(LOCALHOST_ADDR) { - if let Some(response) = self.handle_update_workers_request(session).await { + if let Some(response) = self.0.handle_update_workers_request(session).await { return response; } } @@ -380,8 +398,8 @@ impl ProxyHttp for LoadBalancer { let curr_window_requests = RATE_LIMITER.observe(&user_id, 1); // Rate limit the request - if curr_window_requests > self.max_req_per_sec { - return create_too_many_requests_response(session, self.max_req_per_sec).await; + if curr_window_requests > self.0.max_req_per_sec { + return create_too_many_requests_response(session, self.0.max_req_per_sec).await; }; let queue_len = QUEUE.len().await; @@ -390,7 +408,7 @@ impl ProxyHttp for LoadBalancer { info!("Queue length: {}", queue_len); // Check if the queue is full - if queue_len >= self.max_queue_items { + if queue_len >= self.0.max_queue_items { return create_queue_full_response(session).await; } @@ -423,33 +441,33 @@ impl ProxyHttp for LoadBalancer { } // Check if there is an available worker - if let Some(worker) = self.pop_available_worker().await { - info!( - "Worker {} picked up the request with ID: {}", - worker.worker.addr, request_id - ); - ctx.set_worker(worker.worker); + if let Some(worker) = self.0.pop_available_worker().await { + info!("Worker {} picked up the request with ID: {}", worker.address(), request_id); + ctx.set_worker(worker); break; } info!("All workers are busy"); - tokio::time::sleep(self.available_workers_polling_time).await; + tokio::time::sleep(self.0.available_workers_polling_time).await; } // Remove the request from the queue QUEUE.dequeue().await; // Set SNI - let mut http_peer = - HttpPeer::new(ctx.worker.clone().expect("Failed to get worker"), false, "".to_string()); + let mut http_peer = HttpPeer::new( + ctx.worker.clone().expect("Failed to get worker").address(), + false, + "".to_string(), + ); let peer_opts = http_peer.get_mut_peer_options().ok_or(Error::new(ErrorType::InternalError))?; // Timeout settings - peer_opts.total_connection_timeout = Some(self.timeout_secs); - peer_opts.connection_timeout = Some(self.connection_timeout_secs); - peer_opts.read_timeout = Some(self.timeout_secs); - peer_opts.write_timeout = Some(self.timeout_secs); - peer_opts.idle_timeout = Some(self.timeout_secs); + peer_opts.total_connection_timeout = Some(self.0.timeout_secs); + peer_opts.connection_timeout = Some(self.0.connection_timeout_secs); + peer_opts.read_timeout = Some(self.0.timeout_secs); + peer_opts.write_timeout = Some(self.0.timeout_secs); + peer_opts.idle_timeout = Some(self.0.timeout_secs); // Enable HTTP/2 peer_opts.alpn = ALPN::H2; @@ -492,7 +510,7 @@ impl ProxyHttp for LoadBalancer { ctx: &mut Self::CTX, mut e: Box, ) -> Box { - if ctx.tries > self.max_retries_per_request { + if ctx.tries > self.0.max_retries_per_request { return e; } ctx.tries += 1; @@ -514,7 +532,63 @@ impl ProxyHttp for LoadBalancer { // Mark the worker as available if let Some(worker) = ctx.worker.take() { - self.add_available_worker(worker).await; + self.0.add_available_worker(worker).await; } } } + +/// Implement the BackgroundService trait for the LoadBalancer +/// +/// A [BackgroundService] can be run as part of a Pingora application to add supporting logic that +/// exists outside of the request/response lifecycle. 
+/// +/// We use this implementation to periodically check the health of the workers and update the list +/// of available workers. +impl BackgroundService for LoadBalancerState { + /// Starts the health check background service. + /// + /// This function is called when the Pingora server tries to start all the services. The + /// background service can return at any time or wait for the `shutdown` signal. + /// + /// The health check background service will periodically check the health of the workers + /// using the gRPC health check protocol. If a worker is not healthy, it will be removed from + /// the list of available workers. + /// + /// # Errors + /// - If the worker has an invalid URI. + /// - If a [WorkerConfig] cannot be created from a given [Worker]. + fn start<'life0, 'async_trait>( + &'life0 self, + _shutdown: ShutdownWatch, + ) -> Pin + ::core::marker::Send + 'async_trait>> + where + 'life0: 'async_trait, + Self: 'async_trait, + { + Box::pin(async move { + loop { + let mut workers = self.workers.write().await; + + // Perform health checks on workers and retain healthy ones + let healthy_workers = self.check_workers_health(workers.iter_mut()).await; + + // Update the worker list with healthy workers + *workers = healthy_workers; + + // Persist the updated worker list to the configuration file + let worker_configs = workers + .iter() + .map(|worker| worker.try_into()) + .collect::, _>>() + .expect("Failed to convert workers to worker configs"); + + if let Err(err) = ProxyConfig::set_workers(worker_configs) { + error!("Failed to update workers in the configuration file: {}", err); + } + + // Sleep for the defined interval before the next health check + sleep(self.health_check_frequency).await; + } + }) + } +} diff --git a/bin/tx-prover/src/proxy/worker.rs b/bin/tx-prover/src/proxy/worker.rs new file mode 100644 index 000000000..47a9c7b24 --- /dev/null +++ b/bin/tx-prover/src/proxy/worker.rs @@ -0,0 +1,85 @@ +use std::time::Duration; + +use pingora::lb::Backend; +use tonic::transport::Channel; +use tonic_health::pb::{ + health_check_response::ServingStatus, health_client::HealthClient, HealthCheckRequest, +}; +use tracing::error; + +use crate::{commands::WorkerConfig, utils::create_health_check_client}; + +// WORKER +// ================================================================================================ + +/// A worker used for processing requests. +/// +/// A worker consists of a backend service (defined by the worker address), a flag indicating whether +/// the worker is currently available to process new requests, and a gRPC health check client. +#[derive(Debug, Clone)] +pub struct Worker { + backend: Backend, + health_check_client: HealthClient, + is_available: bool, +} + +impl Worker { + pub async fn new( + worker: Backend, + connection_timeout: Duration, + total_timeout: Duration, + ) -> Result { + let health_check_client = + create_health_check_client(worker.addr.to_string(), connection_timeout, total_timeout) + .await?; + + Ok(Self { + backend: worker, + is_available: true, + health_check_client, + }) + } + + pub fn address(&self) -> String { + self.backend.addr.to_string() + } + + pub async fn is_healthy(&mut self) -> bool { + match self + .health_check_client + .check(HealthCheckRequest { service: "".to_string() }) + .await + { + Ok(response) => response.into_inner().status() == ServingStatus::Serving, + Err(err) => { + error!("Failed to check worker health ({}): {}", self.address(), err); + false + }, + } + } + + pub fn is_available(&self) -> bool { + self.is_available + } + + pub fn set_availability(&mut self, is_available: bool) { + self.is_available = is_available; + } +} + +impl PartialEq for Worker { + fn eq(&self, other: &Self) -> bool { + self.backend == other.backend + } +} + +impl TryInto for &Worker { + type Error = String; + + fn try_into(self) -> std::result::Result { + self.backend + .as_inet() + .ok_or_else(|| "Failed to get worker address".to_string()) + .map(|worker_addr| WorkerConfig::new(&worker_addr.ip().to_string(), worker_addr.port())) + } +} diff --git a/bin/tx-prover/src/utils.rs b/bin/tx-prover/src/utils.rs index 8ddeb4d82..6e310d6d6 100644 --- a/bin/tx-prover/src/utils.rs +++ b/bin/tx-prover/src/utils.rs @@ -1,5 +1,9 @@ +use std::time::Duration; + use pingora::{http::ResponseHeader, Error, ErrorType}; use pingora_proxy::Session; +use tonic::transport::Channel; +use tonic_health::pb::health_client::HealthClient; const RESOURCE_EXHAUSTED_CODE: u16 = 8; @@ -74,3 +78,21 @@ pub async fn create_response_with_error_message( session.write_response_header(Box::new(header), true).await?; Ok(true) } + +/// Create a gRPC [HealthClient] for the given worker address. +/// +/// Returns an error if the worker URI is invalid or the connection cannot be established. +pub async fn create_health_check_client( + address: String, + connection_timeout: Duration, + total_timeout: Duration, +) -> Result, String> { + Channel::from_shared(format!("http://{}", address)) + .map_err(|err| err.to_string())?
+ .connect_timeout(connection_timeout) + .timeout(total_timeout) + .connect() + .await + .map(HealthClient::new) + .map_err(|err| err.to_string()) +} diff --git a/miden-tx/src/testing/tx_context/mod.rs b/miden-tx/src/testing/tx_context/mod.rs index 85ab62fb7..88317f748 100644 --- a/miden-tx/src/testing/tx_context/mod.rs +++ b/miden-tx/src/testing/tx_context/mod.rs @@ -86,21 +86,20 @@ impl TransactionContext { /// Executes the transaction through a [TransactionExecutor] #[maybe_async] pub fn execute(self) -> Result { - let mock_data_store = self.clone(); - let account_id = self.account().id(); - let block_num = mock_data_store.tx_inputs.block_header().block_num(); + let block_num = self.tx_inputs().block_header().block_num(); + let notes: Vec = + self.tx_inputs().input_notes().into_iter().map(|n| n.id()).collect(); + let authenticator = self .authenticator .map(|auth| Arc::new(auth) as Arc); - let mut tx_executor = TransactionExecutor::new(Arc::new(mock_data_store), authenticator); + let mut tx_executor = TransactionExecutor::new(Arc::new(self.tx_inputs), authenticator); for code in self.foreign_codes { tx_executor.load_account_code(&code); } - let notes: Vec = self.tx_inputs.input_notes().into_iter().map(|n| n.id()).collect(); - maybe_await!(tx_executor.execute_transaction(account_id, block_num, ¬es, self.tx_args)) } @@ -127,13 +126,14 @@ impl TransactionContext { pub fn tx_inputs(&self) -> &TransactionInputs { &self.tx_inputs } -} -unsafe impl Send for TransactionContext {} -unsafe impl Sync for TransactionContext {} + pub fn get_data_store(&self) -> Arc { + Arc::new(self.tx_inputs().clone()) + } +} #[maybe_async_trait] -impl DataStore for TransactionContext { +impl DataStore for TransactionInputs { #[maybe_async] fn get_transaction_inputs( &self, @@ -141,10 +141,10 @@ impl DataStore for TransactionContext { block_num: u32, notes: &[NoteId], ) -> Result { - assert_eq!(account_id, self.tx_inputs.account().id()); - assert_eq!(block_num, self.tx_inputs.block_header().block_num()); - assert_eq!(notes.len(), self.tx_inputs.input_notes().num_notes()); + assert_eq!(account_id, self.account().id()); + assert_eq!(block_num, self.block_header().block_num()); + assert_eq!(notes.len(), self.input_notes().num_notes()); - Ok(self.tx_inputs.clone()) + Ok(self.clone()) } } diff --git a/miden-tx/src/tests/kernel_tests/test_tx.rs b/miden-tx/src/tests/kernel_tests/test_tx.rs index 1c7d180fe..17d4a69e7 100644 --- a/miden-tx/src/tests/kernel_tests/test_tx.rs +++ b/miden-tx/src/tests/kernel_tests/test_tx.rs @@ -1,4 +1,4 @@ -use alloc::{sync::Arc, vec::Vec}; +use alloc::vec::Vec; use std::string::{String, ToString}; use miden_lib::{ @@ -1029,7 +1029,7 @@ fn test_fpi_execute_foreign_procedure() { .collect::>(); let mut executor: TransactionExecutor = - TransactionExecutor::new(Arc::new(tx_context.clone()), None).with_tracing(); + TransactionExecutor::new(tx_context.get_data_store(), None).with_tracing(); // load the mast forest of the foreign account's code to be able to create an account procedure // index map and execute the specified foreign procedure diff --git a/miden-tx/src/tests/mod.rs b/miden-tx/src/tests/mod.rs index d1cef2bb8..fa1c1d86f 100644 --- a/miden-tx/src/tests/mod.rs +++ b/miden-tx/src/tests/mod.rs @@ -59,7 +59,7 @@ fn transaction_executor_witness() { .with_mock_notes_preserved() .build(); - let executor = TransactionExecutor::new(Arc::new(tx_context.clone()), None); + let executor = TransactionExecutor::new(tx_context.get_data_store(), None); let account_id = tx_context.account().id(); @@ 
-386,7 +386,7 @@ fn executed_transaction_account_delta_new() { fn test_empty_delta_nonce_update() { let tx_context = TransactionContextBuilder::with_standard_account(ONE).build(); - let executor = TransactionExecutor::new(Arc::new(tx_context.clone()), None); + let executor = TransactionExecutor::new(tx_context.get_data_store(), None); let account_id = tx_context.tx_inputs().account().id(); let tx_script_src = " @@ -447,7 +447,7 @@ fn test_send_note_proc() { .build(); let executor = - TransactionExecutor::new(Arc::new(tx_context.clone()), None).with_debug_mode(true); + TransactionExecutor::new(tx_context.get_data_store(), None).with_debug_mode(true); let account_id = tx_context.tx_inputs().account().id(); // removed assets @@ -592,7 +592,7 @@ fn executed_transaction_output_notes() { .build(); let executor = - TransactionExecutor::new(Arc::new(tx_context.clone()), None).with_debug_mode(true); + TransactionExecutor::new(tx_context.get_data_store(), None).with_debug_mode(true); let account_id = tx_context.tx_inputs().account().id(); // removed assets @@ -843,7 +843,7 @@ fn prove_witness_and_verify() { .map(|note| note.id()) .collect::>(); - let executor = TransactionExecutor::new(Arc::new(tx_context.clone()), None); + let executor = TransactionExecutor::new(tx_context.get_data_store(), None); let executed_transaction = executor .execute_transaction(account_id, block_ref, ¬e_ids, tx_context.tx_args().clone()) .unwrap(); @@ -869,7 +869,7 @@ fn test_tx_script() { let tx_context = TransactionContextBuilder::with_standard_account(ONE) .with_mock_notes_preserved() .build(); - let executor = TransactionExecutor::new(Arc::new(tx_context.clone()), None); + let executor = TransactionExecutor::new(tx_context.get_data_store(), None); let account_id = tx_context.tx_inputs().account().id(); @@ -1006,7 +1006,7 @@ fn transaction_executor_account_code_using_custom_library() { tx_context.tx_args().advice_inputs().clone().map, ); - let mut executor = TransactionExecutor::new(Arc::new(tx_context.clone()), None); + let mut executor = TransactionExecutor::new(tx_context.get_data_store(), None); // Load the external library into the executor to make it available during transaction // execution. 
executor.load_library(&external_library); diff --git a/objects/Cargo.toml b/objects/Cargo.toml index 779559ee5..357e7cbe5 100644 --- a/objects/Cargo.toml +++ b/objects/Cargo.toml @@ -22,7 +22,7 @@ bench = false [features] concurrent = ["std"] default = ["std"] -std = ["assembly/std", "miden-crypto/std", "miden-verifier/std", "vm-core/std", "vm-processor/std"] +std = ["assembly/std", "miden-crypto/std", "miden-verifier/std", "vm-core/std", "vm-processor/std", "dep:toml", "dep:semver", "dep:serde"] testing = ["dep:winter-rand-utils", "dep:rand"] [dependencies] @@ -31,6 +31,9 @@ log = { version = "0.4", optional = true } miden-crypto = { workspace = true } miden-verifier = { workspace = true } rand = { workspace = true, optional = true } +semver = { version = "1.0", features = ["serde"], optional = true } +serde = { version = "1.0", features = ["derive"], optional = true } +toml = { version = "0.8", optional = true } thiserror = { workspace = true } vm-core = { workspace = true } vm-processor = { workspace = true } diff --git a/objects/src/accounts/mod.rs b/objects/src/accounts/mod.rs index 0113f368a..8ca54a3c5 100644 --- a/objects/src/accounts/mod.rs +++ b/objects/src/accounts/mod.rs @@ -29,6 +29,10 @@ pub use delta::{ NonFungibleAssetDelta, NonFungibleDeltaAction, StorageMapDelta, }; +// TODO: Restrict visibility to just necessary structs +#[cfg(feature = "std")] +pub mod package; + mod seed; pub use seed::{get_account_seed, get_account_seed_single}; diff --git a/objects/src/accounts/package/mod.rs b/objects/src/accounts/package/mod.rs new file mode 100644 index 000000000..60738de44 --- /dev/null +++ b/objects/src/accounts/package/mod.rs @@ -0,0 +1,246 @@ +use alloc::{string::String, vec::Vec}; +use std::{collections::BTreeSet, string::ToString}; + +use assembly::Library; +use semver::Version; +use serde::{ + de::{Error as DeError, Unexpected}, + Deserialize, Deserializer, Serialize, Serializer, +}; +use vm_core::utils::{Deserializable, Serializable}; + +use super::AccountType; + +mod storage_entry; +pub use storage_entry::StorageEntry; + +// COMPONENT PACKAGE +// ================================================================================================ + +// TODO: Add docs +pub struct ComponentPackage { + config: ComponentConfig, + library: Library, +} + +impl ComponentPackage { + pub fn new(config: ComponentConfig, library: Library) -> Self { + Self { config, library } + } + + pub fn config(&self) -> &ComponentConfig { + &self.config + } + + pub fn library(&self) -> &Library { + &self.library + } +} + +impl Serializable for ComponentPackage { + fn write_into(&self, target: &mut W) { + // TODO: remove this unwrap/refactor config serialization + target.write(toml::to_string(&self.config).unwrap()); + target.write(&self.library); + } +} + +impl Deserializable for ComponentPackage { + fn read_from( + source: &mut R, + ) -> Result { + // TODO: remove this unwrap/refactor config deserialization + let config = toml::from_str(&String::read_from(source)?).unwrap(); + let library = Library::read_from(source)?; + + Ok(ComponentPackage::new(config, library)) + } +} + +// COMPONENTCONFIG +// ================================================================================================ + +/// Represents the full component configuration. +/// +/// The configuration is meant to be serialized and deserialized from a TOML file. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ComponentConfig { + /// The human-readable name of the component. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct ComponentConfig {
+    /// The human-readable name of the component.
+    name: String,
+
+    /// A brief description of what this component is and how it works.
+    description: String,
+
+    /// The version of the component using semantic versioning.
+    /// This can be used to track and manage component upgrades.
+    version: Version,
+
+    /// A set of supported target account types for this component.
+    targets: BTreeSet<AccountType>,
+
+    /// A list of storage entries defining the component's storage layout and initialization
+    /// values.
+    storage: Vec<StorageEntry>,
+}
+
+impl ComponentConfig {
+    /// Creates a new `ComponentConfig`.
+    ///
+    /// # Errors
+    ///
+    /// - If the specified storage slots are not contiguous across all storage entries.
+    pub fn new(
+        name: String,
+        description: String,
+        version: Version,
+        targets: BTreeSet<AccountType>,
+        storage: Vec<StorageEntry>,
+    ) -> Result<Self, String> {
+        // Ensure no gaps in slots
+        let mut count = 0u8;
+        for storage_entry in storage.iter() {
+            for slot in storage_entry.slots() {
+                if count != *slot {
+                    // TODO: Implement errors
+                    return Err("Storage entries do not specify contiguous slots".to_string());
+                }
+                count += 1;
+            }
+        }
+
+        Ok(Self {
+            name,
+            description,
+            version,
+            targets,
+            storage,
+        })
+    }
+
+    pub fn name(&self) -> &str {
+        &self.name
+    }
+
+    pub fn description(&self) -> &str {
+        &self.description
+    }
+
+    pub fn version(&self) -> &Version {
+        &self.version
+    }
+
+    pub fn targets(&self) -> &BTreeSet<AccountType> {
+        &self.targets
+    }
+
+    pub fn storage(&self) -> &Vec<StorageEntry> {
+        &self.storage
+    }
+}
+
+// SERIALIZATION
+// ================================================================================================
+
+impl Serialize for AccountType {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        let s = match self {
+            AccountType::FungibleFaucet => "FungibleFaucet",
+            AccountType::NonFungibleFaucet => "NonFungibleFaucet",
+            AccountType::RegularAccountImmutableCode => "RegularAccountImmutableCode",
+            AccountType::RegularAccountUpdatableCode => "RegularAccountUpdatableCode",
+        };
+        serializer.serialize_str(s)
+    }
+}
+
+impl<'de> Deserialize<'de> for AccountType {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let s: String = Deserialize::deserialize(deserializer)?;
+
+        match s.to_lowercase().as_str() {
+            "fungiblefaucet" => Ok(AccountType::FungibleFaucet),
+            "nonfungiblefaucet" => Ok(AccountType::NonFungibleFaucet),
+            "regularaccountimmutablecode" => Ok(AccountType::RegularAccountImmutableCode),
+            "regularaccountupdatablecode" => Ok(AccountType::RegularAccountUpdatableCode),
+            other => Err(D::Error::invalid_value(Unexpected::Str(other), &"a valid account type")),
+        }
+    }
+}
+
+// TESTS
+// ================================================================================================
+
+#[cfg(test)]
+mod tests {
+    use storage_entry::WordRepresentation;
+
+    use super::*;
+
+    #[test]
+    fn test_contiguous_value_slots() {
+        let storage = vec![
+            StorageEntry::Value {
+                name: "slot0".into(),
+                description: None,
+                slot: 0,
+                value: WordRepresentation::SingleHex(Default::default()),
+            },
+            StorageEntry::MultiSlot {
+                name: "slot1".into(),
+                description: None,
+                slots: vec![1, 2],
+                values: vec![
+                    WordRepresentation::Array(Default::default()),
+                    WordRepresentation::SingleHex(Default::default()),
+                ],
+            },
+        ];
+
+        let original_config = ComponentConfig::new(
+            "test".into(),
+            "desc".into(),
+            Version::parse("0.1.0").unwrap(),
+            BTreeSet::new(),
+            storage,
+        )
+        .unwrap();
+
+        let serialized = toml::to_string(&original_config).unwrap();
+
+        let deserialized: ComponentConfig = toml::from_str(&serialized).unwrap();
+
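+        // Round-tripping through TOML should produce an identical configuration.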
+        assert_eq!(deserialized, original_config);
+    }
+
+    #[test]
+    fn test_new_non_contiguous_value_slots() {
+        let storage = vec![
+            StorageEntry::Value {
+                name: "slot0".into(),
+                description: None,
+                slot: 0,
+                value: Default::default(),
+            },
+            StorageEntry::Value {
+                name: "slot2".into(),
+                description: None,
+                slot: 2,
+                value: Default::default(),
+            },
+        ];
+
+        let result = ComponentConfig::new(
+            "test".into(),
+            "desc".into(),
+            Version::parse("0.1.0").unwrap(),
+            BTreeSet::new(),
+            storage,
+        );
+        assert!(result.is_err());
+    }
+}
diff --git a/objects/src/accounts/package/storage_entry.rs b/objects/src/accounts/package/storage_entry.rs
new file mode 100644
index 000000000..fb4298794
--- /dev/null
+++ b/objects/src/accounts/package/storage_entry.rs
@@ -0,0 +1,268 @@
+use alloc::{string::String, vec::Vec};
+
+use serde::{de::Error, ser::SerializeStruct, Deserialize, Deserializer, Serialize, Serializer};
+
+mod word;
+pub use word::*;
+
+// STORAGE ENTRY
+// ================================================================================================
+
+/// Represents a single entry in the component's storage layout.
+///
+/// Each entry can describe:
+/// - A value slot (single word or multiple words).
+/// - A map slot (key-value pairs).
+/// - A multi-slot entry (spanning multiple contiguous slots, with multiple values).
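+///
+/// As an illustrative sketch (names and values below are placeholders), a multi-slot entry
+/// could be written in TOML as:
+///
+/// ```toml
+/// [[storage]]
+/// name = "multi_slot_entry"
+/// slots = [1, 2]
+/// values = [
+///     ["0x1", "0x2", "0x3", "0x4"],
+///     ["0x5", "0x6", "0x7", "0x8"],
+/// ]
+/// ```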
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum StorageEntry {
+    /// A value slot, which can contain one or more words. Each word is a hex-encoded string.
+    Value {
+        /// The human-readable name of the slot.
+        name: String,
+        /// An optional description for the slot, explaining its purpose.
+        description: Option<String>,
+        /// The numeric index of this slot in the component's storage layout.
+        slot: u8,
+        /// The initial value(s) for this slot.
+        value: WordRepresentation,
+    },
+
+    /// A map slot, containing multiple key-value pairs. Keys and values are hex-encoded strings.
+    Map {
+        /// The human-readable name of the map slot.
+        name: String,
+        /// An optional description for the slot, explaining its purpose.
+        description: Option<String>,
+        /// The numeric index of this map slot in the component's storage.
+        slot: u8,
+        /// A list of key-value pairs to initialize in this map slot.
+        values: Vec<MapEntry>,
+    },
+
+    /// A multi-slot entry, representing a single logical value across multiple slots.
+    MultiSlot {
+        /// The human-readable name of this multi-slot entry.
+        name: String,
+        /// An optional description for the slot, explaining its purpose.
+        description: Option<String>,
+        /// The indices of the slots that form this multi-slot entry.
+        slots: Vec<u8>,
+        /// A list of values to fill the logical slot, with a length equal to the number of slots.
+        values: Vec<WordRepresentation>,
+    },
+}
+
+impl StorageEntry {
+    pub fn slots(&self) -> &[u8] {
+        match self {
+            StorageEntry::MultiSlot { slots, .. } => slots.as_slice(),
+            StorageEntry::Value { slot, .. } => std::slice::from_ref(slot),
+            StorageEntry::Map { slot, .. } => std::slice::from_ref(slot),
+        }
+    }
+}
+
+// SERIALIZATION
+// ================================================================================================
+
+/// Used as a helper for validating and (de)serializing storage entries.
+#[derive(Serialize, Deserialize)]
+struct RawStorageEntry {
+    name: String,
+    description: Option<String>,
+    slot: Option<u8>,
+    slots: Option<Vec<u8>>,
+    value: Option<WordRepresentation>,
+    values: Option<Vec<WordRepresentation>>,
+}
+
+impl RawStorageEntry {
+    pub fn validate(&self) -> Result<(), String> {
+        if self.slot.is_some() && self.slots.is_some() {
+            return Err("Fields `slot` and `slots` are mutually exclusive".into());
+        }
+
+        if self.value.is_some() && self.values.is_some() {
+            return Err("Fields `value` and `values` are mutually exclusive".into());
+        }
+
+        let slots_count = if self.slot.is_some() {
+            1
+        } else {
+            self.slots
+                .as_ref()
+                .ok_or_else(|| "Either `slot` or `slots` must be specified".to_string())?
+                .len()
+        };
+
+        let values_count = if self.value.is_some() {
+            1
+        } else {
+            self.values
+                .as_ref()
+                .ok_or_else(|| "Either `value` or `values` must be specified".to_string())?
+                .len()
+        };
+
+        if slots_count != values_count {
+            return Err(format!(
+                "Number of slots ({}) does not match number of values ({})",
+                slots_count, values_count
+            ));
+        }
+
+        Ok(())
+    }
+}
+
+impl Serialize for StorageEntry {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        match self {
+            StorageEntry::Value { name, description, slot, value } => {
+                let field_count = 3 + if description.is_some() { 1 } else { 0 };
+                let mut s = serializer.serialize_struct("Storage", field_count)?;
+                s.serialize_field("name", name)?;
+                if let Some(desc) = description {
+                    s.serialize_field("description", desc)?;
+                }
+                s.serialize_field("slot", slot)?;
+                s.serialize_field("value", value)?;
+                s.end()
+            },
+            StorageEntry::Map { name, description, slot, values } => {
+                let field_count = 3 + if description.is_some() { 1 } else { 0 };
+                let mut s = serializer.serialize_struct("Storage", field_count)?;
+                s.serialize_field("name", name)?;
+                if let Some(desc) = description {
+                    s.serialize_field("description", desc)?;
+                }
+                s.serialize_field("slot", slot)?;
+                s.serialize_field("values", values)?;
+                s.end()
+            },
+            StorageEntry::MultiSlot { name, description, slots, values } => {
+                let field_count = 3 + if description.is_some() { 1 } else { 0 };
+                let mut s = serializer.serialize_struct("Storage", field_count)?;
+                s.serialize_field("name", name)?;
+                if let Some(desc) = description {
+                    s.serialize_field("description", desc)?;
+                }
+                s.serialize_field("slots", slots)?;
+                s.serialize_field("values", values)?;
+
+                s.end()
+            },
+        }
+    }
+}
+
+impl<'de> Deserialize<'de> for StorageEntry {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let raw = RawStorageEntry::deserialize(deserializer)?;
+        raw.validate().map_err(D::Error::custom)?;
+
+        let value_present = raw.value.is_some();
+        let values_present = raw.values.is_some();
+        let slots_present = raw.slots.is_some();
+
+        // Infer variant based on which fields are present
+        match (value_present, values_present, slots_present) {
+            (true, false, false) => {
+                // Value variant
+                Ok(StorageEntry::Value {
+                    name: raw.name,
+                    description: raw.description,
+                    slot: raw.slot.ok_or_else(|| D::Error::missing_field("slot"))?,
+                    value: raw.value.unwrap(),
+                })
+            },
+            (false, true, false) => {
+                // Map variant
+                todo!()
+            },
+            (false, true, true) => {
+                // MultiSlot variant
+                if raw.slot.is_some() || raw.value.is_some() {
+                    return Err(D::Error::custom("Invalid fields present for multi-slot variant."));
+                }
+
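+                // `validate()` has already ensured that `slots` and `values` have equal lengths.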
+                Ok(StorageEntry::MultiSlot {
+                    name: raw.name,
+                    description: raw.description,
+                    slots: raw.slots.unwrap(),
+                    values: raw.values.unwrap(),
+                })
+            },
+            _ => Err(D::Error::custom("Could not infer storage entry type from provided fields.")),
+        }
+    }
+}
+
+/// Key-value entry for storage maps.
+// TODO: impl serde
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct MapEntry {
+    key: WordRepresentation,
+    value: WordRepresentation,
+}
+
+#[cfg(test)]
+mod tests {
+    use std::collections::BTreeSet;
+
+    use semver::Version;
+    use toml;
+    use vm_core::Felt;
+
+    use super::*;
+    use crate::accounts::{package::ComponentConfig, AccountType};
+
+    #[test]
+    fn test_storage_entry_serialization() {
+        let array = [
+            FeltRepresentation::SingleHex(Felt::new(0x345125)),
+            FeltRepresentation::SingleHex(Felt::new(0x2af)),
+            FeltRepresentation::SingleHex(Felt::new(0xdba3)),
+            FeltRepresentation::SingleHex(Felt::new(0xfffeeff)),
+        ];
+        let storage = vec![
+            StorageEntry::Value {
+                name: "slot0".into(),
+                description: Some("First slot".into()),
+                slot: 0,
+                value: WordRepresentation::SingleHex(array),
+            },
+            StorageEntry::MultiSlot {
+                name: "multi".into(),
+                description: Some("Multi slot entry".into()),
+                slots: vec![1, 2, 3],
+                values: vec![
+                    WordRepresentation::SingleHex(array),
+                    WordRepresentation::SingleHex(array),
+                    WordRepresentation::Array(array),
+                ],
+            },
+        ];
+
+        let config = ComponentConfig {
+            name: "Test Component".into(),
+            description: "This is a test component".into(),
+            version: Version::parse("1.0.0").unwrap(),
+            targets: BTreeSet::from([AccountType::FungibleFaucet]),
+            storage,
+        };
+
+        let toml = toml::to_string(&config).unwrap();
+
+        std::println!("toml {}", toml);
+
+        let deserialized: ComponentConfig = toml::from_str(&toml).expect("Deserialization failed");
+        assert_eq!(deserialized, config);
+    }
+}
diff --git a/objects/src/accounts/package/storage_entry/word.rs b/objects/src/accounts/package/storage_entry/word.rs
new file mode 100644
index 000000000..3a2a7f239
--- /dev/null
+++ b/objects/src/accounts/package/storage_entry/word.rs
@@ -0,0 +1,174 @@
+use alloc::{
+    string::{String, ToString},
+    vec::Vec,
+};
+use core::fmt;
+
+use serde::{
+    de::{Error as DeError, SeqAccess, Visitor},
+    ser::SerializeSeq,
+    Deserialize, Deserializer, Serialize, Serializer,
+};
+use vm_core::Felt;
+use vm_processor::Digest;
+
+use crate::digest;
+
+// WORDS
+// ================================================================================================
+
+/// Supported word representations in TOML format.
+/// Represents slot values and keys.
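+///
+/// A word may be given either as a single hex string covering the full word, or as an array of
+/// four felt values, e.g. `value = ["0x1", "0x2", "0x3", "0x4"]` (an illustrative placeholder).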
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum WordRepresentation {
+    SingleHex([FeltRepresentation; 4]),
+    Array([FeltRepresentation; 4]),
+}
+
+impl Serialize for WordRepresentation {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        match self {
+            WordRepresentation::SingleHex(word) => {
+                // Ensure that the length of the vector is exactly 4
+                let felts: [Felt; 4] = word
+                    .iter()
+                    .map(|f| (*f).into())
+                    .collect::<Vec<Felt>>()
+                    .try_into()
+                    .map_err(|_| serde::ser::Error::custom("Expected exactly 4 Felt elements"))?;
+                let word = Digest::from(felts);
+                serializer.serialize_str(&word.to_string())
+            },
+
+            WordRepresentation::Array(words) => {
+                let mut seq = serializer.serialize_seq(Some(4))?;
+                for word in words {
+                    seq.serialize_element(word)?;
+                }
+                seq.end()
+            },
+        }
+    }
+}
+
+impl<'de> Deserialize<'de> for WordRepresentation {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        struct WordRepresentationVisitor;
+
+        impl<'de> Visitor<'de> for WordRepresentationVisitor {
+            type Value = WordRepresentation;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str("a single hex/decimal Word or an array of 4 elements")
+            }
+
+            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+            where
+                E: DeError,
+            {
+                let digest = digest!(value);
+                let values: Vec<FeltRepresentation> = digest
+                    .as_elements()
+                    .iter()
+                    .map(|f| FeltRepresentation::SingleHex(*f))
+                    .collect();
+                Ok(WordRepresentation::SingleHex(values.try_into().unwrap()))
+            }
+
+            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
+            where
+                A: SeqAccess<'de>,
+            {
+                let mut elements = Vec::with_capacity(4);
+                while let Some(felt_repr) = seq.next_element::<FeltRepresentation>()? {
+                    elements.push(felt_repr);
+                }
+
+                if elements.len() == 4 {
+                    let array: [FeltRepresentation; 4] =
+                        elements.clone().try_into().map_err(|_| {
+                            DeError::invalid_length(
+                                elements.len(),
+                                &"expected an array of 4 elements",
+                            )
+                        })?;
+                    Ok(WordRepresentation::Array(array))
+                } else {
+                    Err(DeError::invalid_length(elements.len(), &"expected an array of 4 elements"))
+                }
+            }
+        }
+
+        deserializer.deserialize_any(WordRepresentationVisitor)
+    }
+}
+
+// FELTS
+// ================================================================================================
+
+/// Supported felt (field element) representations in TOML format.
+/// A felt can be given as either a hexadecimal or a decimal string.
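+///
+/// For example, `"0x2a"` and `"42"` denote the same field element (values here are illustrative
+/// placeholders); the hex or decimal form used in the input is preserved on re-serialization.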
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum FeltRepresentation {
+    SingleHex(Felt),
+    SingleDecimal(Felt),
+}
+
+impl From<FeltRepresentation> for Felt {
+    fn from(val: FeltRepresentation) -> Self {
+        match val {
+            FeltRepresentation::SingleHex(base_element) => base_element,
+            FeltRepresentation::SingleDecimal(base_element) => base_element,
+        }
+    }
+}
+
+impl Default for FeltRepresentation {
+    fn default() -> Self {
+        FeltRepresentation::SingleHex(Felt::default())
+    }
+}
+
+impl Default for WordRepresentation {
+    fn default() -> Self {
+        WordRepresentation::SingleHex(Default::default())
+    }
+}
+
+impl<'de> Deserialize<'de> for FeltRepresentation {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let value = String::deserialize(deserializer)?;
+        if let Some(hex_str) = value.strip_prefix("0x") {
+            let felt_value = u64::from_str_radix(hex_str, 16).map_err(serde::de::Error::custom)?;
+            Ok(FeltRepresentation::SingleHex(Felt::new(felt_value)))
+        } else if let Ok(decimal_value) = value.parse::<u64>() {
+            Ok(FeltRepresentation::SingleDecimal(Felt::new(decimal_value)))
+        } else {
+            Err(serde::de::Error::custom("Value is not a valid element"))
+        }
+    }
+}
+
+impl Serialize for FeltRepresentation {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        match self {
+            FeltRepresentation::SingleHex(felt) => {
+                let output = format!("0x{:x}", felt.as_int());
+                serializer.serialize_str(&output)
+            },
+            FeltRepresentation::SingleDecimal(felt) => {
+                let output = felt.to_string();
+                serializer.serialize_str(&output)
+            },
+        }
+    }
+}
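+
+// TESTS
+// ================================================================================================
+
+// A minimal round-trip sketch for the felt forms accepted above; `FeltDoc` is a hypothetical
+// helper used only in this test, since TOML requires a table at the top level.
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[derive(Debug, PartialEq, Serialize, Deserialize)]
+    struct FeltDoc {
+        hex: FeltRepresentation,
+        decimal: FeltRepresentation,
+    }
+
+    #[test]
+    fn felt_representation_toml_roundtrip() {
+        // A hex string maps to the hex variant; a decimal string maps to the decimal variant.
+        let parsed: FeltDoc = toml::from_str("hex = \"0x2a\"\ndecimal = \"42\"\n").unwrap();
+        assert_eq!(parsed.hex, FeltRepresentation::SingleHex(Felt::new(42)));
+        assert_eq!(parsed.decimal, FeltRepresentation::SingleDecimal(Felt::new(42)));
+
+        // Serializing and parsing again preserves the chosen representation of each felt.
+        let reparsed: FeltDoc = toml::from_str(&toml::to_string(&parsed).unwrap()).unwrap();
+        assert_eq!(reparsed, parsed);
+    }
+}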