From d7fa86109c434431dad451d355c0602b5d58bb81 Mon Sep 17 00:00:00 2001 From: Bobbin Threadbare Date: Sat, 6 Jul 2024 11:56:14 -0700 Subject: [PATCH 01/11] chore: update crate versions to v0.5.0 --- CHANGELOG.md | 2 ++ Cargo.lock | 32 ++++++++++++++++---------------- Cargo.toml | 14 +++++++------- bin/faucet/Cargo.toml | 2 +- bin/node/Cargo.toml | 2 +- crates/block-producer/Cargo.toml | 2 +- crates/proto/Cargo.toml | 2 +- crates/rpc-proto/Cargo.toml | 2 +- crates/rpc/Cargo.toml | 2 +- crates/store/Cargo.toml | 2 +- crates/utils/Cargo.toml | 2 +- 11 files changed, 33 insertions(+), 31 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8cc3d108d..3e9a8f6f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,7 @@ # Changelog +## 0.5.0 (TBD) + ## 0.4.0 (2024-07-04) ### Features diff --git a/Cargo.lock b/Cargo.lock index c72ad9ed5..22944b529 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1537,7 +1537,7 @@ dependencies = [ [[package]] name = "miden-faucet" -version = "0.4.0" +version = "0.5.0" dependencies = [ "actix-cors", "actix-files", @@ -1573,7 +1573,7 @@ dependencies = [ [[package]] name = "miden-node" -version = "0.4.0" +version = "0.5.0" dependencies = [ "anyhow", "clap", @@ -1594,7 +1594,7 @@ dependencies = [ [[package]] name = "miden-node-block-producer" -version = "0.4.0" +version = "0.5.0" dependencies = [ "async-trait", "figment", @@ -1621,7 +1621,7 @@ dependencies = [ [[package]] name = "miden-node-proto" -version = "0.4.0" +version = "0.5.0" dependencies = [ "hex", "miden-node-utils", @@ -1638,7 +1638,7 @@ dependencies = [ [[package]] name = "miden-node-rpc" -version = "0.4.0" +version = "0.5.0" dependencies = [ "directories", "figment", @@ -1661,7 +1661,7 @@ dependencies = [ [[package]] name = "miden-node-store" -version = "0.4.0" +version = "0.5.0" dependencies = [ "deadpool-sqlite", "directories", @@ -1694,7 +1694,7 @@ dependencies = [ [[package]] name = "miden-node-utils" -version = "0.4.0" +version = "0.5.0" dependencies = [ "anyhow", "figment", @@ -1749,7 +1749,7 @@ dependencies = [ [[package]] name = "miden-rpc-proto" -version = "0.4.0" +version = "0.5.0" [[package]] name = "miden-stdlib" @@ -2809,9 +2809,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c55115c6fbe2d2bef26eb09ad74bde02d8255476fc0c7b515ef09fbb35742d82" +checksum = "ce6b6a2fb3a985e99cebfaefa9faa3024743da73304ca1c683a36429613d3d22" dependencies = [ "tinyvec_macros", ] @@ -3644,27 +3644,27 @@ dependencies = [ [[package]] name = "zstd" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d789b1514203a1120ad2429eae43a7bd32b90976a7bb8a05f7ec02fa88cc23a" +checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "7.1.0" +version = "7.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd99b45c6bc03a018c8b8a86025678c87e55526064e38f9df301989dce7ec0a" +checksum = "fa556e971e7b568dc775c136fc9de8c779b1c2fc3a63defaafadffdbd3181afa" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.11+zstd.1.5.6" +version = "2.0.12+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75652c55c0b6f3e6f12eb786fe1bc960396bf05a1eb3bf1f3691c3610ac2e6d4" +checksum = "0a4e40c320c3cb459d9a9ff6de98cff88f4751ee9275d140e2be94a2b74e4c13" dependencies = [ "cc", "pkg-config", 
diff --git a/Cargo.toml b/Cargo.toml index a93d37bc4..65cb68ad1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,14 +27,14 @@ readme = "README.md" [workspace.dependencies] miden-air = { version = "0.9", default-features = false } miden-lib = { version = "0.4" } -miden-node-block-producer = { path = "crates/block-producer", version = "0.4" } -miden-node-faucet = { path = "bin/faucet", version = "0.4" } -miden-node-proto = { path = "crates/proto", version = "0.4" } -miden-node-rpc = { path = "crates/rpc", version = "0.4" } -miden-node-rpc-proto = { path = "crates/rpc-proto", version = "0.4" } -miden-node-store = { path = "crates/store", version = "0.4" } +miden-node-block-producer = { path = "crates/block-producer", version = "0.5" } +miden-node-faucet = { path = "bin/faucet", version = "0.5" } +miden-node-proto = { path = "crates/proto", version = "0.5" } +miden-node-rpc = { path = "crates/rpc", version = "0.5" } +miden-node-rpc-proto = { path = "crates/rpc-proto", version = "0.5" } +miden-node-store = { path = "crates/store", version = "0.5" } miden-node-test-macro = { path = "crates/test-macro" } -miden-node-utils = { path = "crates/utils", version = "0.4" } +miden-node-utils = { path = "crates/utils", version = "0.5" } miden-objects = { version = "0.4" } miden-processor = { version = "0.9" } miden-stdlib = { version = "0.9", default-features = false } diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index 1c5977c92..9546980b3 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-faucet" -version = "0.4.0" +version = "0.5.0" description = "Miden node token faucet" readme = "README.md" keywords = ["miden", "node", "faucet"] diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index ffb4077cf..19806e9a5 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node" -version = "0.4.0" +version = "0.5.0" description = "Miden node binary" readme.workspace = true keywords = ["miden", "node"] diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 8304281fc..9ac8903b2 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-block-producer" -version = "0.4.0" +version = "0.5.0" description = "Miden node's block producer component" readme = "README.md" keywords = ["miden", "node", "block-producer"] diff --git a/crates/proto/Cargo.toml b/crates/proto/Cargo.toml index cfc0fbc28..436b1632f 100644 --- a/crates/proto/Cargo.toml +++ b/crates/proto/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-proto" -version = "0.4.0" +version = "0.5.0" description = "Miden node message definitions (Store, Block Producer and RPC)" readme = "README.md" keywords = ["miden", "node", "protobuf", "rpc"] diff --git a/crates/rpc-proto/Cargo.toml b/crates/rpc-proto/Cargo.toml index 4bf278a4d..6c068b4cc 100644 --- a/crates/rpc-proto/Cargo.toml +++ b/crates/rpc-proto/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-rpc-proto" -version = "0.4.0" +version = "0.5.0" description = "Miden node RPC message definitions" readme = "README.md" keywords = ["miden", "node", "protobuf", "rpc"] diff --git a/crates/rpc/Cargo.toml b/crates/rpc/Cargo.toml index 2be05b189..4aaf14a1a 100644 --- a/crates/rpc/Cargo.toml +++ b/crates/rpc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-rpc" -version = "0.4.0" +version = "0.5.0" description = "Miden node's front-end RPC server" readme = "README.md" keywords = ["miden", "node", 
"rpc"] diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index dfcaa96b8..526a03ac5 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-store" -version = "0.4.0" +version = "0.5.0" description = "Miden node's state store component" readme = "README.md" keywords = ["miden", "node", "store"] diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index bf8692961..35211e0f9 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-utils" -version = "0.4.0" +version = "0.5.0" description = "Miden node's shared utilities" readme = "README.md" keywords = ["miden", "node", "utils"] From b83614be3744ed47bc5b1102a882e33f7b7ba963 Mon Sep 17 00:00:00 2001 From: Bobbin Threadbare Date: Sat, 6 Jul 2024 12:05:48 -0700 Subject: [PATCH 02/11] chore: updated most dependencies to latest versions --- Cargo.lock | 51 ++++++++++++++++---------------- bin/faucet/Cargo.toml | 14 ++++----- bin/node/Cargo.toml | 2 +- crates/block-producer/Cargo.toml | 2 +- crates/proto/Cargo.toml | 4 +-- crates/store/Cargo.toml | 4 +-- crates/store/src/db/mod.rs | 2 +- 7 files changed, 40 insertions(+), 39 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 22944b529..4298e0a81 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -274,12 +274,6 @@ dependencies = [ "alloc-no-stdlib", ] -[[package]] -name = "allocator-api2" -version = "0.2.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" - [[package]] name = "android-tzdata" version = "0.1.1" @@ -515,7 +509,7 @@ dependencies = [ "bitflags 2.6.0", "cexpr", "clang-sys", - "itertools", + "itertools 0.12.1", "lazy_static", "lazycell", "proc-macro2", @@ -804,11 +798,10 @@ dependencies = [ [[package]] name = "deadpool" -version = "0.10.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb84100978c1c7b37f09ed3ce3e5f843af02c2a2c431bae5b19230dad2c1b490" +checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed" dependencies = [ - "async-trait", "deadpool-runtime", "num_cpus", "tokio", @@ -825,9 +818,9 @@ dependencies = [ [[package]] name = "deadpool-sqlite" -version = "0.7.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8010e36e12f3be22543a5e478b4af20aeead9a700dd69581a5e050a070fc22c" +checksum = "2f9cc6210316f8b7ced394e2a5d2833ce7097fb28afb5881299c61bc18e8e0e9" dependencies = [ "deadpool", "deadpool-sync", @@ -1103,14 +1096,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", - "allocator-api2", ] [[package]] name = "hashlink" -version = "0.8.4" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ "hashbrown 0.14.5", ] @@ -1295,6 +1287,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.11" @@ -1380,9 +1381,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version 
= "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" +checksum = "0c10584274047cb335c23d3e61bcef8e323adae7c5c8c760540f73610177fc3f" dependencies = [ "bindgen", "cc", @@ -1598,7 +1599,7 @@ version = "0.5.0" dependencies = [ "async-trait", "figment", - "itertools", + "itertools 0.13.0", "miden-air", "miden-node-proto", "miden-node-store", @@ -1698,7 +1699,7 @@ version = "0.5.0" dependencies = [ "anyhow", "figment", - "itertools", + "itertools 0.12.1", "miden-objects", "serde", "thiserror", @@ -2228,7 +2229,7 @@ checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes", "heck", - "itertools", + "itertools 0.12.1", "log", "multimap", "once_cell", @@ -2248,7 +2249,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools", + "itertools 0.12.1", "proc-macro2", "quote", "syn", @@ -2449,9 +2450,9 @@ checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "rusqlite" -version = "0.30.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a78046161564f5e7cd9008aff3b2990b3850dc8e0349119b98e8f251e099f24d" +checksum = "b838eba278d213a8beaf485bd313fd580ca4505a00d5871caeb1457c55322cae" dependencies = [ "bitflags 2.6.0", "fallible-iterator", @@ -2463,9 +2464,9 @@ dependencies = [ [[package]] name = "rusqlite_migration" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4116d1697de2912db0b72069473dfb025f6c332b4a085ed041d121e8d745aea" +checksum = "55709bc01054c69e2f1cefdc886642b5e6376a8db3c86f761be0c423eebf178b" dependencies = [ "log", "rusqlite", diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index 9546980b3..43cba4b9d 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -17,19 +17,19 @@ repository.workspace = true testing = ["miden-objects/testing", "miden-lib/testing"] [dependencies] -actix-cors = "0.7.0" -actix-files = "0.6.5" -actix-web = "4" -async-mutex = "1.4.0" -clap = { version = "4.3", features = ["derive"] } -derive_more = "0.99.17" +actix-cors = "0.7" +actix-files = "0.6" +actix-web = "4.8" +async-mutex = "1.4" +clap = { version = "4.5", features = ["derive"] } +derive_more = "0.99" figment = { version = "0.10", features = ["toml", "env"] } miden-lib = { workspace = true, features = ["concurrent"] } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } miden-objects = { workspace = true , features = ["concurrent"] } miden-tx = { workspace = true, features = ["concurrent"] } -rand = { version = "0.8.5" } +rand = { version = "0.8" } rand_chacha = "0.3" serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index 19806e9a5..2fa7035b2 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -19,7 +19,7 @@ tracing-forest = ["miden-node-block-producer/tracing-forest"] [dependencies] anyhow = { version = "1.0" } -clap = { version = "4.3", features = ["derive"] } +clap = { version = "4.5", features = ["derive"] } miden-lib = { workspace = true, features = ["concurrent"] } miden-node-block-producer = { workspace = true } miden-node-rpc = { workspace = true } diff --git a/crates/block-producer/Cargo.toml 
b/crates/block-producer/Cargo.toml index 9ac8903b2..4de51ad98 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -17,7 +17,7 @@ tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] async-trait = { version = "0.1" } figment = { version = "0.10", features = ["toml", "env"] } -itertools = { version = "0.12" } +itertools = { version = "0.13" } miden-node-proto = { workspace = true } miden-node-store = { workspace = true } miden-node-utils = { workspace = true } diff --git a/crates/proto/Cargo.toml b/crates/proto/Cargo.toml index 436b1632f..c37b8789e 100644 --- a/crates/proto/Cargo.toml +++ b/crates/proto/Cargo.toml @@ -20,10 +20,10 @@ thiserror = { workspace = true } tonic = { workspace = true } [dev-dependencies] -proptest = { version = "1.2" } +proptest = { version = "1.5" } [build-dependencies] -miette = { version = "7.0", features = ["fancy"] } +miette = { version = "7.2", features = ["fancy"] } prost = { version = "0.12" } prost-build = { version = "0.12" } protox = { version = "0.6" } diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index 526a03ac5..356a7b0e3 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -12,7 +12,7 @@ homepage.workspace = true repository.workspace = true [dependencies] -deadpool-sqlite = { version = "0.7", features = ["rt_tokio_1"] } +deadpool-sqlite = { version = "0.8", features = ["rt_tokio_1"] } directories = { version = "5.0" } figment = { version = "0.10", features = ["toml", "env"] } hex = { version = "0.4" } @@ -22,7 +22,7 @@ miden-node-utils = { workspace = true } miden-objects = { workspace = true } once_cell = { version = "1.18.0" } prost = { version = "0.12" } -rusqlite = { version = "0.30", features = ["array", "buildtime_bindgen", "bundled"] } +rusqlite = { version = "0.31", features = ["array", "buildtime_bindgen", "bundled"] } rusqlite_migration = { version = "1.0" } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index 26a192d54..aa3f3d9a9 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -128,7 +128,7 @@ impl Db { }) .await .map_err(|e| { - HookError::Message(format!("Loading carray module failed: {e}")) + HookError::Message(format!("Loading carray module failed: {e}").into()) })?; Ok(()) From 57eed43347659ed62d9d18fb9f0413e88e9c54b5 Mon Sep 17 00:00:00 2001 From: Bobbin Threadbare Date: Sat, 6 Jul 2024 12:09:50 -0700 Subject: [PATCH 03/11] chore: update tokio dependency to the latest version --- bin/node/Cargo.toml | 2 +- crates/block-producer/Cargo.toml | 4 ++-- crates/rpc/Cargo.toml | 2 +- crates/store/Cargo.toml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index 2fa7035b2..1a2b2ad09 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -28,7 +28,7 @@ miden-node-utils = { workspace = true } miden-objects = { workspace = true } rand_chacha = "0.3" serde = { version = "1.0", features = ["derive"] } -tokio = { version = "1.29", features = ["rt-multi-thread", "net", "macros"] } +tokio = { version = "1.38", features = ["rt-multi-thread", "net", "macros"] } toml = { version = "0.8" } tracing = { workspace = true } tracing-subscriber = { workspace = true } diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 4de51ad98..b9c2e46b1 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -27,7 +27,7 @@ 
miden-stdlib = { workspace = true } miden-tx = { workspace = true } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } -tokio = { version = "1.29", features = ["rt-multi-thread", "net", "macros", "sync", "time"] } +tokio = { version = "1.38", features = ["rt-multi-thread", "net", "macros", "sync", "time"] } toml = { version = "0.8" } tonic = { workspace = true } tracing = { workspace = true } @@ -39,5 +39,5 @@ miden-air = { workspace = true } miden-node-test-macro = { path = "../test-macro" } miden-objects = { workspace = true, features = ["testing"] } once_cell = { version = "1.18" } -tokio = { version = "1.29", features = ["test-util"] } +tokio = { version = "1.38", features = ["test-util"] } winterfell = { version = "0.8" } diff --git a/crates/rpc/Cargo.toml b/crates/rpc/Cargo.toml index 4aaf14a1a..0e87a8a8f 100644 --- a/crates/rpc/Cargo.toml +++ b/crates/rpc/Cargo.toml @@ -23,7 +23,7 @@ miden-objects = { workspace = true } miden-tx = { workspace = true } prost = { version = "0.12" } serde = { version = "1.0", features = ["derive"] } -tokio = { version = "1.29", features = ["rt-multi-thread", "net", "macros"] } +tokio = { version = "1.38", features = ["rt-multi-thread", "net", "macros"] } toml = { version = "0.8" } tonic = { workspace = true } tonic-web = { version = "0.11" } diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index 356a7b0e3..8ea583bd9 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -20,13 +20,13 @@ miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } miden-objects = { workspace = true } -once_cell = { version = "1.18.0" } +once_cell = { version = "1.18" } prost = { version = "0.12" } rusqlite = { version = "0.31", features = ["array", "buildtime_bindgen", "bundled"] } rusqlite_migration = { version = "1.0" } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } -tokio = { version = "1.29", features = ["fs", "net", "macros", "rt-multi-thread"] } +tokio = { version = "1.38", features = ["fs", "net", "macros", "rt-multi-thread"] } toml = { version = "0.8" } tonic = { workspace = true } tracing = { workspace = true } From af14fb6a52a1768b7496b2bb7553593fbb9d8977 Mon Sep 17 00:00:00 2001 From: Mirko von Leipzig <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Mon, 8 Jul 2024 21:56:53 +0200 Subject: [PATCH 04/11] chore: use workspace inheritance for crate versions (#400) This removes the need to edit each crate's toml file when bumping the version. 
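For context on what workspace inheritance buys here: each member crate now declares `version.workspace = true` and resolves its version from the root `[workspace.package]` table at build time, so a single edit bumps every crate. A minimal illustrative check (a hypothetical test, not part of this patch) that a member crate sees the inherited 0.5.0:

// Cargo injects the inherited workspace version via CARGO_PKG_VERSION,
// so any member crate can assert it picked up the workspace-wide bump.
#[cfg(test)]
mod version_inheritance_sketch {
    #[test]
    fn crate_version_matches_workspace() {
        assert_eq!(env!("CARGO_PKG_VERSION"), "0.5.0");
    }
}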
--- Cargo.toml | 1 + bin/faucet/Cargo.toml | 2 +- bin/node/Cargo.toml | 2 +- crates/block-producer/Cargo.toml | 2 +- crates/proto/Cargo.toml | 2 +- crates/rpc-proto/Cargo.toml | 2 +- crates/rpc/Cargo.toml | 2 +- crates/store/Cargo.toml | 2 +- crates/utils/Cargo.toml | 2 +- 9 files changed, 9 insertions(+), 8 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 65cb68ad1..0c6db21f1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,6 +17,7 @@ resolver = "2" [workspace.package] edition = "2021" rust-version = "1.78" +version = "0.5.0" license = "MIT" authors = ["Miden contributors"] homepage = "https://polygon.technology/polygon-miden" diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index 43cba4b9d..037e5c83e 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-faucet" -version = "0.5.0" +version.workspace = true description = "Miden node token faucet" readme = "README.md" keywords = ["miden", "node", "faucet"] diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index 1a2b2ad09..1ec7b1dea 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node" -version = "0.5.0" +version.workspace = true description = "Miden node binary" readme.workspace = true keywords = ["miden", "node"] diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index b9c2e46b1..cc76e910f 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-block-producer" -version = "0.5.0" +version.workspace = true description = "Miden node's block producer component" readme = "README.md" keywords = ["miden", "node", "block-producer"] diff --git a/crates/proto/Cargo.toml b/crates/proto/Cargo.toml index c37b8789e..b2666395f 100644 --- a/crates/proto/Cargo.toml +++ b/crates/proto/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-proto" -version = "0.5.0" +version.workspace = true description = "Miden node message definitions (Store, Block Producer and RPC)" readme = "README.md" keywords = ["miden", "node", "protobuf", "rpc"] diff --git a/crates/rpc-proto/Cargo.toml b/crates/rpc-proto/Cargo.toml index 6c068b4cc..39da27233 100644 --- a/crates/rpc-proto/Cargo.toml +++ b/crates/rpc-proto/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-rpc-proto" -version = "0.5.0" +version.workspace = true description = "Miden node RPC message definitions" readme = "README.md" keywords = ["miden", "node", "protobuf", "rpc"] diff --git a/crates/rpc/Cargo.toml b/crates/rpc/Cargo.toml index 0e87a8a8f..a4d779625 100644 --- a/crates/rpc/Cargo.toml +++ b/crates/rpc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-rpc" -version = "0.5.0" +version.workspace = true description = "Miden node's front-end RPC server" readme = "README.md" keywords = ["miden", "node", "rpc"] diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index 8ea583bd9..e22d89f60 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-store" -version = "0.5.0" +version.workspace = true description = "Miden node's state store component" readme = "README.md" keywords = ["miden", "node", "store"] diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 35211e0f9..9f6a8fe6e 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-node-utils" -version = "0.5.0" +version.workspace = true description = "Miden node's shared utilities" readme = "README.md" 
keywords = ["miden", "node", "utils"] From 2a250f9246a88d3d25c9b14608298990f8ec5bf5 Mon Sep 17 00:00:00 2001 From: Mirko von Leipzig <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Thu, 11 Jul 2024 10:30:51 +0200 Subject: [PATCH 05/11] feat(node): normalize configuration (#401) * refactor: move config extration into load_config * feat(node): load config separate per component This will let us specialize NodeConfig as currently it is supporting multiple things. * feat(node): normalize NodeConfig Remove redundant properties from the config options. This prevents user issues as only a single source of truth is allowed. * fix: configs ignore unknown properties Ignoring unknown configuration fields can lead to confusion as the user has no indication that it was ignored. Unknown properties now result in an error. * chore(node): update miden-node.toml config file * chore: update changelog --- CHANGELOG.md | 5 ++ bin/faucet/src/config.rs | 1 + bin/faucet/src/main.rs | 3 +- bin/node/src/commands/genesis/mod.rs | 2 +- bin/node/src/commands/start.rs | 12 ++-- bin/node/src/config.rs | 100 +++++++++++++++++++-------- bin/node/src/main.rs | 37 +++++----- config/miden-node.toml | 3 - crates/block-producer/src/config.rs | 1 + crates/rpc/src/config.rs | 1 + crates/store/src/config.rs | 1 + crates/utils/src/config.rs | 6 +- 12 files changed, 108 insertions(+), 64 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e9a8f6f0..bdc8b4404 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,11 @@ ## 0.5.0 (TBD) +### Enhancements + +* [BREAKING] Configuration files with unknown properties are now rejected (#401). +* [BREAKING] Removed redundant node configuration properties (#401). + ## 0.4.0 (2024-07-04) ### Features diff --git a/bin/faucet/src/config.rs b/bin/faucet/src/config.rs index 316b06200..fc8d12dff 100644 --- a/bin/faucet/src/config.rs +++ b/bin/faucet/src/config.rs @@ -10,6 +10,7 @@ use serde::{Deserialize, Serialize}; // ================================================================================================ #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] pub struct FaucetConfig { /// Endpoint of the faucet pub endpoint: Endpoint, diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index f501c1b7d..84933d1d8 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -66,8 +66,7 @@ async fn main() -> Result<(), FaucetError> { match &cli.command { Command::Start { config } => { - let config: FaucetConfig = load_config(config.as_path()) - .extract() + let config: FaucetConfig = load_config(config) .map_err(|err| FaucetError::ConfigurationError(err.to_string()))?; let faucet_state = FaucetState::new(config.clone()).await?; diff --git a/bin/node/src/commands/genesis/mod.rs b/bin/node/src/commands/genesis/mod.rs index 0668984b3..6b34f99f8 100644 --- a/bin/node/src/commands/genesis/mod.rs +++ b/bin/node/src/commands/genesis/mod.rs @@ -78,7 +78,7 @@ pub fn make_genesis(inputs_path: &PathBuf, output_path: &PathBuf, force: &bool) }, }; - let genesis_input: GenesisInput = load_config(inputs_path).extract().map_err(|err| { + let genesis_input: GenesisInput = load_config(inputs_path).map_err(|err| { anyhow!("Failed to load {} genesis input file: {err}", inputs_path.display()) })?; info!("Genesis input file: {} has successfully been loaded.", output_path.display()); diff --git a/bin/node/src/commands/start.rs b/bin/node/src/commands/start.rs index 68d231ef1..f52e3777a 100644 --- 
a/bin/node/src/commands/start.rs +++ b/bin/node/src/commands/start.rs @@ -1,6 +1,6 @@ use std::time::Duration; -use anyhow::{anyhow, Context, Result}; +use anyhow::{anyhow, Result}; use miden_node_block_producer::{config::BlockProducerConfig, server as block_producer_server}; use miden_node_rpc::{config::RpcConfig, server as rpc_server}; use miden_node_store::{config::StoreConfig, server as store_server}; @@ -12,20 +12,20 @@ use crate::config::NodeConfig; // =================================================================================================== pub async fn start_node(config: NodeConfig) -> Result<()> { + let (block_producer, rpc, store) = config.into_parts(); + let mut join_set = JoinSet::new(); // Start store - join_set.spawn(start_store(config.store.context("Missing store configuration.")?)); + join_set.spawn(start_store(store)); // Wait for store to start & start block-producer tokio::time::sleep(Duration::from_secs(1)).await; - join_set.spawn(start_block_producer( - config.block_producer.context("Missing block-producer configuration.")?, - )); + join_set.spawn(start_block_producer(block_producer)); // Wait for block-producer to start & start rpc tokio::time::sleep(Duration::from_secs(1)).await; - join_set.spawn(start_rpc(config.rpc.context("Missing rpc configuration.")?)); + join_set.spawn(start_rpc(rpc)); // block on all tasks while let Some(res) = join_set.join_next().await { diff --git a/bin/node/src/config.rs b/bin/node/src/config.rs index 65ee98abd..04c89fa16 100644 --- a/bin/node/src/config.rs +++ b/bin/node/src/config.rs @@ -1,54 +1,98 @@ use miden_node_block_producer::config::BlockProducerConfig; use miden_node_rpc::config::RpcConfig; use miden_node_store::config::StoreConfig; +use miden_node_utils::config::Endpoint; use serde::{Deserialize, Serialize}; /// Node top-level configuration. -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] +#[derive(Clone, Default, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] pub struct NodeConfig { - pub block_producer: Option, - pub rpc: Option, - pub store: Option, + block_producer: NormalizedBlockProducerConfig, + rpc: NormalizedRpcConfig, + store: StoreConfig, +} + +/// A specialized variant of [RpcConfig] with redundant fields within [NodeConfig] removed. +#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +struct NormalizedRpcConfig { + endpoint: Endpoint, +} + +/// A specialized variant of [BlockProducerConfig] with redundant fields within [NodeConfig] removed. +#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +struct NormalizedBlockProducerConfig { + endpoint: Endpoint, + verify_tx_proofs: bool, +} + +impl Default for NormalizedRpcConfig { + fn default() -> Self { + // Ensure we stay in sync with the original defaults. + let RpcConfig { + endpoint, + store_url: _, + block_producer_url: _, + } = RpcConfig::default(); + Self { endpoint } + } } -impl Default for NodeConfig { +impl Default for NormalizedBlockProducerConfig { fn default() -> Self { - Self { - block_producer: Some(Default::default()), - rpc: Some(Default::default()), - store: Some(Default::default()), - } + // Ensure we stay in sync with the original defaults. 
+ let BlockProducerConfig { endpoint, store_url: _, verify_tx_proofs } = + BlockProducerConfig::default(); + Self { endpoint, verify_tx_proofs } + } +} + +impl NodeConfig { + pub fn into_parts(self) -> (BlockProducerConfig, RpcConfig, StoreConfig) { + let Self { block_producer, rpc, store } = self; + + let block_producer = BlockProducerConfig { + endpoint: block_producer.endpoint, + store_url: store.endpoint_url(), + verify_tx_proofs: block_producer.verify_tx_proofs, + }; + + let rpc = RpcConfig { + endpoint: rpc.endpoint, + store_url: store.endpoint_url(), + block_producer_url: block_producer.endpoint_url(), + }; + + (block_producer, rpc, store) } } #[cfg(test)] mod tests { - use std::path::PathBuf; - use figment::Jail; - use miden_node_block_producer::config::BlockProducerConfig; - use miden_node_rpc::config::RpcConfig; use miden_node_store::config::StoreConfig; use miden_node_utils::config::{load_config, Endpoint}; use super::NodeConfig; - use crate::NODE_CONFIG_FILE_PATH; + use crate::{ + config::{NormalizedBlockProducerConfig, NormalizedRpcConfig}, + NODE_CONFIG_FILE_PATH, + }; #[test] - fn test_node_config() { + fn node_config() { Jail::expect_with(|jail| { jail.create_file( NODE_CONFIG_FILE_PATH, r#" [block_producer] endpoint = { host = "127.0.0.1", port = 8080 } - store_url = "http://store:8000" verify_tx_proofs = true [rpc] endpoint = { host = "127.0.0.1", port = 8080 } - store_url = "http://store:8000" - block_producer_url = "http://block_producer:8001" [store] endpoint = { host = "127.0.0.1", port = 8080 } @@ -58,29 +102,25 @@ mod tests { "#, )?; - let config: NodeConfig = - load_config(PathBuf::from(NODE_CONFIG_FILE_PATH).as_path()).extract()?; + let config: NodeConfig = load_config(NODE_CONFIG_FILE_PATH)?; assert_eq!( config, NodeConfig { - block_producer: Some(BlockProducerConfig { + block_producer: NormalizedBlockProducerConfig { endpoint: Endpoint { host: "127.0.0.1".to_string(), port: 8080, }, - store_url: "http://store:8000".to_string(), verify_tx_proofs: true - }), - rpc: Some(RpcConfig { + }, + rpc: NormalizedRpcConfig { endpoint: Endpoint { host: "127.0.0.1".to_string(), port: 8080, }, - store_url: "http://store:8000".to_string(), - block_producer_url: "http://block_producer:8001".to_string(), - }), - store: Some(StoreConfig { + }, + store: StoreConfig { endpoint: Endpoint { host: "127.0.0.1".to_string(), port: 8080, @@ -88,7 +128,7 @@ mod tests { database_filepath: "local.sqlite3".into(), genesis_filepath: "genesis.dat".into(), blockstore_dir: "blocks".into() - }), + }, } ); diff --git a/bin/node/src/main.rs b/bin/node/src/main.rs index e8a014bd3..f05f04a6d 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -6,7 +6,6 @@ use commands::{ init::init_config_files, start::{start_block_producer, start_node, start_rpc, start_store}, }; -use config::NodeConfig; use miden_node_utils::config::load_config; mod commands; @@ -87,25 +86,23 @@ async fn main() -> anyhow::Result<()> { let cli = Cli::parse(); match &cli.command { - Command::Start { command, config } => { - let config: NodeConfig = load_config(config).extract().map_err(|err| { - anyhow!("failed to load config file `{}`: {err}", config.display()) - })?; - match command { - StartCommand::Node => start_node(config).await, - StartCommand::BlockProducer => { - start_block_producer( - config.block_producer.context("Missing block-producer configuration.")?, - ) - .await - }, - StartCommand::Rpc => { - start_rpc(config.rpc.context("Missing rpc configuration.")?).await - }, - StartCommand::Store => { - 
start_store(config.store.context("Missing store configuration.")?).await - }, - } + Command::Start { command, config } => match command { + StartCommand::Node => { + let config = load_config(config).context("Loading configuration file")?; + start_node(config).await + }, + StartCommand::BlockProducer => { + let config = load_config(config).context("Loading configuration file")?; + start_block_producer(config).await + }, + StartCommand::Rpc => { + let config = load_config(config).context("Loading configuration file")?; + start_rpc(config).await + }, + StartCommand::Store => { + let config = load_config(config).context("Loading configuration file")?; + start_store(config).await + }, }, Command::MakeGenesis { output_path, force, inputs_path } => { commands::make_genesis(inputs_path, output_path, force) diff --git a/config/miden-node.toml b/config/miden-node.toml index f39aa829e..b4b133c19 100644 --- a/config/miden-node.toml +++ b/config/miden-node.toml @@ -3,7 +3,6 @@ [block_producer] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-block-producer', 1)) % 2**16 endpoint = { host = "localhost", port = 48046 } -store_url = "http://localhost:28943" # enables or disables the verification of transaction proofs before they are accepted into the # transaction queue. verify_tx_proofs = true @@ -11,8 +10,6 @@ verify_tx_proofs = true [rpc] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-rpc', 1)) % 2**16 endpoint = { host = "0.0.0.0", port = 57291 } -block_producer_url = "http://localhost:48046" -store_url = "http://localhost:28943" [store] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-store', 1)) % 2**16 diff --git a/crates/block-producer/src/config.rs b/crates/block-producer/src/config.rs index c54541cc6..e95b9fe8b 100644 --- a/crates/block-producer/src/config.rs +++ b/crates/block-producer/src/config.rs @@ -8,6 +8,7 @@ use serde::{Deserialize, Serialize}; /// Block producer specific configuration #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] pub struct BlockProducerConfig { pub endpoint: Endpoint, diff --git a/crates/rpc/src/config.rs b/crates/rpc/src/config.rs index dbdedb91d..7cd6e6448 100644 --- a/crates/rpc/src/config.rs +++ b/crates/rpc/src/config.rs @@ -9,6 +9,7 @@ use serde::{Deserialize, Serialize}; // ================================================================================================ #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] pub struct RpcConfig { pub endpoint: Endpoint, /// Store gRPC endpoint in the format `http://[:]`. diff --git a/crates/store/src/config.rs b/crates/store/src/config.rs index 0cc92b569..447b5721d 100644 --- a/crates/store/src/config.rs +++ b/crates/store/src/config.rs @@ -10,6 +10,7 @@ use serde::{Deserialize, Serialize}; // ================================================================================================ #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] pub struct StoreConfig { /// Defines the listening socket. pub endpoint: Endpoint, diff --git a/crates/utils/src/config.rs b/crates/utils/src/config.rs index 2b0ecb949..c350fd36b 100644 --- a/crates/utils/src/config.rs +++ b/crates/utils/src/config.rs @@ -51,6 +51,8 @@ impl Display for Endpoint { /// relative, searches in parent directories all the way to the root as well. 
/// /// The above configuration options are indented to support easy of packaging and deployment. -pub fn load_config(config_file: &Path) -> Figment { - Figment::from(Toml::file(config_file)) +pub fn load_config Deserialize<'a>>( + config_file: impl AsRef, +) -> figment::Result { + Figment::from(Toml::file(config_file.as_ref())).extract() } From 90e510de3ff27d0f606c1c77012e31ad4d4c6ba0 Mon Sep 17 00:00:00 2001 From: Mirko von Leipzig <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Mon, 15 Jul 2024 19:44:19 +0200 Subject: [PATCH 06/11] feat(block-producer): nullifier map type safety (#406) Use `Option` instead of relying on zero to indicate none. --- CHANGELOG.md | 1 + crates/block-producer/src/state_view/mod.rs | 2 +- crates/block-producer/src/store/mod.rs | 28 +++++++++++++++---- crates/block-producer/src/test_utils/store.rs | 7 +++-- 4 files changed, 29 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bdc8b4404..43e720453 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ * [BREAKING] Configuration files with unknown properties are now rejected (#401). * [BREAKING] Removed redundant node configuration properties (#401). +* Improve type safety of the transaction inputs nullifier mapping (#406). ## 0.4.0 (2024-07-04) diff --git a/crates/block-producer/src/state_view/mod.rs b/crates/block-producer/src/state_view/mod.rs index 192892cef..3f819a08f 100644 --- a/crates/block-producer/src/state_view/mod.rs +++ b/crates/block-producer/src/state_view/mod.rs @@ -252,7 +252,7 @@ fn ensure_tx_inputs_constraints( let infracting_nullifiers: Vec = tx_inputs .nullifiers .into_iter() - .filter_map(|(nullifier_in_tx, block_num)| (block_num != 0).then_some(nullifier_in_tx)) + .filter_map(|(nullifier_in_tx, block_num)| block_num.is_some().then_some(nullifier_in_tx)) .collect(); if !infracting_nullifiers.is_empty() { diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index b7bbc633f..2a7e9c419 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -1,9 +1,11 @@ use std::{ collections::BTreeMap, fmt::{Display, Formatter}, + num::NonZeroU32, }; use async_trait::async_trait; +use itertools::Itertools; use miden_node_proto::{ errors::{ConversionError, MissingFieldHelper}, generated::{ @@ -17,7 +19,7 @@ use miden_node_proto::{ }, AccountState, }; -use miden_node_utils::formatting::{format_map, format_opt}; +use miden_node_utils::formatting::format_opt; use miden_objects::{ accounts::AccountId, block::Block, @@ -80,20 +82,33 @@ pub struct TransactionInputs { pub account_id: AccountId, /// The account hash in the store corresponding to tx's account ID pub account_hash: Option, - /// Maps each consumed notes' nullifier to block number, where the note is consumed - /// (`zero` means, that note isn't consumed yet) - pub nullifiers: BTreeMap, + /// Maps each consumed notes' nullifier to block number, where the note is consumed. + /// + /// We use NonZeroU32 as the wire format uses 0 to encode none. 
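To make the encoding described in the comment above concrete: on the wire a block number of 0 still means "not consumed yet", and `NonZeroU32::new` collapses that sentinel into `None` when the map is built. A self-contained sketch of the round trip, using plain `u32` stand-ins for nullifiers (the names and types here are illustrative, not the node's API):

use std::{collections::BTreeMap, num::NonZeroU32};

// Decode the wire representation: a block number of 0 means "not consumed yet".
fn decode(wire: &[(u32, u32)]) -> BTreeMap<u32, Option<NonZeroU32>> {
    wire.iter()
        .map(|&(nullifier, block_num)| (nullifier, NonZeroU32::new(block_num)))
        .collect()
}

// Encode back to the wire representation, turning None into the 0 sentinel.
fn encode(map: &BTreeMap<u32, Option<NonZeroU32>>) -> Vec<(u32, u32)> {
    map.iter()
        .map(|(&nullifier, &block_num)| (nullifier, block_num.map(NonZeroU32::get).unwrap_or(0)))
        .collect()
}

fn main() {
    let wire = vec![(1, 0), (2, 17)];
    let decoded = decode(&wire);
    // Nullifier 1 has no consuming block; nullifier 2 was consumed in block 17.
    assert_eq!(decoded[&1], None);
    assert_eq!(decoded[&2], NonZeroU32::new(17));
    assert_eq!(encode(&decoded), wire);
    println!("round trip ok");
}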
+ pub nullifiers: BTreeMap>, /// List of unauthenticated notes that were not found in the store pub missing_unauthenticated_notes: Vec, } impl Display for TransactionInputs { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let nullifiers = self + .nullifiers + .iter() + .map(|(k, v)| format!("{k}: {}", format_opt(v.as_ref()))) + .join(", "); + + let nullifiers = if nullifiers.is_empty() { + "None".to_owned() + } else { + format!("{{ {} }}", nullifiers) + }; + f.write_fmt(format_args!( "{{ account_id: {}, account_hash: {}, nullifiers: {} }}", self.account_id, format_opt(self.account_hash.as_ref()), - format_map(&self.nullifiers) + nullifiers )) } } @@ -114,7 +129,8 @@ impl TryFrom for TransactionInputs { .ok_or(NullifierTransactionInputRecord::missing_field(stringify!(nullifier)))? .try_into()?; - nullifiers.insert(nullifier, nullifier_record.block_num); + // Note that this intentionally maps 0 to None as this is the definition used in protobuf. + nullifiers.insert(nullifier, NonZeroU32::new(nullifier_record.block_num)); } let missing_unauthenticated_notes = response diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index 3280e515f..8d684fb7b 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -1,4 +1,7 @@ -use std::collections::{BTreeMap, BTreeSet}; +use std::{ + collections::{BTreeMap, BTreeSet}, + num::NonZeroU32, +}; use async_trait::async_trait; use miden_objects::{ @@ -224,7 +227,7 @@ impl Store for MockStoreSuccess { let nullifier = commitment.nullifier(); let nullifier_value = locked_produced_nullifiers.get_value(&nullifier.inner()); - (nullifier, nullifier_value[0].inner() as u32) + (nullifier, NonZeroU32::new(nullifier_value[0].inner() as u32)) }) .collect(); From ab1bf874ddc90f74fb3210364301e603db4f5b97 Mon Sep 17 00:00:00 2001 From: Mirko von Leipzig <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Tue, 16 Jul 2024 18:21:59 +0200 Subject: [PATCH 07/11] feat(faucet): embed static website (#411) This removes the need to separately bundle this resource, simplifying the deployment process. The server url is now determined using the webpage url instead of being hardcoded to localhost. This means we no longer have to manually edit it to whatever IP it is being served on. --- CHANGELOG.md | 1 + Cargo.lock | 86 ++++++++++++++++++++-------------- bin/faucet/Cargo.toml | 6 ++- bin/faucet/build.rs | 12 +++++ bin/faucet/src/main.rs | 16 ++++--- bin/faucet/src/static/index.js | 4 +- 6 files changed, 79 insertions(+), 46 deletions(-) create mode 100644 bin/faucet/build.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index 43e720453..6c10b0224 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ * [BREAKING] Configuration files with unknown properties are now rejected (#401). * [BREAKING] Removed redundant node configuration properties (#401). * Improve type safety of the transaction inputs nullifier mapping (#406). +* Embed the faucet's static website resources (#411). 
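For reference, the pieces introduced by the faucet patch fit together roughly as follows: build.rs embeds the `static` directory via `static_files::resource_dir(..).build()`, and the server mounts the generated resource map with `actix-web-static-files`. A condensed sketch assuming those two dependencies and the build script from this patch (the bind address and port are placeholders):

use actix_web::{App, HttpServer};
use actix_web_static_files::ResourceFiles;

// build.rs writes this module into OUT_DIR; `generate()` returns the embedded
// file map keyed by path relative to the static directory.
mod static_resources {
    include!(concat!(env!("OUT_DIR"), "/generated.rs"));
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        // Serve the embedded files at the web root; no files on disk are needed at runtime.
        App::new().service(ResourceFiles::new("/", static_resources::generate()))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}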
## 0.4.0 (2024-07-04) diff --git a/Cargo.lock b/Cargo.lock index 4298e0a81..7332ba071 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -34,29 +34,6 @@ dependencies = [ "smallvec", ] -[[package]] -name = "actix-files" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0773d59061dedb49a8aed04c67291b9d8cf2fe0b60130a381aab53c6dd86e9be" -dependencies = [ - "actix-http", - "actix-service", - "actix-utils", - "actix-web", - "bitflags 2.6.0", - "bytes", - "derive_more", - "futures-core", - "http-range", - "log", - "mime", - "mime_guess", - "percent-encoding", - "pin-project-lite", - "v_htmlescape", -] - [[package]] name = "actix-http" version = "3.8.0" @@ -222,6 +199,18 @@ dependencies = [ "syn", ] +[[package]] +name = "actix-web-static-files" +version = "4.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adf6d1ef6d7a60e084f9e0595e2a5234abda14e76c105ecf8e2d0e8800c41a1f" +dependencies = [ + "actix-web", + "derive_more", + "futures-util", + "static-files", +] + [[package]] name = "addr2line" version = "0.22.0" @@ -643,6 +632,16 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "change-detection" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "159fa412eae48a1d94d0b9ecdb85c97ce56eb2a347c62394d3fdbf221adabc1a" +dependencies = [ + "path-matchers", + "path-slash", +] + [[package]] name = "chrono" version = "0.4.38" @@ -1147,12 +1146,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "http-range" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21dec9db110f5f872ed9699c3ecf50cf16f423502706ba5c72462e28d3157573" - [[package]] name = "http-range-header" version = "0.3.1" @@ -1541,8 +1534,8 @@ name = "miden-faucet" version = "0.5.0" dependencies = [ "actix-cors", - "actix-files", "actix-web", + "actix-web-static-files", "async-mutex", "clap", "derive_more", @@ -1555,6 +1548,7 @@ dependencies = [ "rand", "rand_chacha", "serde", + "static-files", "thiserror", "toml", "tonic", @@ -2061,6 +2055,21 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "path-matchers" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36cd9b72a47679ec193a5f0229d9ab686b7bd45e1fbc59ccf953c9f3d83f7b2b" +dependencies = [ + "glob", +] + +[[package]] +name = "path-slash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "498a099351efa4becc6a19c72aa9270598e8fd274ca47052e37455241c88b696" + [[package]] name = "pear" version = "0.2.9" @@ -2670,6 +2679,17 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "static-files" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e8590e848e1c53be9258210bcd4a8f4118e08988f03a4e2d63b62e4ad9f7ced" +dependencies = [ + "change-detection", + "mime_guess", + "path-slash", +] + [[package]] name = "strsim" version = "0.11.1" @@ -3216,12 +3236,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" -[[package]] -name = "v_htmlescape" -version = "0.15.8" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e8257fbc510f0a46eb602c10215901938b5c2a7d5e70fc11483b1d3c9b5b18c" - [[package]] name = "valuable" version = "0.1.0" diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index 037e5c83e..fc3f39978 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -18,8 +18,8 @@ testing = ["miden-objects/testing", "miden-lib/testing"] [dependencies] actix-cors = "0.7" -actix-files = "0.6" actix-web = "4.8" +actix-web-static-files = "4.0" async-mutex = "1.4" clap = { version = "4.5", features = ["derive"] } derive_more = "0.99" @@ -32,7 +32,11 @@ miden-tx = { workspace = true, features = ["concurrent"] } rand = { version = "0.8" } rand_chacha = "0.3" serde = { version = "1.0", features = ["derive"] } +static-files = "0.2" thiserror = { workspace = true } toml = { version = "0.8" } tonic = { workspace = true } tracing = { workspace = true } + +[build-dependencies] +static-files = "0.2" diff --git a/bin/faucet/build.rs b/bin/faucet/build.rs new file mode 100644 index 000000000..c73a75b49 --- /dev/null +++ b/bin/faucet/build.rs @@ -0,0 +1,12 @@ +use std::str::FromStr; + +fn main() -> std::io::Result<()> { + // The location of our static faucet website files. + let static_dir = std::path::PathBuf::from_str(std::env!("CARGO_MANIFEST_DIR")) + .unwrap() + .join("src") + .join("static"); + println!("cargo::rerun-if-changed={}", static_dir.to_str().expect("Valid utf-8")); + // This makes the static files available as an embedded resource. + static_files::resource_dir(static_dir).build() +} diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index 84933d1d8..b29297e13 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -7,7 +7,6 @@ mod state; use std::{fs::File, io::Write, path::PathBuf}; use actix_cors::Cors; -use actix_files::Files; use actix_web::{ middleware::{DefaultHeaders, Logger}, web, App, HttpServer, @@ -84,12 +83,10 @@ async fn main() -> Result<(), FaucetError> { .wrap(DefaultHeaders::new().add(("Cache-Control", "no-cache"))) .service(get_metadata) .service(get_tokens) - .service( - Files::new("/", "bin/faucet/src/static") - .use_etag(false) - .use_last_modified(false) - .index_file("index.html"), - ) + .service(actix_web_static_files::ResourceFiles::new( + "/", + static_resources::generate(), + )) }) .bind((config.endpoint.host, config.endpoint.port)) .map_err(|err| FaucetError::StartError(err.to_string()))? @@ -127,3 +124,8 @@ async fn main() -> Result<(), FaucetError> { Ok(()) } + +/// The static website files embedded by the build.rs script. 
+mod static_resources { + include!(concat!(env!("OUT_DIR"), "/generated.rs")); +} diff --git a/bin/faucet/src/static/index.js b/bin/faucet/src/static/index.js index d069678c4..36ca485c9 100644 --- a/bin/faucet/src/static/index.js +++ b/bin/faucet/src/static/index.js @@ -17,7 +17,7 @@ document.addEventListener('DOMContentLoaded', function () { publicButton.addEventListener('click', () => {handleButtonClick(false)}); function fetchMetadata() { - fetch('http://localhost:8080/get_metadata') + fetch(window.location.href + 'get_metadata') .then(response => response.json()) .then(data => { faucetIdElem.textContent = data.id; @@ -54,7 +54,7 @@ document.addEventListener('DOMContentLoaded', function () { loading.style.display = 'block'; try { - const response = await fetch('http://localhost:8080/get_tokens', { + const response = await fetch(window.location.href + 'get_tokens', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ account_id: accountId, is_private_note: isPrivateNote, asset_amount: parseInt(assetSelect.value)}) From aab0b58a3746d284fc265815516f715598c28465 Mon Sep 17 00:00:00 2001 From: polydez <155382956+polydez@users.noreply.github.com> Date: Fri, 19 Jul 2024 15:22:26 +0500 Subject: [PATCH 08/11] Implemented test for unauthenticated input notes (#408) * tests: refactor, add tests for transaction batch * refactor: move mock methods to `test_utils` * tests: implement tests for batch/block producers and in-flight notes verifications * fix: clippy warnings * tests: fix getting unauthenticated input notes in `MockStoreSuccess` * tests: cache mocked private accounts in order to reuse them on next creations * tests: don't use 0's account index * tests: fix block builder test * format: apply rustfmt after merging * fix: clippy warnings * refactor: address review comments * refactor: accept suggestion from the review Co-authored-by: Mirko von Leipzig <48352201+Mirko-von-Leipzig@users.noreply.github.com> * tests: address review comments --------- Co-authored-by: Mirko von Leipzig <48352201+Mirko-von-Leipzig@users.noreply.github.com> --- Cargo.lock | 3 + crates/block-producer/Cargo.toml | 3 + .../block-producer/src/batch_builder/batch.rs | 166 ++++++++++++++++-- .../block-producer/src/batch_builder/mod.rs | 4 +- .../src/batch_builder/tests/mod.rs | 161 ++++++++++++++++- .../block-producer/src/block_builder/mod.rs | 13 +- .../src/block_builder/prover/tests.rs | 32 ++-- .../block-producer/src/block_builder/tests.rs | 2 +- .../src/state_view/tests/verify_tx.rs | 82 ++++++++- .../block-producer/src/test_utils/account.rs | 59 +++++-- crates/block-producer/src/test_utils/batch.rs | 4 +- crates/block-producer/src/test_utils/block.rs | 45 +++-- crates/block-producer/src/test_utils/mod.rs | 6 +- crates/block-producer/src/test_utils/note.rs | 24 +++ .../src/test_utils/proven_tx.rs | 38 +++- crates/block-producer/src/test_utils/store.rs | 92 +++++++--- .../block-producer/src/txqueue/tests/mod.rs | 12 +- 17 files changed, 629 insertions(+), 117 deletions(-) create mode 100644 crates/block-producer/src/test_utils/note.rs diff --git a/Cargo.lock b/Cargo.lock index 7332ba071..557fb2851 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1595,6 +1595,7 @@ dependencies = [ "figment", "itertools 0.13.0", "miden-air", + "miden-lib", "miden-node-proto", "miden-node-store", "miden-node-test-macro", @@ -1604,6 +1605,7 @@ dependencies = [ "miden-stdlib", "miden-tx", "once_cell", + "rand_chacha", "serde", "thiserror", "tokio", @@ -1767,6 +1769,7 @@ dependencies = [ "miden-prover", 
"miden-verifier", "rand", + "rand_chacha", "winter-maybe-async", ] diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index cc76e910f..85d7149e3 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -36,8 +36,11 @@ tracing-subscriber = { workspace = true } [dev-dependencies] figment = { version = "0.10", features = ["toml", "env", "test"] } miden-air = { workspace = true } +miden-lib = { workspace = true, features = ["testing"] } miden-node-test-macro = { path = "../test-macro" } miden-objects = { workspace = true, features = ["testing"] } +miden-tx = { workspace = true, features = ["testing"] } once_cell = { version = "1.18" } +rand_chacha = { version = "0.3", default-features = false } tokio = { version = "1.38", features = ["test-util"] } winterfell = { version = "0.8" } diff --git a/crates/block-producer/src/batch_builder/batch.rs b/crates/block-producer/src/batch_builder/batch.rs index 010312a0f..04dbe72dc 100644 --- a/crates/block-producer/src/batch_builder/batch.rs +++ b/crates/block-producer/src/batch_builder/batch.rs @@ -56,7 +56,7 @@ impl TransactionBatch { #[instrument(target = "miden-block-producer", name = "new_batch", skip_all, err)] pub fn new( txs: Vec, - found_unauthenticated_notes: Option>, + found_unauthenticated_notes: BTreeMap, ) -> Result { let id = Self::compute_id(&txs); @@ -81,8 +81,7 @@ impl TransactionBatch { // Populate batch produced nullifiers and match output notes with corresponding // unauthenticated input notes in the same batch, which are removed from the unauthenticated - // input notes set. We also don't add nullifiers for such output notes to the produced - // nullifiers set. + // input notes set. // // One thing to note: // This still allows transaction `A` to consume an unauthenticated note `x` and output note `y` @@ -97,13 +96,12 @@ impl TransactionBatch { continue; } - match found_unauthenticated_notes { - Some(ref found_notes) => match found_notes.get(&input_note_header.id()) { - Some(_path) => input_note.nullifier().into(), - None => input_note.clone(), - }, - None => input_note.clone(), - } + // If an unauthenticated note was found in the store, transform it to an authenticated one + // (i.e. 
erase additional note details except the nullifier) + found_unauthenticated_notes + .get(&input_note_header.id()) + .map(|_path| InputNoteCommitment::from(input_note.nullifier())) + .unwrap_or_else(|| input_note.clone()) }, None => input_note.clone(), }; @@ -193,6 +191,7 @@ impl TransactionBatch { } } +#[derive(Debug)] struct OutputNoteTracker { output_notes: Vec>, output_note_index: BTreeMap, @@ -244,3 +243,150 @@ impl OutputNoteTracker { self.output_notes.into_iter().flatten().collect() } } + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_utils::{ + mock_proven_tx, + note::{mock_note, mock_output_note, mock_unauthenticated_note_commitment}, + }; + + #[test] + fn test_output_note_tracker_duplicate_output_notes() { + let mut txs = mock_proven_txs(); + + let result = OutputNoteTracker::new(&txs); + assert!( + result.is_ok(), + "Creation of output note tracker was not expected to fail: {result:?}" + ); + + let duplicate_output_note = txs[1].output_notes().get_note(1).clone(); + + txs.push(mock_proven_tx( + 3, + vec![], + vec![duplicate_output_note.clone(), mock_output_note(8), mock_output_note(4)], + )); + + match OutputNoteTracker::new(&txs) { + Err(BuildBatchError::DuplicateOutputNote(note_id, _)) => { + assert_eq!(note_id, duplicate_output_note.id()) + }, + res => panic!("Unexpected result: {res:?}"), + } + } + + #[test] + fn test_output_note_tracker_remove_in_place_consumed_note() { + let txs = mock_proven_txs(); + let mut tracker = OutputNoteTracker::new(&txs).unwrap(); + + let note_to_remove = mock_note(4); + + assert!(tracker.remove_note(note_to_remove.header(), &txs).unwrap()); + assert!(!tracker.remove_note(note_to_remove.header(), &txs).unwrap()); + + // Check that output notes are in the expected order and consumed note was removed + assert_eq!( + tracker.into_notes(), + vec![ + mock_output_note(2), + mock_output_note(3), + mock_output_note(6), + mock_output_note(7), + mock_output_note(8), + ] + ); + } + + #[test] + fn test_duplicate_unauthenticated_notes() { + let mut txs = mock_proven_txs(); + let duplicate_note = mock_note(5); + txs.push(mock_proven_tx(4, vec![duplicate_note.clone()], vec![mock_output_note(9)])); + match TransactionBatch::new(txs, Default::default()) { + Err(BuildBatchError::DuplicateUnauthenticatedNote(note_id, _)) => { + assert_eq!(note_id, duplicate_note.id()) + }, + res => panic!("Unexpected result: {res:?}"), + } + } + + #[test] + fn test_consume_notes_in_place() { + let mut txs = mock_proven_txs(); + let note_to_consume = mock_note(3); + txs.push(mock_proven_tx( + 3, + vec![mock_note(11), note_to_consume, mock_note(13)], + vec![mock_output_note(9), mock_output_note(10)], + )); + + let batch = TransactionBatch::new(txs, Default::default()).unwrap(); + + // One of the unauthenticated notes must be removed from the batch due to the consumption + // of the corresponding output note + let expected_input_notes = vec![ + mock_unauthenticated_note_commitment(1), + mock_unauthenticated_note_commitment(5), + mock_unauthenticated_note_commitment(11), + mock_unauthenticated_note_commitment(13), + ]; + assert_eq!(batch.input_notes, expected_input_notes); + + // One of the output notes must be removed from the batch due to the consumption + // by the corresponding unauthenticated note + let expected_output_notes = vec![ + mock_output_note(2), + mock_output_note(4), + mock_output_note(6), + mock_output_note(7), + mock_output_note(8), + 
mock_output_note(9), + mock_output_note(10), + ]; + assert_eq!(batch.output_notes.len(), expected_output_notes.len()); + assert_eq!(batch.output_notes, expected_output_notes); + + // Ensure all nullifiers match the corresponding input notes' nullifiers + let expected_nullifiers: Vec<_> = + batch.input_notes().iter().map(InputNoteCommitment::nullifier).collect(); + let actual_nullifiers: Vec<_> = batch.produced_nullifiers().collect(); + assert_eq!(actual_nullifiers, expected_nullifiers); + } + + #[test] + fn test_convert_unauthenticated_note_to_authenticated() { + let txs = mock_proven_txs(); + let found_unauthenticated_notes = + BTreeMap::from_iter([(mock_note(5).id(), Default::default())]); + let batch = TransactionBatch::new(txs, found_unauthenticated_notes).unwrap(); + + let expected_input_notes = + vec![mock_unauthenticated_note_commitment(1), mock_note(5).nullifier().into()]; + assert_eq!(batch.input_notes, expected_input_notes); + } + + // UTILITIES + // ============================================================================================= + + fn mock_proven_txs() -> Vec { + vec![ + mock_proven_tx( + 1, + vec![mock_note(1)], + vec![mock_output_note(2), mock_output_note(3), mock_output_note(4)], + ), + mock_proven_tx( + 2, + vec![mock_note(5)], + vec![mock_output_note(6), mock_output_note(7), mock_output_note(8)], + ), + ] + } +} diff --git a/crates/block-producer/src/batch_builder/mod.rs b/crates/block-producer/src/batch_builder/mod.rs index ab54615c4..d66c7a33d 100644 --- a/crates/block-producer/src/batch_builder/mod.rs +++ b/crates/block-producer/src/batch_builder/mod.rs @@ -170,7 +170,7 @@ where // and only then checking against the other ready batches let dangling_notes = self.find_dangling_notes(&txs).await; let found_unauthenticated_notes = match dangling_notes.is_empty() { - true => None, + true => Default::default(), false => { let stored_notes = match self.store.get_note_authentication_info(dangling_notes.iter()).await { @@ -186,7 +186,7 @@ where return Err(BuildBatchError::UnauthenticatedNotesNotFound(missing_notes, txs)); } - Some(stored_notes) + stored_notes }, }; diff --git a/crates/block-producer/src/batch_builder/tests/mod.rs b/crates/block-producer/src/batch_builder/tests/mod.rs index a20686826..173c23016 100644 --- a/crates/block-producer/src/batch_builder/tests/mod.rs +++ b/crates/block-producer/src/batch_builder/tests/mod.rs @@ -4,10 +4,12 @@ use tokio::sync::RwLock; use super::*; use crate::{ + block_builder::DefaultBlockBuilder, errors::BuildBlockError, - test_utils::{MockProvenTxBuilder, MockStoreSuccessBuilder}, + test_utils::{ + note::mock_note, MockPrivateAccount, MockProvenTxBuilder, MockStoreSuccessBuilder, + }, }; - // STRUCTS // ================================================================================================ @@ -146,6 +148,159 @@ async fn test_batches_added_back_to_queue_on_block_build_failure() { assert_eq!(internal_ready_batches.read().await.len(), 3); } +#[tokio::test] +async fn test_batch_builder_find_dangling_notes() { + let store = Arc::new(MockStoreSuccessBuilder::from_accounts(iter::empty()).build()); + let block_builder = Arc::new(BlockBuilderSuccess::default()); + + let batch_builder = Arc::new(DefaultBatchBuilder::new( + store, + block_builder, + DefaultBatchBuilderOptions { + block_frequency: Duration::from_millis(20), + max_batches_per_block: 2, + }, + )); + + let note_1 = mock_note(1); + let note_2 = mock_note(2); + let tx1 = MockProvenTxBuilder::with_account_index(1) + 
.output_notes(vec![OutputNote::Full(note_1.clone())])
+        .build();
+    let tx2 = MockProvenTxBuilder::with_account_index(1)
+        .unauthenticated_notes(vec![note_1.clone()])
+        .output_notes(vec![OutputNote::Full(note_2.clone())])
+        .build();
+
+    let txs = vec![tx1, tx2];
+
+    let dangling_notes = batch_builder.find_dangling_notes(&txs).await;
+    assert_eq!(dangling_notes, vec![], "Note must be present in the same batch");
+
+    batch_builder.build_batch(txs.clone()).await.unwrap();
+
+    let dangling_notes = batch_builder.find_dangling_notes(&txs).await;
+    assert_eq!(dangling_notes, vec![], "Note must be present in the same batch");
+
+    let note_3 = mock_note(3);
+
+    let tx1 = MockProvenTxBuilder::with_account_index(1)
+        .unauthenticated_notes(vec![note_2.clone()])
+        .build();
+    let tx2 = MockProvenTxBuilder::with_account_index(1)
+        .unauthenticated_notes(vec![note_3.clone()])
+        .build();
+
+    let txs = vec![tx1, tx2];
+
+    let dangling_notes = batch_builder.find_dangling_notes(&txs).await;
+    assert_eq!(
+        dangling_notes,
+        vec![note_3.id()],
+        "Only one dangling note must be found before block is built"
+    );
+
+    batch_builder.try_build_block().await;
+
+    let dangling_notes = batch_builder.find_dangling_notes(&txs).await;
+    assert_eq!(
+        dangling_notes,
+        vec![note_2.id(), note_3.id()],
+        "Two dangling notes must be found after block is built"
+    );
+}
+
+#[tokio::test]
+async fn test_block_builder_no_missing_notes() {
+    let account_1: MockPrivateAccount<3> = MockPrivateAccount::from(1);
+    let account_2: MockPrivateAccount<3> = MockPrivateAccount::from(2);
+    let store = Arc::new(
+        MockStoreSuccessBuilder::from_accounts(
+            [account_1, account_2].iter().map(|account| (account.id, account.states[0])),
+        )
+        .build(),
+    );
+    let block_builder = Arc::new(DefaultBlockBuilder::new(Arc::clone(&store), Arc::clone(&store)));
+    let batch_builder = Arc::new(DefaultBatchBuilder::new(
+        store,
+        Arc::clone(&block_builder),
+        DefaultBatchBuilderOptions {
+            block_frequency: Duration::from_millis(20),
+            max_batches_per_block: 2,
+        },
+    ));
+
+    let note_1 = mock_note(1);
+    let note_2 = mock_note(2);
+
+    let tx1 = MockProvenTxBuilder::with_account_index(1)
+        .output_notes(vec![OutputNote::Full(note_1.clone())])
+        .build();
+
+    let tx2 = MockProvenTxBuilder::with_account_index(2)
+        .unauthenticated_notes(vec![note_1.clone()])
+        .output_notes(vec![OutputNote::Full(note_2.clone())])
+        .build();
+
+    let txs = vec![tx1, tx2];
+
+    batch_builder.build_batch(txs.clone()).await.unwrap();
+
+    let build_block_result = batch_builder
+        .block_builder
+        .build_block(&batch_builder.ready_batches.read().await)
+        .await;
+    assert_eq!(build_block_result, Ok(()));
+}
+
+#[tokio::test]
+async fn test_block_builder_fails_if_notes_are_missing() {
+    let accounts: Vec<_> = (1..=4).map(MockPrivateAccount::<3>::from).collect();
+    let notes: Vec<_> = (1..=6).map(mock_note).collect();
+
+    let store = Arc::new(
+        MockStoreSuccessBuilder::from_accounts(
+            accounts.iter().map(|account| (account.id, account.states[0])),
+        )
+        .initial_notes([vec![OutputNote::Full(notes[0].clone())]].iter())
+        .build(),
+    );
+    let block_builder = Arc::new(DefaultBlockBuilder::new(Arc::clone(&store), Arc::clone(&store)));
+    let batch_builder = Arc::new(DefaultBatchBuilder::new(
+        store,
+        Arc::clone(&block_builder),
+        DefaultBatchBuilderOptions {
+            block_frequency: Duration::from_millis(20),
+            max_batches_per_block: 2,
+        },
+    ));
+
+    let tx1 = MockProvenTxBuilder::with_account_index(1)
+        .output_notes(vec![OutputNote::Full(notes[1].clone())])
+        .build();
+
+    let tx2 = 
MockProvenTxBuilder::with_account_index(2) + .unauthenticated_notes(vec![notes[0].clone()]) + .output_notes(vec![OutputNote::Full(notes[2].clone()), OutputNote::Full(notes[3].clone())]) + .build(); + + let tx3 = MockProvenTxBuilder::with_account_index(3) + .unauthenticated_notes(notes.iter().skip(1).cloned().collect()) + .build(); + + let txs = vec![tx1, tx2, tx3]; + + let batch = TransactionBatch::new(txs.clone(), Default::default()).unwrap(); + let build_block_result = batch_builder.block_builder.build_block(&[batch]).await; + assert_eq!( + build_block_result, + Err(BuildBlockError::UnauthenticatedNotesNotFound(vec![ + notes[4].id(), + notes[5].id() + ])) + ); +} + // HELPERS // ================================================================================================ @@ -155,5 +310,5 @@ fn dummy_tx_batch(starting_account_index: u32, num_txs_in_batch: usize) -> Trans MockProvenTxBuilder::with_account_index(starting_account_index + index as u32).build() }) .collect(); - TransactionBatch::new(txs, None).unwrap() + TransactionBatch::new(txs, Default::default()).unwrap() } diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 0c63ffae4..b6a64bd3e 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -109,13 +109,12 @@ where ) .await?; - if block_inputs.found_unauthenticated_notes.len() < dangling_notes.len() { - return Err(BuildBlockError::UnauthenticatedNotesNotFound( - dangling_notes - .difference(&block_inputs.found_unauthenticated_notes) - .copied() - .collect(), - )); + let missing_notes: Vec<_> = dangling_notes + .difference(&block_inputs.found_unauthenticated_notes) + .copied() + .collect(); + if !missing_notes.is_empty() { + return Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes)); } let block_header_witness = BlockWitness::new(block_inputs, batches)?; diff --git a/crates/block-producer/src/block_builder/prover/tests.rs b/crates/block-producer/src/block_builder/prover/tests.rs index a0cf4ea23..1c7580761 100644 --- a/crates/block-producer/src/block_builder/prover/tests.rs +++ b/crates/block-producer/src/block_builder/prover/tests.rs @@ -69,7 +69,7 @@ fn test_block_witness_validation_inconsistent_account_ids() { ) .build(); - TransactionBatch::new(vec![tx], None).unwrap() + TransactionBatch::new(vec![tx], Default::default()).unwrap() }; let batch_2 = { @@ -80,7 +80,7 @@ fn test_block_witness_validation_inconsistent_account_ids() { ) .build(); - TransactionBatch::new(vec![tx], None).unwrap() + TransactionBatch::new(vec![tx], Default::default()).unwrap() }; vec![batch_1, batch_2] @@ -141,7 +141,7 @@ fn test_block_witness_validation_inconsistent_account_hashes() { Digest::default(), ) .build()], - None, + Default::default(), ) .unwrap(); let batch_2 = TransactionBatch::new( @@ -151,7 +151,7 @@ fn test_block_witness_validation_inconsistent_account_hashes() { Digest::default(), ) .build()], - None, + Default::default(), ) .unwrap(); @@ -235,8 +235,8 @@ async fn test_compute_account_root_success() { }) .collect(); - let batch_1 = TransactionBatch::new(txs[..2].to_vec(), None).unwrap(); - let batch_2 = TransactionBatch::new(txs[2..].to_vec(), None).unwrap(); + let batch_1 = TransactionBatch::new(txs[..2].to_vec(), Default::default()).unwrap(); + let batch_2 = TransactionBatch::new(txs[2..].to_vec(), Default::default()).unwrap(); vec![batch_1, batch_2] }; @@ -379,7 +379,7 @@ async fn test_compute_note_root_empty_notes_success() { .unwrap(); let 
batches: Vec = { - let batch = TransactionBatch::new(vec![], None).unwrap(); + let batch = TransactionBatch::new(vec![], Default::default()).unwrap(); vec![batch] }; @@ -447,13 +447,13 @@ async fn test_compute_note_root_success() { .map(|(note, &account_id)| { let note = OutputNote::Header(*note); MockProvenTxBuilder::with_account(account_id, Digest::default(), Digest::default()) - .notes_created(vec![note]) + .output_notes(vec![note]) .build() }) .collect(); - let batch_1 = TransactionBatch::new(txs[..2].to_vec(), None).unwrap(); - let batch_2 = TransactionBatch::new(txs[2..].to_vec(), None).unwrap(); + let batch_1 = TransactionBatch::new(txs[..2].to_vec(), Default::default()).unwrap(); + let batch_2 = TransactionBatch::new(txs[2..].to_vec(), Default::default()).unwrap(); vec![batch_1, batch_2] }; @@ -501,13 +501,13 @@ fn test_block_witness_validation_inconsistent_nullifiers() { let batch_1 = { let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - TransactionBatch::new(vec![tx], None).unwrap() + TransactionBatch::new(vec![tx], Default::default()).unwrap() }; let batch_2 = { let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - TransactionBatch::new(vec![tx], None).unwrap() + TransactionBatch::new(vec![tx], Default::default()).unwrap() }; vec![batch_1, batch_2] @@ -577,13 +577,13 @@ async fn test_compute_nullifier_root_empty_success() { let batch_1 = { let tx = MockProvenTxBuilder::with_account_index(0).build(); - TransactionBatch::new(vec![tx], None).unwrap() + TransactionBatch::new(vec![tx], Default::default()).unwrap() }; let batch_2 = { let tx = MockProvenTxBuilder::with_account_index(1).build(); - TransactionBatch::new(vec![tx], None).unwrap() + TransactionBatch::new(vec![tx], Default::default()).unwrap() }; vec![batch_1, batch_2] @@ -630,13 +630,13 @@ async fn test_compute_nullifier_root_success() { let batch_1 = { let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - TransactionBatch::new(vec![tx], None).unwrap() + TransactionBatch::new(vec![tx], Default::default()).unwrap() }; let batch_2 = { let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - TransactionBatch::new(vec![tx], None).unwrap() + TransactionBatch::new(vec![tx], Default::default()).unwrap() }; vec![batch_1, batch_2] diff --git a/crates/block-producer/src/block_builder/tests.rs b/crates/block-producer/src/block_builder/tests.rs index b146c66d6..c7c82629f 100644 --- a/crates/block-producer/src/block_builder/tests.rs +++ b/crates/block-producer/src/block_builder/tests.rs @@ -37,7 +37,7 @@ async fn test_apply_block_called_nonempty_batches() { ) .build(); - TransactionBatch::new(vec![tx], None).unwrap() + TransactionBatch::new(vec![tx], Default::default()).unwrap() }; vec![batch_1] diff --git a/crates/block-producer/src/state_view/tests/verify_tx.rs b/crates/block-producer/src/state_view/tests/verify_tx.rs index ee6bf3155..faec60205 100644 --- a/crates/block-producer/src/state_view/tests/verify_tx.rs +++ b/crates/block-producer/src/state_view/tests/verify_tx.rs @@ -12,10 +12,11 @@ use std::iter; +use miden_objects::notes::Note; use tokio::task::JoinSet; use super::*; -use crate::test_utils::MockStoreSuccessBuilder; +use crate::test_utils::{block::MockBlockBuilder, note::mock_note, MockStoreSuccessBuilder}; /// Tests the happy path where 3 transactions who modify different accounts and consume different /// notes all verify successfully @@ -198,7 +199,7 @@ async fn test_verify_tx_vt5() { // Notice: 
`consumed_note_in_both_txs` is NOT in the store
     let store = Arc::new(
         MockStoreSuccessBuilder::from_accounts(
-            vec![account_1, account_2]
+            [account_1, account_2]
                 .into_iter()
                 .map(|account| (account.id, account.states[0])),
         )
@@ -228,3 +229,80 @@ async fn test_verify_tx_vt5() {
         Err(VerifyTxError::InputNotesAlreadyConsumed(vec![nullifier_in_both_txs]))
     );
 }
+
+/// Tests that `verify_tx()` succeeds when the unauthenticated input note is found in the in-flight notes
+#[tokio::test]
+#[miden_node_test_macro::enable_logging]
+async fn test_verify_tx_dangling_note_found_in_inflight_notes() {
+    let account_1: MockPrivateAccount<3> = MockPrivateAccount::from(1);
+    let account_2: MockPrivateAccount<3> = MockPrivateAccount::from(2);
+    let store = Arc::new(
+        MockStoreSuccessBuilder::from_accounts(
+            [account_1, account_2]
+                .into_iter()
+                .map(|account| (account.id, account.states[0])),
+        )
+        .build(),
+    );
+    let state_view = DefaultStateView::new(Arc::clone(&store), false);
+
+    let dangling_notes = vec![mock_note(1)];
+    let output_notes = dangling_notes.iter().cloned().map(OutputNote::Full).collect();
+
+    let tx1 = MockProvenTxBuilder::with_account_index(1).output_notes(output_notes).build();
+
+    let verify_tx1_result = state_view.verify_tx(&tx1).await;
+    assert_eq!(verify_tx1_result, Ok(()));
+
+    let tx2 = MockProvenTxBuilder::with_account_index(2)
+        .unauthenticated_notes(dangling_notes.clone())
+        .build();
+
+    let verify_tx2_result = state_view.verify_tx(&tx2).await;
+    assert_eq!(
+        verify_tx2_result,
+        Ok(()),
+        "Dangling unauthenticated notes must be found in the in-flight notes after previous tx verification"
+    );
+}
+
+/// Tests that `verify_tx()` fails when the unauthenticated input note is found neither in the
+/// in-flight notes nor in the store
+#[tokio::test]
+#[miden_node_test_macro::enable_logging]
+async fn test_verify_tx_stored_unauthenticated_notes() {
+    let account_1: MockPrivateAccount<3> = MockPrivateAccount::from(1);
+    let store = Arc::new(
+        MockStoreSuccessBuilder::from_accounts(
+            [account_1].into_iter().map(|account| (account.id, account.states[0])),
+        )
+        .build(),
+    );
+    let dangling_notes = vec![mock_note(1)];
+    let tx1 = MockProvenTxBuilder::with_account_index(1)
+        .unauthenticated_notes(dangling_notes.clone())
+        .build();
+
+    let state_view = DefaultStateView::new(Arc::clone(&store), false);
+
+    let verify_tx1_result = state_view.verify_tx(&tx1).await;
+    assert_eq!(
+        verify_tx1_result,
+        Err(VerifyTxError::UnauthenticatedNotesNotFound(
+            dangling_notes.iter().map(Note::id).collect()
+        )),
+        "Dangling unauthenticated notes must not be found in the store at this point"
+    );
+
+    let output_notes = dangling_notes.into_iter().map(OutputNote::Full).collect();
+    let block = MockBlockBuilder::new(&store).await.created_notes(vec![output_notes]).build();
+
+    store.apply_block(&block).await.unwrap();
+
+    let verify_tx1_result = state_view.verify_tx(&tx1).await;
+    assert_eq!(
+        verify_tx1_result,
+        Ok(()),
+        "Dangling unauthenticated notes must be found in the store after the block is applied"
+    );
+}
diff --git a/crates/block-producer/src/test_utils/account.rs b/crates/block-producer/src/test_utils/account.rs
index 13e8f985a..0facc1bda 100644
--- a/crates/block-producer/src/test_utils/account.rs
+++ b/crates/block-producer/src/test_utils/account.rs
@@ -1,10 +1,16 @@
+use std::{collections::HashMap, ops::Not};
+
 use miden_objects::{
     accounts::{get_account_seed, AccountStorageType, AccountType},
     Hasher,
 };
+use once_cell::sync::Lazy;
 
 use super::*;
 
+pub static MOCK_ACCOUNTS: Lazy>> =
+    
Lazy::new(Default::default); + /// A mock representation fo private accounts. An account starts in state `states[0]`, is modified /// to state `states[1]`, and so on. #[derive(Clone, Copy, Debug)] @@ -16,7 +22,19 @@ pub struct MockPrivateAccount { } impl MockPrivateAccount { - fn new(init_seed: [u8; 32], new_account: bool) -> Self { + fn new(id: AccountId, initial_state: Digest) -> Self { + let mut states = [Digest::default(); NUM_STATES]; + + states[0] = initial_state; + + for idx in 1..NUM_STATES { + states[idx] = Hasher::hash(&states[idx - 1].as_bytes()); + } + + Self { id, states } + } + + fn generate(init_seed: [u8; 32], new_account: bool) -> Self { let account_seed = get_account_seed( init_seed, AccountType::RegularAccountUpdatableCode, @@ -26,20 +44,10 @@ impl MockPrivateAccount { ) .unwrap(); - let mut states = [Digest::default(); NUM_STATES]; - - if !new_account { - states[0] = Hasher::hash(&init_seed); - } - - for idx in 1..NUM_STATES { - states[idx] = Hasher::hash(&states[idx - 1].as_bytes()); - } - - Self { - id: AccountId::new(account_seed, Digest::default(), Digest::default()).unwrap(), - states, - } + Self::new( + AccountId::new(account_seed, Digest::default(), Digest::default()).unwrap(), + new_account.not().then(|| Hasher::hash(&init_seed)).unwrap_or_default(), + ) } } @@ -47,13 +55,26 @@ impl From for MockPrivateAccount { /// Each index gives rise to a different account ID /// Passing index 0 signifies that it's a new account fn from(index: u32) -> Self { + let mut lock = MOCK_ACCOUNTS.lock().expect("Poisoned mutex"); + if let Some(&(account_id, init_state)) = lock.get(&index) { + return Self::new(account_id, init_state); + } + let init_seed: Vec<_> = index.to_be_bytes().into_iter().chain([0u8; 28]).collect(); // using index 0 signifies that it's a new account - if index == 0 { - Self::new(init_seed.try_into().unwrap(), true) + let account = if index == 0 { + Self::generate(init_seed.try_into().unwrap(), true) } else { - Self::new(init_seed.try_into().unwrap(), false) - } + Self::generate(init_seed.try_into().unwrap(), false) + }; + + lock.insert(index, (account.id, account.states[0])); + + account } } + +pub fn mock_account_id(num: u8) -> AccountId { + MockPrivateAccount::<3>::from(num as u32).id +} diff --git a/crates/block-producer/src/test_utils/batch.rs b/crates/block-producer/src/test_utils/batch.rs index f340824d9..889fc8829 100644 --- a/crates/block-producer/src/test_utils/batch.rs +++ b/crates/block-producer/src/test_utils/batch.rs @@ -24,7 +24,7 @@ impl TransactionBatchConstructor for TransactionBatch { }) .collect(); - Self::new(txs, None).unwrap() + Self::new(txs, Default::default()).unwrap() } fn from_txs(starting_account_index: u32, num_txs_in_batch: u64) -> Self { @@ -36,6 +36,6 @@ impl TransactionBatchConstructor for TransactionBatch { }) .collect(); - Self::new(txs, None).unwrap() + Self::new(txs, Default::default()).unwrap() } } diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 9bba6e5bf..44c961014 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -45,6 +45,8 @@ pub async fn build_expected_block_header( store_chain_mmr.peaks(store_chain_mmr.forest()).unwrap().hash_peaks() }; + let note_created_smt = note_created_smt_from_note_batches(block_output_notes(batches.iter())); + // Build header BlockHeader::new( 0, @@ -54,7 +56,7 @@ pub async fn build_expected_block_header( new_account_root, // FIXME: FILL IN CORRECT NULLIFIER ROOT 
Digest::default(), - note_created_smt_from_batches(batches).root(), + note_created_smt.root(), Digest::default(), Digest::default(), 1, @@ -93,7 +95,7 @@ pub struct MockBlockBuilder { last_block_header: BlockHeader, updated_accounts: Option>, - created_note: Option>, + created_notes: Option>, produced_nullifiers: Option>, } @@ -105,7 +107,7 @@ impl MockBlockBuilder { last_block_header: *store.last_block_header.read().await, updated_accounts: None, - created_note: None, + created_notes: None, produced_nullifiers: None, } } @@ -121,6 +123,12 @@ impl MockBlockBuilder { self } + pub fn created_notes(mut self, created_notes: Vec) -> Self { + self.created_notes = Some(created_notes); + + self + } + pub fn produced_nullifiers(mut self, produced_nullifiers: Vec) -> Self { self.produced_nullifiers = Some(produced_nullifiers); @@ -128,7 +136,7 @@ impl MockBlockBuilder { } pub fn build(self) -> Block { - let created_notes = self.created_note.unwrap_or_default(); + let created_notes = self.created_notes.unwrap_or_default(); let header = BlockHeader::new( 0, @@ -153,22 +161,27 @@ impl MockBlockBuilder { } } +pub(crate) fn flatten_output_notes<'a>( + batches: impl Iterator, +) -> impl Iterator { + batches.enumerate().flat_map(|(batch_idx, batch)| { + batch.iter().enumerate().map(move |(note_idx_in_batch, note)| { + (BlockNoteIndex::new(batch_idx, note_idx_in_batch), note) + }) + }) +} + pub(crate) fn note_created_smt_from_note_batches<'a>( - batches: impl Iterator + Clone + 'a)>, + batches: impl Iterator, ) -> BlockNoteTree { - let note_leaf_iterator = batches.enumerate().flat_map(|(batch_idx, batch)| { - batch.clone().into_iter().enumerate().map(move |(note_idx_in_batch, note)| { - ( - BlockNoteIndex::new(batch_idx, note_idx_in_batch), - note.id().into(), - *note.metadata(), - ) - }) - }); + let note_leaf_iterator = flatten_output_notes(batches) + .map(|(index, note)| (index, note.id().into(), *note.metadata())); BlockNoteTree::with_entries(note_leaf_iterator).unwrap() } -pub(crate) fn note_created_smt_from_batches(batches: &[TransactionBatch]) -> BlockNoteTree { - note_created_smt_from_note_batches(batches.iter().map(TransactionBatch::output_notes)) +pub(crate) fn block_output_notes<'a>( + batches: impl Iterator + Clone, +) -> impl Iterator + Clone { + batches.map(TransactionBatch::output_notes) } diff --git a/crates/block-producer/src/test_utils/mod.rs b/crates/block-producer/src/test_utils/mod.rs index bd0a63afd..07a7a4b52 100644 --- a/crates/block-producer/src/test_utils/mod.rs +++ b/crates/block-producer/src/test_utils/mod.rs @@ -5,7 +5,7 @@ use tokio::sync::RwLock; mod proven_tx; -pub use proven_tx::MockProvenTxBuilder; +pub use proven_tx::{mock_proven_tx, MockProvenTxBuilder}; mod store; @@ -13,8 +13,10 @@ pub use store::{MockStoreFailure, MockStoreSuccess, MockStoreSuccessBuilder}; mod account; -pub use account::MockPrivateAccount; +pub use account::{mock_account_id, MockPrivateAccount}; pub mod block; pub mod batch; + +pub mod note; diff --git a/crates/block-producer/src/test_utils/note.rs b/crates/block-producer/src/test_utils/note.rs new file mode 100644 index 000000000..0a3b0a7d2 --- /dev/null +++ b/crates/block-producer/src/test_utils/note.rs @@ -0,0 +1,24 @@ +use miden_lib::transaction::TransactionKernel; +use miden_objects::{ + notes::Note, + testing::notes::NoteBuilder, + transaction::{InputNote, InputNoteCommitment, OutputNote}, +}; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + +use crate::test_utils::account::mock_account_id; + +pub fn mock_note(num: u8) -> Note { + let 
sender = mock_account_id(num); + NoteBuilder::new(sender, ChaCha20Rng::from_seed([num; 32])) + .build(&TransactionKernel::assembler().with_debug_mode(true)) + .unwrap() +} + +pub fn mock_unauthenticated_note_commitment(num: u8) -> InputNoteCommitment { + InputNote::unauthenticated(mock_note(num)).into() +} + +pub fn mock_output_note(num: u8) -> OutputNote { + OutputNote::Full(mock_note(num)) +} diff --git a/crates/block-producer/src/test_utils/proven_tx.rs b/crates/block-producer/src/test_utils/proven_tx.rs index 92ffa5369..3b9cce2c1 100644 --- a/crates/block-producer/src/test_utils/proven_tx.rs +++ b/crates/block-producer/src/test_utils/proven_tx.rs @@ -3,8 +3,8 @@ use std::ops::Range; use miden_air::HashFunction; use miden_objects::{ accounts::AccountId, - notes::{NoteHeader, NoteMetadata, NoteType, Nullifier}, - transaction::{OutputNote, ProvenTransaction, ProvenTransactionBuilder}, + notes::{Note, NoteHeader, NoteMetadata, NoteType, Nullifier}, + transaction::{InputNote, OutputNote, ProvenTransaction, ProvenTransactionBuilder}, vm::ExecutionProof, Digest, Felt, Hasher, ONE, }; @@ -16,7 +16,8 @@ pub struct MockProvenTxBuilder { account_id: AccountId, initial_account_hash: Digest, final_account_hash: Digest, - notes_created: Option>, + output_notes: Option>, + input_notes: Option>, nullifiers: Option>, } @@ -36,19 +37,26 @@ impl MockProvenTxBuilder { account_id, initial_account_hash, final_account_hash, - notes_created: None, + output_notes: None, + input_notes: None, nullifiers: None, } } + pub fn unauthenticated_notes(mut self, notes: Vec) -> Self { + self.input_notes = Some(notes.into_iter().map(InputNote::unauthenticated).collect()); + + self + } + pub fn nullifiers(mut self, nullifiers: Vec) -> Self { self.nullifiers = Some(nullifiers); self } - pub fn notes_created(mut self, notes: Vec) -> Self { - self.notes_created = Some(notes); + pub fn output_notes(mut self, notes: Vec) -> Self { + self.output_notes = Some(notes); self } @@ -76,7 +84,7 @@ impl MockProvenTxBuilder { }) .collect(); - self.notes_created(notes) + self.output_notes(notes) } pub fn build(self) -> ProvenTransaction { @@ -87,9 +95,21 @@ impl MockProvenTxBuilder { Digest::default(), ExecutionProof::new(StarkProof::new_dummy(), HashFunction::Blake3_192), ) - .add_input_notes(self.nullifiers.unwrap_or_default().iter().copied()) - .add_output_notes(self.notes_created.unwrap_or_default()) + .add_input_notes(self.input_notes.unwrap_or_default()) + .add_input_notes(self.nullifiers.unwrap_or_default()) + .add_output_notes(self.output_notes.unwrap_or_default()) .build() .unwrap() } } + +pub fn mock_proven_tx( + account_index: u8, + unauthenticated_notes: Vec, + output_notes: Vec, +) -> ProvenTransaction { + MockProvenTxBuilder::with_account_index(account_index.into()) + .unauthenticated_notes(unauthenticated_notes) + .output_notes(output_notes) + .build() +} diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index 8d684fb7b..d3d6a3ed7 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -1,14 +1,14 @@ use std::{ collections::{BTreeMap, BTreeSet}, num::NonZeroU32, + ops::Not, }; use async_trait::async_trait; use miden_objects::{ - block::{Block, BlockNoteTree}, + block::{Block, BlockNoteTree, NoteBatch}, crypto::merkle::{MerklePath, Mmr, SimpleSmt, Smt, ValuePath}, notes::{NoteId, Nullifier}, - transaction::OutputNote, BlockHeader, ACCOUNT_TREE_DEPTH, EMPTY_WORD, ZERO, }; @@ -20,7 +20,9 @@ use crate::{ store::{ 
ApplyBlock, ApplyBlockError, BlockInputsError, Store, TransactionInputs, TxInputsError, }, - test_utils::block::{note_created_smt_from_batches, note_created_smt_from_note_batches}, + test_utils::block::{ + block_output_notes, flatten_output_notes, note_created_smt_from_note_batches, + }, ProvenTransaction, }; @@ -28,29 +30,31 @@ use crate::{ #[derive(Debug)] pub struct MockStoreSuccessBuilder { accounts: Option>, - notes: Option, + notes: Option>, + note_root: Option, produced_nullifiers: Option>, chain_mmr: Option, block_num: Option, } impl MockStoreSuccessBuilder { - pub fn from_batches<'a>(batches: impl Iterator) -> Self { - let batches: Vec<_> = batches.cloned().collect(); - + pub fn from_batches<'a>( + batches_iter: impl Iterator + Clone, + ) -> Self { let accounts_smt = { - let accounts = batches - .iter() + let accounts = batches_iter + .clone() .flat_map(TransactionBatch::account_initial_states) .map(|(account_id, hash)| (account_id.into(), hash.into())); SimpleSmt::::with_leaves(accounts).unwrap() }; - let created_notes = note_created_smt_from_batches(&batches); + let (note_tree, notes) = Self::populate_note_trees(block_output_notes(batches_iter)); Self { accounts: Some(accounts_smt), - notes: Some(created_notes), + notes: Some(notes), + note_root: Some(note_tree.root()), produced_nullifiers: None, chain_mmr: None, block_num: None, @@ -67,17 +71,18 @@ impl MockStoreSuccessBuilder { Self { accounts: Some(accounts_smt), notes: None, + note_root: None, produced_nullifiers: None, chain_mmr: None, block_num: None, } } - pub fn initial_notes<'a>( - mut self, - notes: impl Iterator + Clone + 'a)>, - ) -> Self { - self.notes = Some(note_created_smt_from_note_batches(notes)); + pub fn initial_notes<'a>(mut self, notes: impl Iterator + Clone) -> Self { + let (note_tree, notes) = Self::populate_note_trees(notes); + + self.notes = Some(notes); + self.note_root = Some(note_tree.root()); self } @@ -100,10 +105,22 @@ impl MockStoreSuccessBuilder { self } + fn populate_note_trees<'a>( + batches_iterator: impl Iterator + Clone, + ) -> (BlockNoteTree, BTreeMap) { + let block_note_tree = note_created_smt_from_note_batches(batches_iterator.clone()); + let note_map = flatten_output_notes(batches_iterator) + .map(|(index, note)| (note.id(), block_note_tree.get_note_path(index).unwrap())) + .collect(); + + (block_note_tree, note_map) + } + pub fn build(self) -> MockStoreSuccess { let block_num = self.block_num.unwrap_or(1); let accounts_smt = self.accounts.unwrap_or(SimpleSmt::new().unwrap()); - let notes_smt = self.notes.unwrap_or_default(); + let notes = self.notes.unwrap_or_default(); + let note_root = self.note_root.unwrap_or_default(); let chain_mmr = self.chain_mmr.unwrap_or_default(); let nullifiers_smt = self .produced_nullifiers @@ -124,7 +141,7 @@ impl MockStoreSuccessBuilder { chain_mmr.peaks(chain_mmr.forest()).unwrap().hash_peaks(), accounts_smt.root(), nullifiers_smt.root(), - notes_smt.root(), + note_root, Digest::default(), Digest::default(), 1, @@ -135,7 +152,8 @@ impl MockStoreSuccessBuilder { produced_nullifiers: Arc::new(RwLock::new(nullifiers_smt)), chain_mmr: Arc::new(RwLock::new(chain_mmr)), last_block_header: Arc::new(RwLock::new(initial_block_header)), - num_apply_block_called: Arc::new(RwLock::new(0)), + num_apply_block_called: Default::default(), + notes: Arc::new(RwLock::new(notes)), } } } @@ -155,6 +173,9 @@ pub struct MockStoreSuccess { /// The number of times `apply_block()` was called pub num_apply_block_called: Arc>, + + /// Maps note id -> note inclusion proof for all 
created notes + pub notes: Arc>>, } impl MockStoreSuccess { @@ -191,6 +212,15 @@ impl ApplyBlock for MockStoreSuccess { chain_mmr.add(block.hash()); } + // build note tree + let note_tree = block.build_note_tree(); + + // update notes + let mut locked_notes = self.notes.write().await; + for (note_index, note) in block.notes() { + locked_notes.insert(note.id(), note_tree.get_note_path(note_index).unwrap_or_default()); + } + // update last block header *self.last_block_header.write().await = block.header(); @@ -231,11 +261,20 @@ impl Store for MockStoreSuccess { }) .collect(); + let locked_notes = self.notes.read().await; + let missing_unauthenticated_notes = proven_tx + .get_unauthenticated_notes() + .filter_map(|header| { + let id = header.id(); + locked_notes.contains_key(&id).not().then_some(id) + }) + .collect(); + Ok(TransactionInputs { account_id: proven_tx.account_id(), account_hash, nullifiers, - missing_unauthenticated_notes: Default::default(), + missing_unauthenticated_notes, }) } @@ -268,7 +307,9 @@ impl Store for MockStoreSuccess { .map(|nullifier| (*nullifier, locked_produced_nullifiers.open(&nullifier.inner()))) .collect(); - let found_unauthenticated_notes = notes.copied().collect(); + let locked_notes = self.notes.read().await; + let found_unauthenticated_notes = + notes.filter(|&id| locked_notes.contains_key(id)).copied().collect(); Ok(BlockInputs { block_header: *self.last_block_header.read().await, @@ -281,9 +322,14 @@ impl Store for MockStoreSuccess { async fn get_note_authentication_info( &self, - _notes: impl Iterator + Send, + notes: impl Iterator + Send, ) -> Result, NotePathsError> { - todo!() + let locked_notes = self.notes.read().await; + let note_auth_info = notes + .filter_map(|note_id| locked_notes.get(note_id).map(|path| (*note_id, path.clone()))) + .collect(); + + Ok(note_auth_info) } } diff --git a/crates/block-producer/src/txqueue/tests/mod.rs b/crates/block-producer/src/txqueue/tests/mod.rs index 079290449..49ac130e6 100644 --- a/crates/block-producer/src/txqueue/tests/mod.rs +++ b/crates/block-producer/src/txqueue/tests/mod.rs @@ -40,8 +40,8 @@ impl BatchBuilderSuccess { #[async_trait] impl BatchBuilder for BatchBuilderSuccess { async fn build_batch(&self, txs: Vec) -> Result<(), BuildBatchError> { - let batch = - TransactionBatch::new(txs, None).expect("Tx batch building should have succeeded"); + let batch = TransactionBatch::new(txs, Default::default()) + .expect("Tx batch building should have succeeded"); self.ready_batches .send(batch) .expect("Sending to channel should have succeeded"); @@ -105,7 +105,8 @@ async fn test_build_batch_success() { receiver.try_recv(), "A single transaction produces a single batch" ); - let expected = TransactionBatch::new(vec![tx.clone()], None).expect("Valid transactions"); + let expected = + TransactionBatch::new(vec![tx.clone()], Default::default()).expect("Valid transactions"); assert_eq!(expected, batch, "The batch should have the one transaction added to the queue"); // a batch will include up to `batch_size` transactions @@ -124,7 +125,7 @@ async fn test_build_batch_success() { receiver.try_recv(), "{batch_size} transactions create a single batch" ); - let expected = TransactionBatch::new(txs, None).expect("Valid transactions"); + let expected = TransactionBatch::new(txs, Default::default()).expect("Valid transactions"); assert_eq!(expected, batch, "The batch should the transactions to fill a batch"); // the transaction queue eagerly produces batches @@ -139,7 +140,8 @@ async fn test_build_batch_success() { for 
expected_batch in txs.chunks(batch_size).map(|txs| txs.to_vec()) { tokio::time::advance(build_batch_frequency).await; let batch = receiver.try_recv().expect("Queue not empty"); - let expected = TransactionBatch::new(expected_batch, None).expect("Valid transactions"); + let expected = + TransactionBatch::new(expected_batch, Default::default()).expect("Valid transactions"); assert_eq!(expected, batch, "The batch should the transactions to fill a batch"); } From ed1edf03424a58fbdd37b7de0f34429a1e14bbd5 Mon Sep 17 00:00:00 2001 From: Paul-Henry Kajfasz <42912740+phklive@users.noreply.github.com> Date: Mon, 22 Jul 2024 18:57:51 +0100 Subject: [PATCH 09/11] chore: added warning on `CHANGELOG.md` & `rust-toolchain.toml` (#413) --- .github/workflows/changelog.yml | 23 +++++++++++++ .github/workflows/lint.yml | 34 ++++++++++++------- CHANGELOG.md | 60 ++++++++++++++++----------------- rust-toolchain | 1 - rust-toolchain.toml | 4 +++ scripts/check-changelog.sh | 21 ++++++++++++ scripts/check-rust-version.sh | 12 ++++--- 7 files changed, 107 insertions(+), 48 deletions(-) create mode 100644 .github/workflows/changelog.yml delete mode 100644 rust-toolchain create mode 100644 rust-toolchain.toml create mode 100755 scripts/check-changelog.sh diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml new file mode 100644 index 000000000..5fde0d8d0 --- /dev/null +++ b/.github/workflows/changelog.yml @@ -0,0 +1,23 @@ +# Runs changelog related jobs. +# CI job heavily inspired by: https://github.com/tarides/changelog-check-action + +name: changelog + +on: + pull_request: + types: [opened, reopened, synchronize] + +jobs: + changelog: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@main + with: + fetch-depth: 0 + - name: Check for changes in changelog + env: + BASE_REF: ${{ github.event.pull_request.base.ref }} + NO_CHANGELOG_LABEL: ${{ contains(github.event.pull_request.labels.*.name, 'no changelog') }} + run: ./scripts/check-changelog.sh "${{ inputs.changelog }}" + shell: bash diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index b8ec225e2..215848440 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -9,17 +9,6 @@ on: types: [opened, reopened, synchronize] jobs: - version: - name: check rust version consistency - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@main - with: - profile: minimal - override: true - - name: check rust versions - run: ./scripts/check-rust-version.sh - rustfmt: name: rustfmt check nightly on ubuntu-latest runs-on: ubuntu-latest @@ -40,4 +29,25 @@ jobs: run: | rustup update --no-self-update nightly rustup +nightly component add clippy - make clippy \ No newline at end of file + make clippy + + doc: + name: doc stable on ubuntu-latest + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@main + - name: Build docs + run: | + rustup update --no-self-update + make doc + + version: + name: check rust version consistency + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@main + with: + profile: minimal + override: true + - name: check rust versions + run: ./scripts/check-rust-version.sh diff --git a/CHANGELOG.md b/CHANGELOG.md index 6c10b0224..2e8b4820d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,53 +4,53 @@ ### Enhancements -* [BREAKING] Configuration files with unknown properties are now rejected (#401). -* [BREAKING] Removed redundant node configuration properties (#401). -* Improve type safety of the transaction inputs nullifier mapping (#406). 
-* Embed the faucet's static website resources (#411). +- [BREAKING] Configuration files with unknown properties are now rejected (#401). +- [BREAKING] Removed redundant node configuration properties (#401). +- Improve type safety of the transaction inputs nullifier mapping (#406). +- Embed the faucet's static website resources (#411). +- Added warning on CI for `CHANGELOG.md` (#413). ## 0.4.0 (2024-07-04) ### Features -* Changed sync endpoint to return a list of committed transactions (#377). -* Added `aux` column to notes table (#384). -* Changed state sync endpoint to return a list of `TransactionSummary` objects instead of just transaction IDs (#386). -* Added support for unauthenticated transaction notes (#390). +- Changed sync endpoint to return a list of committed transactions (#377). +- Added `aux` column to notes table (#384). +- Changed state sync endpoint to return a list of `TransactionSummary` objects instead of just transaction IDs (#386). +- Added support for unauthenticated transaction notes (#390). ### Enhancements -* Standardized CI and Makefile across Miden repositories (#367) -* Removed client dependency from faucet (#368). -* Fixed faucet note script so that it uses the `aux` input (#387). -* Added crate to distribute node RPC protobuf files (#391). -* Add `init` command for node and faucet (#392). - +- Standardized CI and Makefile across Miden repositories (#367) +- Removed client dependency from faucet (#368). +- Fixed faucet note script so that it uses the `aux` input (#387). +- Added crate to distribute node RPC protobuf files (#391). +- Add `init` command for node and faucet (#392). ## 0.3.0 (2024-05-15) -* Added option to mint pulic notes in the faucet (#339). -* Renamed `note_hash` into `note_id` in the database (#336) -* Changed `version` and `timestamp` fields in `Block` message to `u32` (#337). -* [BREAKING] Implemented `NoteMetadata` protobuf message (#338). -* Added `GetBlockByNumber` endpoint (#340). -* Added block authentication data to the `GetBlockHeaderByNumber` RPC (#345). -* Enabled support for HTTP/1.1 requests for the RPC component (#352). +- Added option to mint pulic notes in the faucet (#339). +- Renamed `note_hash` into `note_id` in the database (#336) +- Changed `version` and `timestamp` fields in `Block` message to `u32` (#337). +- [BREAKING] Implemented `NoteMetadata` protobuf message (#338). +- Added `GetBlockByNumber` endpoint (#340). +- Added block authentication data to the `GetBlockHeaderByNumber` RPC (#345). +- Enabled support for HTTP/1.1 requests for the RPC component (#352). ## 0.2.1 (2024-04-27) -* Combined node components into a single binary (#323). +- Combined node components into a single binary (#323). ## 0.2.0 (2024-04-11) -* Implemented Docker-based node deployment (#257). -* Improved build process (#267, #272, #278). -* Implemented Nullifier tree wrapper (#275). -* [BREAKING] Added support for public accounts (#287, #293, #294). -* [BREAKING] Added support for public notes (#300, #310). -* Added `GetNotesById` endpoint (#298). -* Implemented amd64 debian packager (#312). +- Implemented Docker-based node deployment (#257). +- Improved build process (#267, #272, #278). +- Implemented Nullifier tree wrapper (#275). +- [BREAKING] Added support for public accounts (#287, #293, #294). +- [BREAKING] Added support for public notes (#300, #310). +- Added `GetNotesById` endpoint (#298). +- Implemented amd64 debian packager (#312). ## 0.1.0 (2024-03-11) -* Initial release. +- Initial release. 
diff --git a/rust-toolchain b/rust-toolchain
deleted file mode 100644
index 8e95c75da..000000000
--- a/rust-toolchain
+++ /dev/null
@@ -1 +0,0 @@
-1.78
diff --git a/rust-toolchain.toml b/rust-toolchain.toml
new file mode 100644
index 000000000..4305111e7
--- /dev/null
+++ b/rust-toolchain.toml
@@ -0,0 +1,4 @@
+[toolchain]
+channel = "1.78"
+components = ["rustfmt", "rust-src", "clippy"]
+profile = "minimal"
diff --git a/scripts/check-changelog.sh b/scripts/check-changelog.sh
new file mode 100755
index 000000000..dbf14cdbb
--- /dev/null
+++ b/scripts/check-changelog.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+set -uo pipefail
+
+CHANGELOG_FILE="${1:-CHANGELOG.md}"
+
+if [ "${NO_CHANGELOG_LABEL}" = "true" ]; then
+    # 'no changelog' set, so finish successfully
+    echo "\"no changelog\" label has been set"
+    exit 0
+else
+    # a changelog check is required
+    # fail if the diff is empty
+    if git diff --exit-code "origin/${BASE_REF}" -- "${CHANGELOG_FILE}"; then
+        >&2 echo "Changes should come with an entry in the \"CHANGELOG.md\" file. This behavior
+can be overridden by using the \"no changelog\" label, which is used for changes
+that are trivial / explicitly stated not to require a changelog entry."
+        exit 1
+    fi
+
+    echo "The \"CHANGELOG.md\" file has been updated."
+fi
diff --git a/scripts/check-rust-version.sh b/scripts/check-rust-version.sh
index a98dd8bf3..1e795bc0e 100755
--- a/scripts/check-rust-version.sh
+++ b/scripts/check-rust-version.sh
@@ -1,12 +1,14 @@
 #!/bin/bash
 
-# Check rust-toolchain file
-TOOLCHAIN_VERSION=$(cat rust-toolchain)
+# Get rust-toolchain.toml file channel
+TOOLCHAIN_VERSION=$(grep 'channel' rust-toolchain.toml | sed -E 's/.*"(.*)".*/\1/')
 
-# Check workspace Cargo.toml file
-CARGO_VERSION=$(cat Cargo.toml | grep "rust-version" | cut -d '"' -f 2)
+# Get workspace Cargo.toml file rust-version
+CARGO_VERSION=$(grep 'rust-version' Cargo.toml | sed -E 's/.*"(.*)".*/\1/')
+
+# Check version match
 if [ "$CARGO_VERSION" != "$TOOLCHAIN_VERSION" ]; then
-    echo "Mismatch in $file. 
Expected $TOOLCHAIN_VERSION, found $CARGO_VERSION" + echo "Mismatch in Cargo.toml: Expected $TOOLCHAIN_VERSION, found $CARGO_VERSION" exit 1 fi From bc5da6b49ed121681b84a9ab970c76ed0d58d01a Mon Sep 17 00:00:00 2001 From: Mirko von Leipzig <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Mon, 22 Jul 2024 22:38:20 +0200 Subject: [PATCH 10/11] fix: only rebuild proto on changes (#412) --- .github/workflows/lint.yml | 12 ++++++ CHANGELOG.md | 5 +++ crates/proto/build.rs | 50 +++++++++++++++++++++++-- crates/proto/src/generated/mod.rs | 2 + crates/proto/src/generated/rustfmt.toml | 1 - crates/rpc-proto/build.rs | 41 ++++++++++---------- crates/rpc-proto/src/proto_files.rs | 16 ++++---- 7 files changed, 94 insertions(+), 33 deletions(-) delete mode 100644 crates/proto/src/generated/rustfmt.toml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 215848440..911d8f2c8 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -51,3 +51,15 @@ jobs: override: true - name: check rust versions run: ./scripts/check-rust-version.sh + + proto: + name: proto check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@main + - name: Update rust toolchain + run: rustup update --no-self-update + - name: Rebuild protos + run: BUILD_PROTO=1 cargo check -p miden-node-rpc -p miden-rpc-proto + - name: Diff check + run: git diff --exit-code diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e8b4820d..e6e01fd35 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,8 +8,13 @@ - [BREAKING] Removed redundant node configuration properties (#401). - Improve type safety of the transaction inputs nullifier mapping (#406). - Embed the faucet's static website resources (#411). +- CI check for proto file consistency (#412). - Added warning on CI for `CHANGELOG.md` (#413). +### Fixes + +- `miden-node-proto`'s build script always triggers (#412). + ## 0.4.0 (2024-07-04) ### Features diff --git a/crates/proto/build.rs b/crates/proto/build.rs index 86bc2551e..f35b1d7d9 100644 --- a/crates/proto/build.rs +++ b/crates/proto/build.rs @@ -1,4 +1,7 @@ -use std::{env, fs, path::PathBuf}; +use std::{ + env, fs, + path::{Path, PathBuf}, +}; use miette::IntoDiagnostic; use prost::Message; @@ -8,14 +11,21 @@ use prost::Message; /// This is done only if BUILD_PROTO environment variable is set to `1` to avoid running the script /// on crates.io where repo-level .proto files are not available. fn main() -> miette::Result<()> { - println!("cargo:rerun-if-changed=generated"); println!("cargo:rerun-if-changed=../../proto"); - // skip this build script in BUILD_PROTO environment variable is not set to `1` + // Skip this build script in BUILD_PROTO environment variable is not set to `1`. if env::var("BUILD_PROTO").unwrap_or("0".to_string()) == "0" { return Ok(()); } + let crate_root: PathBuf = + env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR should be set").into(); + let dst_dir = crate_root.join("src").join("generated"); + + // Remove all existing files. 
+ fs::remove_dir_all(&dst_dir).into_diagnostic()?; + fs::create_dir(&dst_dir).into_diagnostic()?; + // Compute the directory of the `proto` definitions let cwd: PathBuf = env::current_dir().into_diagnostic()?; @@ -47,9 +57,41 @@ fn main() -> miette::Result<()> { tonic_build::configure() .file_descriptor_set_path(&file_descriptor_path) .skip_protoc_run() - .out_dir("src/generated") + .out_dir(&dst_dir) .compile_with_config(prost_config, protos, includes) .into_diagnostic()?; + generate_mod_rs(&dst_dir).into_diagnostic()?; + Ok(()) } + +/// Generate `mod.rs` which includes all files in the folder as submodules. +fn generate_mod_rs(directory: impl AsRef) -> std::io::Result<()> { + let mod_filepath = directory.as_ref().join("mod.rs"); + + // Discover all submodules by iterating over the folder contents. + let mut submodules = Vec::new(); + for entry in fs::read_dir(directory)? { + let entry = entry?; + let path = entry.path(); + if path.is_file() { + let file_stem = path + .file_stem() + .and_then(|f| f.to_str()) + .expect("Could not get file name") + .to_owned(); + + submodules.push(file_stem); + } + } + + submodules.sort(); + + let contents = submodules.iter().map(|f| format!("pub mod {f};\n")); + let contents = std::iter::once("// Generated by build.rs\n\n".to_owned()) + .chain(contents) + .collect::(); + + fs::write(mod_filepath, contents) +} diff --git a/crates/proto/src/generated/mod.rs b/crates/proto/src/generated/mod.rs index a1ee08694..aa4409800 100644 --- a/crates/proto/src/generated/mod.rs +++ b/crates/proto/src/generated/mod.rs @@ -1,3 +1,5 @@ +// Generated by build.rs + pub mod account; pub mod block_header; pub mod block_producer; diff --git a/crates/proto/src/generated/rustfmt.toml b/crates/proto/src/generated/rustfmt.toml deleted file mode 100644 index c7ad93baf..000000000 --- a/crates/proto/src/generated/rustfmt.toml +++ /dev/null @@ -1 +0,0 @@ -disable_all_formatting = true diff --git a/crates/rpc-proto/build.rs b/crates/rpc-proto/build.rs index 5961128f4..c946ce043 100644 --- a/crates/rpc-proto/build.rs +++ b/crates/rpc-proto/build.rs @@ -1,7 +1,5 @@ use std::{ - env, - fs::{self, File}, - io::{self, Read, Write}, + env, fs, io, path::{Path, PathBuf}, }; @@ -22,7 +20,6 @@ const DOC_COMMENT: &str = /// This is done only if BUILD_PROTO environment variable is set to `1` to avoid running the script /// on crates.io where repo-level .proto files are not available. fn main() -> io::Result<()> { - println!("cargo:rerun-if-changed=proto"); println!("cargo:rerun-if-changed=../../proto"); // skip this build script in BUILD_PROTO environment variable is not set to `1` @@ -30,35 +27,40 @@ fn main() -> io::Result<()> { return Ok(()); } - // copy all .proto files into this crate. all these files need to be local to the crate to + // Copy all .proto files into this crate. all these files need to be local to the crate to // publish the crate to crates.io + fs::remove_dir_all(CRATE_PROTO_DIR)?; + fs::create_dir(CRATE_PROTO_DIR)?; copy_proto_files()?; let out_dir = env::current_dir().expect("Error getting cwd"); let dest_path = Path::new(&out_dir).join("./src/proto_files.rs"); - let mut file = File::create(dest_path)?; + fs::remove_file(&dest_path)?; - writeln!(file, "/// {DOC_COMMENT}")?; - writeln!(file, "pub const PROTO_FILES: &[(&str, &str)] = &[")?; - - for entry in std::fs::read_dir(CRATE_PROTO_DIR)? { + let mut proto_filenames = Vec::new(); + for entry in fs::read_dir(CRATE_PROTO_DIR)? 
{ let entry = entry?; let path = entry.path(); if path.is_file() { - let mut file_content = String::new(); let file_name = path.file_name().and_then(|f| f.to_str()).expect("Could not get file name"); - File::open(&path)?.read_to_string(&mut file_content)?; - writeln!( - file, - " (\"{}\", include_str!(\"../{CRATE_PROTO_DIR}/{}\")),", - file_name, file_name - )?; + proto_filenames.push(format!( + " (\"{file_name}\", include_str!(\"../{CRATE_PROTO_DIR}/{file_name}\")),\n" + )); } } - - writeln!(file, "];")?; + // Sort so that the vector is consistent since directory walking order is + // not guaranteed, otherwise there will be diffs from different runs. + proto_filenames.sort(); + + let content = std::iter::once(format!( + "/// {DOC_COMMENT}\npub const PROTO_FILES: &[(&str, &str)] = &[\n" + )) + .chain(proto_filenames) + .chain(std::iter::once("];\n".to_string())) + .collect::(); + fs::write(dest_path, content)?; Ok(()) } @@ -73,7 +75,6 @@ fn copy_proto_files() -> io::Result<()> { fs::create_dir_all(dest_dir.clone())?; for entry in fs::read_dir(REPO_PROTO_DIR)? { let entry = entry?; - println!("{entry:?}"); let ty = entry.file_type()?; if !ty.is_dir() { fs::copy(entry.path(), dest_dir.join(entry.file_name()))?; diff --git a/crates/rpc-proto/src/proto_files.rs b/crates/rpc-proto/src/proto_files.rs index 497152192..b53677db4 100644 --- a/crates/rpc-proto/src/proto_files.rs +++ b/crates/rpc-proto/src/proto_files.rs @@ -1,16 +1,16 @@ /// A list of tuples containing the names and contents of various protobuf files. pub const PROTO_FILES: &[(&str, &str)] = &[ - ("note.proto", include_str!("../proto/note.proto")), - ("smt.proto", include_str!("../proto/smt.proto")), - ("responses.proto", include_str!("../proto/responses.proto")), - ("rpc.proto", include_str!("../proto/rpc.proto")), - ("store.proto", include_str!("../proto/store.proto")), - ("transaction.proto", include_str!("../proto/transaction.proto")), - ("mmr.proto", include_str!("../proto/mmr.proto")), ("account.proto", include_str!("../proto/account.proto")), ("block_header.proto", include_str!("../proto/block_header.proto")), - ("digest.proto", include_str!("../proto/digest.proto")), ("block_producer.proto", include_str!("../proto/block_producer.proto")), + ("digest.proto", include_str!("../proto/digest.proto")), ("merkle.proto", include_str!("../proto/merkle.proto")), + ("mmr.proto", include_str!("../proto/mmr.proto")), + ("note.proto", include_str!("../proto/note.proto")), ("requests.proto", include_str!("../proto/requests.proto")), + ("responses.proto", include_str!("../proto/responses.proto")), + ("rpc.proto", include_str!("../proto/rpc.proto")), + ("smt.proto", include_str!("../proto/smt.proto")), + ("store.proto", include_str!("../proto/store.proto")), + ("transaction.proto", include_str!("../proto/transaction.proto")), ]; From 10f0330f5df551f8b901143607a2bbaf2dfa9d7c Mon Sep 17 00:00:00 2001 From: SantiagoPittella Date: Tue, 23 Jul 2024 16:25:04 +0200 Subject: [PATCH 11/11] docs(README): improve configuration files documentation --- README.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index c323aa675..1d1348b77 100644 --- a/README.md +++ b/README.md @@ -55,11 +55,14 @@ cargo install --features testing --path bin/node Currently, the only difference between the two is how long the `make-genesis` command will take to run (see next subsection). 
-### Generating the genesis file +### Generating the node configuration and genesis files -Before running the node, you must first generate the genesis file. The contents of the genesis file are fully configurable through a genesis inputs file written in TOML. An example genesis inputs file can be found here: [genesis.toml](./config/genesis.toml) +Before running the node, you must first generate the node configuration and genesis files. The contents of the genesis file are fully configurable through a genesis inputs file written in TOML. An example genesis inputs file can be found here: [genesis.toml](./config/genesis.toml). To generate both files, run: -To generate the genesis file, run: +```sh +miden-node init +``` +To generate the genesis block, run: ```sh miden-node make-genesis @@ -72,7 +75,7 @@ By default this will generate 1 file and 1 folder in the current directory: ### Running the node -Create a configuration file based on [node/miden-node.toml](./config/miden-node.toml), then create the necessary directories and start the node: +Using the node configuration file created in the previous step, start the node: ```sh mkdir -p /opt/miden