From 1140cfad8157fc8fdb627e48c0a17f2a341c8ee4 Mon Sep 17 00:00:00 2001 From: Diego Date: Mon, 3 Nov 2025 13:06:56 -0300 Subject: [PATCH 01/44] Use LazyLoadingBackend as default backend --- Cargo.lock | 1847 +++++++++------- crates/anvil-polkadot/Cargo.toml | 3 + .../substrate_node/lazy_loading/backend.rs | 1874 +++++++++++++++++ .../src/substrate_node/lazy_loading/mod.rs | 4 + .../substrate_node/lazy_loading/rpc_client.rs | 50 + .../anvil-polkadot/src/substrate_node/mod.rs | 1 + .../src/substrate_node/service/backend.rs | 12 +- .../src/substrate_node/service/client.rs | 11 +- .../src/substrate_node/service/mod.rs | 3 +- 9 files changed, 3089 insertions(+), 716 deletions(-) create mode 100644 crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs create mode 100644 crates/anvil-polkadot/src/substrate_node/lazy_loading/mod.rs create mode 100644 crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs diff --git a/Cargo.lock b/Cargo.lock index 699efddd176c2..3dd1be1244bff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1211,17 +1211,20 @@ dependencies = [ "foundry-test-utils", "futures", "futures-timer", + "hex-literal", "hyper 1.6.0", "indexmap 2.10.0", "itertools 0.14.0", "jsonrpsee", "libsecp256k1", + "log", "lru 0.16.0", "op-alloy-consensus 0.17.2", "op-alloy-rpc-types", "pallet-revive-eth-rpc", "parity-scale-codec", "parking_lot 0.12.4", + "polkadot-core-primitives 18.0.0", "polkadot-sdk", "rand 0.8.5", "revm", @@ -1229,7 +1232,7 @@ dependencies = [ "secp256k1 0.28.2", "serde", "serde_json", - "sp-runtime-interface", + "sp-runtime-interface 24.0.0", "sqlx", "substrate-runtime", "subxt", @@ -2001,8 +2004,8 @@ dependencies = [ "parachains-common", "parachains-runtimes-test-utils", "parity-scale-codec", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "staging-parachain-info", "staging-xcm", "staging-xcm-builder", @@ -2029,8 +2032,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -2842,6 +2845,17 @@ dependencies = [ "parity-scale-codec", ] +[[package]] +name = "binary-merkle-tree" +version = "16.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95c9f6900c9fd344d53fbdfb36e1343429079d73f4168c8ef48884bf15616dbd" +dependencies = [ + "hash-db", + "log", + "parity-scale-codec", +] + [[package]] name = "bindgen" version = "0.69.5" @@ -3134,6 +3148,18 @@ dependencies = [ "serde", ] +[[package]] +name = "bounded-collections" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ad8a0bed7827f0b07a5d23cec2e58cc02038a99e4ca81616cb2bb2025f804d" +dependencies = [ + "log", + "parity-scale-codec", + "scale-info", + "serde", +] + [[package]] name = "bounded-collections" version = "0.3.2" @@ -3165,9 +3191,9 @@ dependencies = [ "scale-info", "serde", "sp-consensus-grandpa", - "sp-core", - "sp-runtime", - "sp-std", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -3181,9 +3207,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", - "sp-std", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -3198,9 +3224,9 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", - 
"sp-std", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -3215,9 +3241,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-runtime", - "sp-std", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -3234,8 +3260,8 @@ dependencies = [ "pallet-utility", "parity-scale-codec", "scale-info", - "sp-runtime", - "sp-std", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -3251,12 +3277,12 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", - "sp-runtime", - "sp-state-machine", - "sp-std", - "sp-trie", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-trie 29.0.0", "tracing", "trie-db", ] @@ -3273,12 +3299,12 @@ dependencies = [ "ed25519-dalek", "finality-grandpa", "parity-scale-codec", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-consensus-grandpa", - "sp-core", - "sp-runtime", - "sp-std", - "sp-trie", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-trie 29.0.0", ] [[package]] @@ -3292,9 +3318,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", - "sp-std", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "staging-xcm", ] @@ -3305,8 +3331,8 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", "staging-xcm", ] @@ -3321,9 +3347,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "snowbridge-core", - "sp-core", - "sp-runtime", - "sp-std", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -3359,12 +3385,12 @@ dependencies = [ "parachains-common", "parachains-runtimes-test-utils", "parity-scale-codec", - "sp-core", - "sp-io", + "sp-core 28.0.0", + "sp-io 30.0.0", "sp-keyring", - "sp-runtime", - "sp-std", - "sp-tracing", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-tracing 16.0.0", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -3392,11 +3418,11 @@ dependencies = [ "pallet-utility", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", - "sp-std", - "sp-trie", - "sp-weights", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-trie 29.0.0", + "sp-weights 27.0.0", "staging-xcm", "tracing", "tuplex", @@ -4694,9 +4720,9 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-consensus-aura", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -4711,8 +4737,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "staging-xcm", ] @@ -4739,14 +4765,14 @@ dependencies = [ 
"polkadot-runtime-parachains", "scale-info", "sp-consensus-babe", - "sp-core", - "sp-externalities", + "sp-core 28.0.0", + "sp-externalities 0.25.0", "sp-inherents", - "sp-io", - "sp-runtime", - "sp-state-machine", - "sp-std", - "sp-trie", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-trie 29.0.0", "sp-version", "staging-xcm", "staging-xcm-builder", @@ -4774,7 +4800,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -4789,7 +4815,7 @@ dependencies = [ "parity-scale-codec", "polkadot-primitives", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -4806,9 +4832,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", - "sp-trie", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-trie 29.0.0", ] [[package]] @@ -4821,8 +4847,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "staging-xcm", ] @@ -4843,9 +4869,9 @@ dependencies = [ "polkadot-runtime-common", "polkadot-runtime-parachains", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -4863,7 +4889,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", "staging-xcm", ] @@ -4882,13 +4908,13 @@ version = "0.7.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ "parity-scale-codec", - "polkadot-core-primitives", + "polkadot-core-primitives 7.0.0", "polkadot-parachain-primitives", "polkadot-primitives", "scale-info", "sp-api", - "sp-runtime", - "sp-trie", + "sp-runtime 31.0.1", + "sp-trie 29.0.0", "staging-xcm", "tracing", ] @@ -4902,9 +4928,9 @@ dependencies = [ "cumulus-primitives-core", "parity-scale-codec", "scale-info", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-trie", + "sp-trie 29.0.0", ] [[package]] @@ -4912,9 +4938,9 @@ name = "cumulus-primitives-proof-size-hostfunction" version = "0.2.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ - "sp-externalities", - "sp-runtime-interface", - "sp-trie", + "sp-externalities 0.25.0", + "sp-runtime-interface 24.0.0", + "sp-trie 29.0.0", ] [[package]] @@ -4931,7 +4957,7 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -4955,7 +4981,7 @@ dependencies = [ "pallet-asset-conversion", "parity-scale-codec", "polkadot-runtime-common", - "sp-runtime", + "sp-runtime 31.0.1", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -4969,9 +4995,9 @@ dependencies = [ "cumulus-primitives-core", "parity-scale-codec", "polkadot-primitives", - "sp-runtime", - "sp-state-machine", - "sp-trie", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", + "sp-trie 29.0.0", ] [[package]] @@ -7238,12 +7264,12 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto", - "sp-core", - "sp-io", - "sp-runtime", - "sp-runtime-interface", - "sp-storage", + "sp-application-crypto 30.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-runtime-interface 24.0.0", + "sp-storage 19.0.0", "static_assertions", ] @@ -7257,8 
+7283,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -7296,11 +7322,11 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-arithmetic", - "sp-core", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", "sp-npos-elections", - "sp-runtime", - "sp-std", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -7315,10 +7341,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", - "sp-tracing", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-tracing 16.0.0", ] [[package]] @@ -7346,7 +7372,7 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -7356,7 +7382,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "aquamarine", "array-bytes", - "binary-merkle-tree", + "binary-merkle-tree 13.0.0", "bitflags 1.3.2", "docify", "environmental", @@ -7372,21 +7398,21 @@ dependencies = [ "serde", "serde_json", "sp-api", - "sp-arithmetic", - "sp-core", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", "sp-crypto-hashing-proc-macro", - "sp-debug-derive", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "sp-genesis-builder", "sp-inherents", - "sp-io", + "sp-io 30.0.0", "sp-metadata-ir", - "sp-runtime", + "sp-runtime 31.0.1", "sp-staking", - "sp-state-machine", - "sp-std", - "sp-tracing", - "sp-trie", - "sp-weights", + "sp-state-machine 0.35.0", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-tracing 16.0.0", + "sp-trie 29.0.0", + "sp-weights 27.0.0", "tt-call", ] @@ -7444,11 +7470,11 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-version", - "sp-weights", + "sp-weights 27.0.0", ] [[package]] @@ -7461,8 +7487,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -7483,7 +7509,7 @@ dependencies = [ "frame-support", "parity-scale-codec", "sp-api", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -11472,10 +11498,10 @@ dependencies = [ "pallet-identity", "parity-scale-codec", "scale-info", - "sp-core", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11490,10 +11516,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11508,10 +11534,10 @@ dependencies = [ "pallet-asset-conversion", "parity-scale-codec", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11526,7 +11552,7 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -11539,8 +11565,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 
31.0.1", ] [[package]] @@ -11554,11 +11580,11 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", - "sp-std", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -11573,8 +11599,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11589,8 +11615,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11617,7 +11643,7 @@ dependencies = [ "pallet-assets", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -11652,9 +11678,9 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-consensus-aura", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -11667,9 +11693,9 @@ dependencies = [ "pallet-session", "parity-scale-codec", "scale-info", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-authority-discovery", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -11682,7 +11708,7 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -11699,11 +11725,11 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-consensus-babe", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-session", "sp-staking", ] @@ -11723,10 +11749,10 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", - "sp-tracing", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-tracing 16.0.0", ] [[package]] @@ -11741,8 +11767,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11759,7 +11785,7 @@ dependencies = [ "scale-info", "serde", "sp-consensus-beefy", - "sp-runtime", + "sp-runtime 31.0.1", "sp-session", "sp-staking", ] @@ -11770,7 +11796,7 @@ version = "28.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ "array-bytes", - "binary-merkle-tree", + "binary-merkle-tree 13.0.0", "frame-benchmarking", "frame-support", "frame-system", @@ -11783,10 +11809,10 @@ dependencies = [ "serde", "sp-api", "sp-consensus-beefy", - "sp-core", - "sp-io", - "sp-runtime", - "sp-state-machine", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", ] [[package]] @@ -11801,9 +11827,9 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11820,8 +11846,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-consensus-grandpa", - "sp-runtime", - "sp-std", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "tracing", ] @@ -11838,9 +11864,9 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-runtime", - "sp-std", - "sp-trie", + "sp-runtime 31.0.1", + "sp-std 14.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-trie 29.0.0", "tracing", ] @@ -11859,8 +11885,8 @@ dependencies = [ "pallet-bridge-grandpa", "parity-scale-codec", "scale-info", - "sp-runtime", - "sp-std", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "tracing", ] @@ -11882,8 +11908,8 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "scale-info", - "sp-arithmetic", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-runtime 31.0.1", "tracing", ] @@ -11900,9 +11926,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-arithmetic", - "sp-core", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11918,9 +11944,9 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11938,7 +11964,7 @@ dependencies = [ "parity-scale-codec", "rand 0.8.5", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", "sp-staking", ] @@ -11954,9 +11980,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11969,8 +11995,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -11994,9 +12020,9 @@ dependencies = [ "serde", "smallvec", "sp-api", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "staging-xcm", "staging-xcm-builder", "wasm-instrument", @@ -12023,11 +12049,11 @@ dependencies = [ "polkadot-runtime-parachains", "scale-info", "sp-api", - "sp-core", - "sp-io", - "sp-keystore", - "sp-runtime", - "sp-tracing", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-keystore 0.34.0", + "sp-runtime 31.0.1", + "sp-tracing 16.0.0", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -12067,8 +12093,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12083,10 +12109,10 @@ dependencies = [ "pallet-ranked-collective", "parity-scale-codec", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12099,8 +12125,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-staking", ] @@ -12116,9 +12142,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12132,10 +12158,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", - "sp-std", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -12152,8 +12178,8 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12168,10 +12194,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-arithmetic", - "sp-core", - "sp-io", - 
"sp-runtime", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12187,12 +12213,12 @@ dependencies = [ "parity-scale-codec", "rand 0.8.5", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", "sp-npos-elections", - "sp-runtime", - "sp-std", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -12208,11 +12234,11 @@ dependencies = [ "parity-scale-codec", "rand 0.8.5", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", "sp-npos-elections", - "sp-runtime", + "sp-runtime 31.0.1", "strum 0.26.3", ] @@ -12226,7 +12252,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "sp-npos-elections", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -12240,10 +12266,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", + "sp-core 28.0.0", + "sp-io 30.0.0", "sp-npos-elections", - "sp-runtime", + "sp-runtime 31.0.1", "sp-staking", ] @@ -12260,8 +12286,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-staking", ] @@ -12277,10 +12303,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12296,11 +12322,11 @@ dependencies = [ "pallet-session", "parity-scale-codec", "scale-info", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-consensus-grandpa", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-session", "sp-staking", ] @@ -12317,8 +12343,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12333,10 +12359,10 @@ dependencies = [ "pallet-authorship", "parity-scale-codec", "scale-info", - "sp-application-crypto", - "sp-core", - "sp-io", - "sp-runtime", + "sp-application-crypto 30.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-staking", ] @@ -12350,9 +12376,9 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12376,7 +12402,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -12390,9 +12416,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12407,11 +12433,11 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", - "sp-weights", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", ] [[package]] @@ -12426,10 +12452,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", - "sp-runtime", - "sp-std", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -12446,9 +12472,9 @@ dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", "scale-info", - "sp-core", - "sp-io", - 
"sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12461,7 +12487,7 @@ dependencies = [ "polkadot-sdk-frame", "scale-info", "serde", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-mixnet", ] @@ -12489,9 +12515,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12530,9 +12556,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12576,11 +12602,11 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-staking", - "sp-tracing", + "sp-tracing 16.0.0", ] [[package]] @@ -12598,8 +12624,8 @@ dependencies = [ "pallet-staking", "parity-scale-codec", "scale-info", - "sp-runtime", - "sp-runtime-interface", + "sp-runtime 31.0.1", + "sp-runtime-interface 24.0.0", "sp-staking", ] @@ -12624,7 +12650,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-runtime", + "sp-runtime 31.0.1", "sp-staking", ] @@ -12647,7 +12673,7 @@ dependencies = [ "pallet-staking", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", "sp-staking", ] @@ -12663,10 +12689,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-application-crypto", - "sp-io", - "sp-runtime", - "sp-std", + "sp-application-crypto 30.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -12677,7 +12703,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-std", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] [[package]] @@ -12692,10 +12718,10 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12723,8 +12749,8 @@ dependencies = [ "paste", "scale-info", "serde", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12738,10 +12764,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "verifiable", ] @@ -12756,9 +12782,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12783,10 +12809,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12811,9 +12837,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-arithmetic", - "sp-io", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12827,9 +12853,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -12870,13 +12896,13 @@ dependencies = [ "scale-info", "serde", "sp-api", 
- "sp-arithmetic", + "sp-arithmetic 23.0.0", "sp-consensus-aura", "sp-consensus-babe", "sp-consensus-slots", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-version", "substrate-bn", "subxt-signer", @@ -12904,15 +12930,15 @@ dependencies = [ "sc-service", "serde", "serde_json", - "sp-arithmetic", - "sp-core", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-io", + "sp-io 30.0.0", "sp-rpc", - "sp-runtime", - "sp-weights", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", "sqlx", - "substrate-prometheus-endpoint", + "substrate-prometheus-endpoint 0.17.0", "subxt", "subxt-signer", "thiserror 1.0.69", @@ -12931,8 +12957,8 @@ dependencies = [ "pallet-revive-uapi", "polkavm-linker 0.29.0", "serde_json", - "sp-core", - "sp-io", + "sp-core 28.0.0", + "sp-io 30.0.0", "toml 0.8.23", ] @@ -12972,8 +12998,8 @@ dependencies = [ "pallet-staking", "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", "sp-staking", ] @@ -12986,8 +13012,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -13028,9 +13054,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", - "sp-weights", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", ] [[package]] @@ -13042,8 +13068,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -13059,13 +13085,13 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-session", "sp-staking", - "sp-state-machine", - "sp-trie", + "sp-state-machine 0.35.0", + "sp-trie 29.0.0", ] [[package]] @@ -13080,7 +13106,7 @@ dependencies = [ "pallet-staking", "parity-scale-codec", "rand 0.8.5", - "sp-runtime", + "sp-runtime 31.0.1", "sp-session", ] @@ -13093,7 +13119,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -13108,9 +13134,9 @@ dependencies = [ "parity-scale-codec", "rand_chacha 0.3.1", "scale-info", - "sp-arithmetic", - "sp-io", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -13128,9 +13154,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-application-crypto", - "sp-io", - "sp-runtime", + "sp-application-crypto 30.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-staking", ] @@ -13150,11 +13176,11 @@ dependencies = [ "rand_chacha 0.3.1", "scale-info", "serde", - "sp-application-crypto", - "sp-core", - "sp-io", + "sp-application-crypto 30.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", "sp-npos-elections", - "sp-runtime", + "sp-runtime 31.0.1", "sp-staking", ] @@ -13173,8 +13199,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", "sp-staking", ] @@ -13189,8 +13215,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", "sp-staking", "staging-xcm", ] @@ -13201,7 +13227,7 @@ version = "19.0.0" source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ "log", - "sp-arithmetic", + "sp-arithmetic 23.0.0", ] [[package]] @@ -13220,7 +13246,7 @@ version = "19.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ "log", - "sp-arithmetic", + "sp-arithmetic 23.0.0", ] [[package]] @@ -13244,9 +13270,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -13260,9 +13286,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-statement-store", ] @@ -13277,8 +13303,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -13294,8 +13320,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-inherents", - "sp-runtime", - "sp-storage", + "sp-runtime 31.0.1", + "sp-storage 19.0.0", "sp-timestamp", ] @@ -13312,9 +13338,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -13329,8 +13355,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -13341,8 +13367,8 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "sp-api", - "sp-runtime", - "sp-weights", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", ] [[package]] @@ -13359,8 +13385,8 @@ dependencies = [ "scale-info", "serde", "sp-inherents", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "sp-transaction-storage-proof", ] @@ -13379,8 +13405,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -13405,7 +13431,7 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -13418,9 +13444,9 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -13433,9 +13459,9 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", - "sp-weights", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", ] [[package]] @@ -13449,7 +13475,7 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -13476,9 +13502,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -13496,8 +13522,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -13516,9 +13542,9 @@ dependencies = [ "pallet-bridge-messages", "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", - "sp-std", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -13537,9 +13563,9 @@ dependencies = [ "parity-scale-codec", "polkadot-runtime-parachains", "scale-info", - "sp-core", - "sp-runtime", - "sp-std", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "staging-xcm", "staging-xcm-builder", "tracing", @@ -13581,9 +13607,9 @@ dependencies = [ "polkadot-runtime-common", "scale-info", "sp-consensus-aura", - "sp-core", - "sp-io", - "sp-runtime", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "staging-parachain-info", "staging-xcm", "staging-xcm-executor", @@ -13611,16 +13637,29 @@ dependencies = [ "parity-scale-codec", "polkadot-parachain-primitives", "sp-consensus-aura", - "sp-core", - "sp-io", - "sp-runtime", - "sp-tracing", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-tracing 16.0.0", "staging-parachain-info", "staging-xcm", "staging-xcm-executor", "xcm-runtime-apis", ] +[[package]] +name = "parity-bip39" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" +dependencies = [ + "bitcoin_hashes 0.13.0", + "rand 0.8.5", + "rand_core 0.6.4", + "serde", + "unicode-normalization", +] + [[package]] name = "parity-db" version = "0.4.13" @@ -14052,8 +14091,20 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "parity-scale-codec", "scale-info", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", +] + +[[package]] +name = "polkadot-core-primitives" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85331e6e8c215034748a5afa4d985c4bc74e17a6704123749570591ddc2ac6c" +dependencies = [ + "parity-scale-codec", + "scale-info", + "sp-core 37.0.0", + "sp-runtime 42.0.0", ] [[package]] @@ -14065,12 +14116,12 @@ dependencies = [ "bounded-collections 0.3.2", "derive_more 0.99.20", "parity-scale-codec", - "polkadot-core-primitives", + "polkadot-core-primitives 7.0.0", "scale-info", "serde", - "sp-core", - "sp-runtime", - "sp-weights", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", ] [[package]] @@ -14083,22 +14134,22 @@ dependencies = [ "hex-literal", "log", "parity-scale-codec", - "polkadot-core-primitives", + "polkadot-core-primitives 7.0.0", "polkadot-parachain-primitives", "scale-info", "serde", "sp-api", - "sp-application-crypto", - "sp-arithmetic", + "sp-application-crypto 30.0.0", + "sp-arithmetic 23.0.0", "sp-authority-discovery", "sp-consensus-slots", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-io", - "sp-keystore", - "sp-runtime", + "sp-io 30.0.0", + "sp-keystore 0.34.0", + "sp-runtime 31.0.1", "sp-staking", - "sp-std", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "thiserror 1.0.69", ] @@ -14137,12 +14188,12 @@ dependencies = [ "serde", "slot-range-helper", "sp-api", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-io", + "sp-io 30.0.0", "sp-keyring", "sp-npos-elections", - "sp-runtime", + "sp-runtime 31.0.1", "sp-session", "sp-staking", "staging-xcm", @@ -14160,7 +14211,7 @@ dependencies = [ "frame-benchmarking", "parity-scale-codec", "polkadot-primitives", - "sp-tracing", + "sp-tracing 16.0.0", ] [[package]] @@ -14187,7 +14238,7 @@ dependencies = [ "pallet-staking", "pallet-timestamp", "parity-scale-codec", - 
"polkadot-core-primitives", + "polkadot-core-primitives 7.0.0", "polkadot-parachain-primitives", "polkadot-primitives", "polkadot-runtime-metrics", @@ -14196,16 +14247,16 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto", - "sp-arithmetic", - "sp-core", + "sp-application-crypto 30.0.0", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", "sp-inherents", - "sp-io", - "sp-keystore", - "sp-runtime", + "sp-io 30.0.0", + "sp-keystore 0.34.0", + "sp-runtime 31.0.1", "sp-session", "sp-staking", - "sp-std", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "staging-xcm", "staging-xcm-executor", ] @@ -14217,7 +14268,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "asset-test-utils", "assets-common", - "binary-merkle-tree", + "binary-merkle-tree 13.0.0", "bp-header-chain", "bp-messages", "bp-parachains", @@ -14383,7 +14434,7 @@ dependencies = [ "pallet-xcm-precompiles", "parachains-common", "parachains-runtimes-test-utils", - "polkadot-core-primitives", + "polkadot-core-primitives 7.0.0", "polkadot-parachain-primitives", "polkadot-primitives", "polkadot-runtime-common", @@ -14421,8 +14472,8 @@ dependencies = [ "slot-range-helper", "sp-api", "sp-api-proc-macro", - "sp-application-crypto", - "sp-arithmetic", + "sp-application-crypto 30.0.0", + "sp-arithmetic 23.0.0", "sp-authority-discovery", "sp-block-builder", "sp-blockchain", @@ -14433,50 +14484,50 @@ dependencies = [ "sp-consensus-grandpa", "sp-consensus-pow", "sp-consensus-slots", - "sp-core", + "sp-core 28.0.0", "sp-core-hashing", "sp-core-hashing-proc-macro", "sp-crypto-ec-utils", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "sp-crypto-hashing-proc-macro", "sp-database", - "sp-debug-derive", - "sp-externalities", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-externalities 0.25.0", "sp-genesis-builder", "sp-inherents", - "sp-io", + "sp-io 30.0.0", "sp-keyring", - "sp-keystore", + "sp-keystore 0.34.0", "sp-metadata-ir", "sp-mixnet", "sp-mmr-primitives", "sp-npos-elections", "sp-offchain", - "sp-panic-handler", + "sp-panic-handler 13.0.0", "sp-rpc", - "sp-runtime", - "sp-runtime-interface", - "sp-runtime-interface-proc-macro", + "sp-runtime 31.0.1", + "sp-runtime-interface 24.0.0", + "sp-runtime-interface-proc-macro 17.0.0", "sp-session", "sp-staking", - "sp-state-machine", + "sp-state-machine 0.35.0", "sp-statement-store", - "sp-std", - "sp-storage", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-storage 19.0.0", "sp-timestamp", - "sp-tracing", + "sp-tracing 16.0.0", "sp-transaction-pool", "sp-transaction-storage-proof", - "sp-trie", + "sp-trie 29.0.0", "sp-version", "sp-version-proc-macro", - "sp-wasm-interface", - "sp-weights", + "sp-wasm-interface 20.0.0", + "sp-weights 27.0.0", "staging-parachain-info", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", - "substrate-bip39", + "substrate-bip39 0.4.7", "substrate-frame-rpc-support", "substrate-frame-rpc-system", "substrate-rpc-client", @@ -14503,19 +14554,19 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-arithmetic", + "sp-arithmetic 23.0.0", "sp-block-builder", "sp-consensus-aura", "sp-consensus-grandpa", - "sp-core", + "sp-core 28.0.0", "sp-genesis-builder", "sp-inherents", - "sp-io", + "sp-io 30.0.0", "sp-keyring", "sp-offchain", - "sp-runtime", + "sp-runtime 31.0.1", "sp-session", - "sp-storage", + "sp-storage 
19.0.0", "sp-transaction-pool", "sp-version", ] @@ -14564,6 +14615,12 @@ dependencies = [ "log", ] +[[package]] +name = "polkavm-common" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed9e5af472f729fcf3b3c1cf17508ddbb3505259dd6e2ee0fb5a29e105d22" + [[package]] name = "polkavm-common" version = "0.26.0" @@ -14585,6 +14642,15 @@ dependencies = [ "polkavm-assembler 0.29.0", ] +[[package]] +name = "polkavm-derive" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "176144f8661117ea95fa7cf868c9a62d6b143e8a2ebcb7582464c3faade8669a" +dependencies = [ + "polkavm-derive-impl-macro 0.24.0", +] + [[package]] name = "polkavm-derive" version = "0.26.0" @@ -14603,6 +14669,18 @@ dependencies = [ "polkavm-derive-impl-macro 0.29.0", ] +[[package]] +name = "polkavm-derive-impl" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5a21844afdfcc10c92b9ef288ccb926211af27478d1730fcd55e4aec710179d" +dependencies = [ + "polkavm-common 0.24.0", + "proc-macro2", + "quote", + "syn 2.0.104", +] + [[package]] name = "polkavm-derive-impl" version = "0.26.0" @@ -14627,6 +14705,16 @@ dependencies = [ "syn 2.0.104", ] +[[package]] +name = "polkavm-derive-impl-macro" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba0ef0f17ad81413ea1ca5b1b67553aedf5650c88269b673d3ba015c83bc2651" +dependencies = [ + "polkavm-derive-impl 0.24.0", + "syn 2.0.104", +] + [[package]] name = "polkavm-derive-impl-macro" version = "0.26.0" @@ -16070,9 +16158,9 @@ dependencies = [ "polkadot-primitives", "polkadot-runtime-common", "smallvec", - "sp-core", - "sp-runtime", - "sp-weights", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", "staging-xcm", "staging-xcm-builder", ] @@ -16580,8 +16668,8 @@ version = "23.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ "log", - "sp-core", - "sp-wasm-interface", + "sp-core 28.0.0", + "sp-wasm-interface 20.0.0", "thiserror 1.0.69", ] @@ -16600,11 +16688,11 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-consensus", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-runtime", - "sp-trie", - "substrate-prometheus-endpoint", + "sp-runtime 31.0.1", + "sp-trie 29.0.0", + "substrate-prometheus-endpoint 0.17.0", ] [[package]] @@ -16616,10 +16704,10 @@ dependencies = [ "sp-api", "sp-block-builder", "sp-blockchain", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-runtime", - "sp-trie", + "sp-runtime 31.0.1", + "sp-trie 29.0.0", ] [[package]] @@ -16639,13 +16727,13 @@ dependencies = [ "serde", "serde_json", "sp-blockchain", - "sp-core", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "sp-genesis-builder", - "sp-io", - "sp-runtime", - "sp-state-machine", - "sp-tracing", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", + "sp-tracing 16.0.0", ] [[package]] @@ -16691,11 +16779,11 @@ dependencies = [ "serde", "serde_json", "sp-blockchain", - "sp-core", + "sp-core 28.0.0", "sp-keyring", - "sp-keystore", - "sp-panic-handler", - "sp-runtime", + "sp-keystore 0.34.0", + "sp-panic-handler 13.0.0", + "sp-runtime 31.0.1", "sp-version", "thiserror 1.0.69", "tokio", @@ -16717,14 +16805,14 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-consensus", - "sp-core", + "sp-core 28.0.0", "sp-database", - "sp-externalities", - "sp-runtime", - 
"sp-state-machine", - "sp-storage", - "sp-trie", - "substrate-prometheus-endpoint", + "sp-externalities 0.25.0", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", + "sp-storage 19.0.0", + "sp-trie 29.0.0", + "substrate-prometheus-endpoint 0.17.0", ] [[package]] @@ -16744,14 +16832,14 @@ dependencies = [ "sc-client-api", "sc-state-db", "schnellru", - "sp-arithmetic", + "sp-arithmetic 23.0.0", "sp-blockchain", - "sp-core", + "sp-core 28.0.0", "sp-database", - "sp-runtime", - "sp-state-machine", - "sp-trie", - "substrate-prometheus-endpoint", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", + "sp-trie 29.0.0", + "substrate-prometheus-endpoint 0.17.0", "sysinfo", ] @@ -16771,10 +16859,10 @@ dependencies = [ "serde", "sp-blockchain", "sp-consensus", - "sp-core", - "sp-runtime", - "sp-state-machine", - "substrate-prometheus-endpoint", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", + "substrate-prometheus-endpoint 0.17.0", "thiserror 1.0.69", ] @@ -16795,17 +16883,17 @@ dependencies = [ "sc-consensus-slots", "sc-telemetry", "sp-api", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-block-builder", "sp-blockchain", "sp-consensus", "sp-consensus-aura", "sp-consensus-slots", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-keystore", - "sp-runtime", - "substrate-prometheus-endpoint", + "sp-keystore 0.34.0", + "sp-runtime 31.0.1", + "substrate-prometheus-endpoint 0.17.0", "thiserror 1.0.69", ] @@ -16830,19 +16918,19 @@ dependencies = [ "sc-telemetry", "sc-transaction-pool-api", "sp-api", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-block-builder", "sp-blockchain", "sp-consensus", "sp-consensus-babe", "sp-consensus-slots", - "sp-core", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "sp-inherents", - "sp-keystore", - "sp-runtime", + "sp-keystore 0.34.0", + "sp-runtime 31.0.1", "sp-timestamp", - "substrate-prometheus-endpoint", + "substrate-prometheus-endpoint 0.17.0", "thiserror 1.0.69", ] @@ -16856,7 +16944,7 @@ dependencies = [ "sc-client-api", "sc-consensus", "sp-blockchain", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -16885,12 +16973,12 @@ dependencies = [ "sp-consensus-aura", "sp-consensus-babe", "sp-consensus-slots", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-keystore", - "sp-runtime", + "sp-keystore 0.34.0", + "sp-runtime 31.0.1", "sp-timestamp", - "substrate-prometheus-endpoint", + "substrate-prometheus-endpoint 0.17.0", "thiserror 1.0.69", ] @@ -16907,14 +16995,14 @@ dependencies = [ "sc-client-api", "sc-consensus", "sc-telemetry", - "sp-arithmetic", + "sp-arithmetic 23.0.0", "sp-blockchain", "sp-consensus", "sp-consensus-slots", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-runtime", - "sp-state-machine", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", ] [[package]] @@ -16929,14 +17017,14 @@ dependencies = [ "sc-executor-wasmtime", "schnellru", "sp-api", - "sp-core", - "sp-externalities", - "sp-io", - "sp-panic-handler", - "sp-runtime-interface", - "sp-trie", + "sp-core 28.0.0", + "sp-externalities 0.25.0", + "sp-io 30.0.0", + "sp-panic-handler 13.0.0", + "sp-runtime-interface 24.0.0", + "sp-trie 29.0.0", "sp-version", - "sp-wasm-interface", + "sp-wasm-interface 20.0.0", "tracing", ] @@ -16948,7 +17036,7 @@ dependencies = [ "polkavm 0.26.0", "sc-allocator", "sp-maybe-compressed-blob", - "sp-wasm-interface", + "sp-wasm-interface 20.0.0", "thiserror 1.0.69", "wasm-instrument", ] @@ -16961,7 +17049,7 @@ dependencies = [ 
"log", "polkavm 0.26.0", "sc-executor-common", - "sp-wasm-interface", + "sp-wasm-interface 20.0.0", ] [[package]] @@ -16975,8 +17063,8 @@ dependencies = [ "rustix 1.0.8", "sc-allocator", "sc-executor-common", - "sp-runtime-interface", - "sp-wasm-interface", + "sp-runtime-interface 24.0.0", + "sp-wasm-interface 20.0.0", "wasmtime", ] @@ -16993,7 +17081,7 @@ dependencies = [ "sc-network", "sc-network-sync", "sp-blockchain", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -17004,9 +17092,9 @@ dependencies = [ "array-bytes", "parking_lot 0.12.4", "serde_json", - "sp-application-crypto", - "sp-core", - "sp-keystore", + "sp-application-crypto 30.0.0", + "sp-core 28.0.0", + "sp-keystore 0.34.0", "thiserror 1.0.69", ] @@ -17031,10 +17119,10 @@ dependencies = [ "sc-transaction-pool-api", "sp-api", "sp-consensus", - "sp-core", - "sp-keystore", + "sp-core 28.0.0", + "sp-keystore 0.34.0", "sp-mixnet", - "sp-runtime", + "sp-runtime 31.0.1", "thiserror 1.0.69", ] @@ -17074,11 +17162,11 @@ dependencies = [ "serde", "serde_json", "smallvec", - "sp-arithmetic", + "sp-arithmetic 23.0.0", "sp-blockchain", - "sp-core", - "sp-runtime", - "substrate-prometheus-endpoint", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "substrate-prometheus-endpoint 0.17.0", "thiserror 1.0.69", "tokio", "tokio-stream", @@ -17095,7 +17183,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "bitflags 1.3.2", "parity-scale-codec", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -17114,8 +17202,8 @@ dependencies = [ "sc-network", "sc-network-types", "sp-blockchain", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", "thiserror 1.0.69", ] @@ -17142,13 +17230,13 @@ dependencies = [ "sc-utils", "schnellru", "smallvec", - "sp-arithmetic", + "sp-arithmetic 23.0.0", "sp-blockchain", "sp-consensus", "sp-consensus-grandpa", - "sp-core", - "sp-runtime", - "substrate-prometheus-endpoint", + "sp-core 28.0.0", + "sp-runtime 31.0.1", + "substrate-prometheus-endpoint 0.17.0", "thiserror 1.0.69", "tokio", "tokio-stream", @@ -17169,8 +17257,8 @@ dependencies = [ "sc-network-types", "sc-utils", "sp-consensus", - "sp-runtime", - "substrate-prometheus-endpoint", + "sp-runtime 31.0.1", + "substrate-prometheus-endpoint 0.17.0", ] [[package]] @@ -17200,7 +17288,7 @@ version = "0.17.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ "log", - "substrate-prometheus-endpoint", + "substrate-prometheus-endpoint 0.17.0", ] [[package]] @@ -17224,11 +17312,11 @@ dependencies = [ "serde_json", "sp-api", "sp-blockchain", - "sp-core", - "sp-keystore", + "sp-core 28.0.0", + "sp-keystore 0.34.0", "sp-offchain", "sp-rpc", - "sp-runtime", + "sp-runtime 31.0.1", "sp-session", "sp-statement-store", "sp-version", @@ -17248,9 +17336,9 @@ dependencies = [ "scale-info", "serde", "serde_json", - "sp-core", + "sp-core 28.0.0", "sp-rpc", - "sp-runtime", + "sp-runtime 31.0.1", "sp-version", "thiserror 1.0.69", ] @@ -17273,7 +17361,7 @@ dependencies = [ "sc-rpc-api", "serde", "serde_json", - "substrate-prometheus-endpoint", + "substrate-prometheus-endpoint 0.17.0", "tokio", "tower 0.4.13", "tower-http 0.5.2", @@ -17302,11 +17390,11 @@ dependencies = [ "serde", "sp-api", "sp-blockchain", - "sp-core", + "sp-core 28.0.0", "sp-rpc", - "sp-runtime", + "sp-runtime 31.0.1", "sp-version", - "substrate-prometheus-endpoint", + "substrate-prometheus-endpoint 0.17.0", "thiserror 1.0.69", "tokio", "tokio-stream", @@ -17320,10 
+17408,10 @@ dependencies = [ "parity-scale-codec", "sc-executor", "sc-executor-common", - "sp-core", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-state-machine", - "sp-wasm-interface", + "sp-state-machine 0.35.0", + "sp-wasm-interface 20.0.0", "thiserror 1.0.69", ] @@ -17371,19 +17459,19 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-consensus", - "sp-core", - "sp-externalities", - "sp-keystore", - "sp-runtime", + "sp-core 28.0.0", + "sp-externalities 0.25.0", + "sp-keystore 0.34.0", + "sp-runtime 31.0.1", "sp-session", - "sp-state-machine", - "sp-storage", + "sp-state-machine 0.35.0", + "sp-storage 19.0.0", "sp-transaction-pool", "sp-transaction-storage-proof", - "sp-trie", + "sp-trie 29.0.0", "sp-version", "static_init", - "substrate-prometheus-endpoint", + "substrate-prometheus-endpoint 0.17.0", "tempfile", "thiserror 1.0.69", "tokio", @@ -17399,7 +17487,7 @@ dependencies = [ "log", "parity-scale-codec", "parking_lot 0.12.4", - "sp-core", + "sp-core 28.0.0", ] [[package]] @@ -17417,9 +17505,9 @@ dependencies = [ "sc-telemetry", "serde", "serde_json", - "sp-core", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-io", + "sp-io 30.0.0", ] [[package]] @@ -17459,10 +17547,10 @@ dependencies = [ "serde", "sp-api", "sp-blockchain", - "sp-core", + "sp-core 28.0.0", "sp-rpc", - "sp-runtime", - "sp-tracing", + "sp-runtime 31.0.1", + "sp-tracing 16.0.0", "thiserror 1.0.69", "tracing", "tracing-log", @@ -17499,12 +17587,12 @@ dependencies = [ "serde", "sp-api", "sp-blockchain", - "sp-core", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-runtime", - "sp-tracing", + "sp-runtime 31.0.1", + "sp-tracing 16.0.0", "sp-transaction-pool", - "substrate-prometheus-endpoint", + "substrate-prometheus-endpoint 0.17.0", "thiserror 1.0.69", "tokio", "tokio-stream", @@ -17523,8 +17611,8 @@ dependencies = [ "parity-scale-codec", "serde", "sp-blockchain", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", "thiserror 1.0.69", ] @@ -17539,7 +17627,7 @@ dependencies = [ "log", "parking_lot 0.12.4", "prometheus", - "sp-arithmetic", + "sp-arithmetic 23.0.0", ] [[package]] @@ -18441,7 +18529,7 @@ dependencies = [ "enumn", "parity-scale-codec", "paste", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -18629,11 +18717,11 @@ dependencies = [ "polkadot-parachain-primitives", "scale-info", "serde", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", - "sp-std", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -18861,13 +18949,13 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api-proc-macro", - "sp-core", - "sp-externalities", + "sp-core 28.0.0", + "sp-externalities 0.25.0", "sp-metadata-ir", - "sp-runtime", - "sp-runtime-interface", - "sp-state-machine", - "sp-trie", + "sp-runtime 31.0.1", + "sp-runtime-interface 24.0.0", + "sp-state-machine 0.35.0", + "sp-trie 29.0.0", "sp-version", "thiserror 1.0.69", ] @@ -18894,8 +18982,21 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-io", + "sp-core 28.0.0", + "sp-io 30.0.0", +] + +[[package]] +name = "sp-application-crypto" +version = "41.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "28c668f1ce424bc131f40ade33fa4c0bd4dcd2428479e1e291aad66d4b00c74f" +dependencies = [ + "parity-scale-codec", + "scale-info", + "serde", + "sp-core 37.0.0", + "sp-io 41.0.1", ] [[package]] @@ -18912,6 +19013,21 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "sp-arithmetic" +version = "27.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2929fd12ac6ca3cfac7f62885866810ba4e9464814dbaa87592b5b5681b29aee" +dependencies = [ + "docify", + "integer-sqrt", + "num-traits", + "parity-scale-codec", + "scale-info", + "serde", + "static_assertions", +] + [[package]] name = "sp-authority-discovery" version = "26.0.0" @@ -18920,8 +19036,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-application-crypto", - "sp-runtime", + "sp-application-crypto 30.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -18931,7 +19047,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "sp-api", "sp-inherents", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -18945,10 +19061,10 @@ dependencies = [ "schnellru", "sp-api", "sp-consensus", - "sp-core", + "sp-core 28.0.0", "sp-database", - "sp-runtime", - "sp-state-machine", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", "thiserror 1.0.69", "tracing", ] @@ -18962,8 +19078,8 @@ dependencies = [ "futures", "log", "sp-inherents", - "sp-runtime", - "sp-state-machine", + "sp-runtime 31.0.1", + "sp-state-machine 0.35.0", "thiserror 1.0.69", ] @@ -18976,10 +19092,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-consensus-slots", "sp-inherents", - "sp-runtime", + "sp-runtime 31.0.1", "sp-timestamp", ] @@ -18993,11 +19109,11 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto", + "sp-application-crypto 30.0.0", "sp-consensus-slots", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-runtime", + "sp-runtime 31.0.1", "sp-timestamp", ] @@ -19010,14 +19126,14 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto", - "sp-core", + "sp-application-crypto 30.0.0", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-io", - "sp-keystore", + "sp-io 30.0.0", + "sp-keystore 0.34.0", "sp-mmr-primitives", - "sp-runtime", - "sp-weights", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", "strum 0.26.3", ] @@ -19032,10 +19148,10 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto", - "sp-core", - "sp-keystore", - "sp-runtime", + "sp-application-crypto 30.0.0", + "sp-core 28.0.0", + "sp-keystore 0.34.0", + "sp-runtime 31.0.1", ] [[package]] @@ -19045,8 +19161,8 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "parity-scale-codec", "sp-api", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -19095,12 +19211,61 @@ dependencies = [ "serde", "sha2 0.10.9", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-debug-derive", - "sp-externalities", - "sp-std", - "sp-storage", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-externalities 0.25.0", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-storage 19.0.0", "ss58-registry", - "substrate-bip39", + 
"substrate-bip39 0.4.7", + "thiserror 1.0.69", + "tracing", + "w3f-bls", + "zeroize", +] + +[[package]] +name = "sp-core" +version = "37.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e1a46a6b2323401e4489184846a7fb7d89091b42602a2391cd3ef652ede2850" +dependencies = [ + "ark-vrf", + "array-bytes", + "bitflags 1.3.2", + "blake2 0.10.6", + "bounded-collections 0.2.4", + "bs58", + "dyn-clone", + "ed25519-zebra", + "futures", + "hash-db", + "hash256-std-hasher", + "impl-serde", + "itertools 0.11.0", + "k256", + "libsecp256k1", + "log", + "merlin", + "parity-bip39", + "parity-scale-codec", + "parking_lot 0.12.4", + "paste", + "primitive-types 0.13.1", + "rand 0.8.5", + "scale-info", + "schnorrkel 0.11.5", + "secp256k1 0.28.2", + "secrecy 0.8.0", + "serde", + "sha2 0.10.9", + "sp-crypto-hashing 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-debug-derive 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-externalities 0.30.0", + "sp-runtime-interface 30.0.0", + "sp-std 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-storage 22.0.0", + "ss58-registry", + "substrate-bip39 0.6.0", "thiserror 1.0.69", "tracing", "w3f-bls", @@ -19140,7 +19305,7 @@ dependencies = [ "ark-ed-on-bls12-381-bandersnatch 0.4.0", "ark-ed-on-bls12-381-bandersnatch-ext", "ark-scale 0.0.12", - "sp-runtime-interface", + "sp-runtime-interface 24.0.0", ] [[package]] @@ -19189,6 +19354,17 @@ dependencies = [ "parking_lot 0.12.4", ] +[[package]] +name = "sp-debug-derive" +version = "14.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48d09fa0a5f7299fb81ee25ae3853d26200f7a348148aed6de76be905c007dbe" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + [[package]] name = "sp-debug-derive" version = "14.0.0" @@ -19206,7 +19382,18 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "environmental", "parity-scale-codec", - "sp-storage", + "sp-storage 19.0.0", +] + +[[package]] +name = "sp-externalities" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30cbf059dce180a8bf8b6c8b08b6290fa3d1c7f069a60f1df038ab5dd5fc0ba6" +dependencies = [ + "environmental", + "parity-scale-codec", + "sp-storage 22.0.0", ] [[package]] @@ -19218,7 +19405,7 @@ dependencies = [ "scale-info", "serde_json", "sp-api", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -19230,7 +19417,7 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", "thiserror 1.0.69", ] @@ -19248,14 +19435,41 @@ dependencies = [ "polkavm-derive 0.26.0", "rustversion", "secp256k1 0.28.2", - "sp-core", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-externalities", - "sp-keystore", - "sp-runtime-interface", - "sp-state-machine", - "sp-tracing", - "sp-trie", + "sp-externalities 0.25.0", + "sp-keystore 0.34.0", + "sp-runtime-interface 24.0.0", + "sp-state-machine 0.35.0", + "sp-tracing 16.0.0", + "sp-trie 29.0.0", + "tracing", + "tracing-core", +] + +[[package]] +name = "sp-io" +version = "41.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3f244e9a2818d21220ceb0915ac73a462814a92d0c354a124a818abdb7f4f66" +dependencies = [ + "bytes", + "docify", + "ed25519-dalek", + "libsecp256k1", + "log", + "parity-scale-codec", + "polkavm-derive 0.24.0", + "rustversion", + "secp256k1 
0.28.2", + "sp-core 37.0.0", + "sp-crypto-hashing 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-externalities 0.30.0", + "sp-keystore 0.43.0", + "sp-runtime-interface 30.0.0", + "sp-state-machine 0.46.0", + "sp-tracing 17.1.0", + "sp-trie 40.0.0", "tracing", "tracing-core", ] @@ -19265,8 +19479,8 @@ name = "sp-keyring" version = "31.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", "strum 0.26.3", ] @@ -19277,8 +19491,20 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "parity-scale-codec", "parking_lot 0.12.4", - "sp-core", - "sp-externalities", + "sp-core 28.0.0", + "sp-externalities 0.25.0", +] + +[[package]] +name = "sp-keystore" +version = "0.43.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "269d0ee360f6d072f9203485afea35583ac151521a525cc48b2a107fc576c2d9" +dependencies = [ + "parity-scale-codec", + "parking_lot 0.12.4", + "sp-core 37.0.0", + "sp-externalities 0.30.0", ] [[package]] @@ -19308,7 +19534,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-application-crypto", + "sp-application-crypto 30.0.0", ] [[package]] @@ -19322,9 +19548,9 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-core", - "sp-debug-derive", - "sp-runtime", + "sp-core 28.0.0", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-runtime 31.0.1", "thiserror 1.0.69", ] @@ -19336,9 +19562,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-arithmetic", - "sp-core", - "sp-runtime", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -19347,8 +19573,8 @@ version = "26.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ "sp-api", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -19360,6 +19586,16 @@ dependencies = [ "regex", ] +[[package]] +name = "sp-panic-handler" +version = "13.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8b52e69a577cbfdea62bfaf16f59eb884422ce98f78b5cd8d9bf668776bced1" +dependencies = [ + "backtrace", + "regex", +] + [[package]] name = "sp-rpc" version = "26.0.0" @@ -19367,7 +19603,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "rustc-hash 1.1.0", "serde", - "sp-core", + "sp-core 28.0.0", ] [[package]] @@ -19375,7 +19611,7 @@ name = "sp-runtime" version = "31.0.1" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ - "binary-merkle-tree", + "binary-merkle-tree 13.0.0", "bytes", "docify", "either", @@ -19389,13 +19625,43 @@ dependencies = [ "scale-info", "serde", "simple-mermaid", - "sp-application-crypto", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-std", - "sp-trie", - "sp-weights", + "sp-application-crypto 30.0.0", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-trie 29.0.0", + "sp-weights 27.0.0", + "tracing", + "tuplex", +] + +[[package]] +name = "sp-runtime" +version = "42.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b25d4d3811410317175ff121b3ff8c8b723504dadf37cd418b5192a5098d11bf" +dependencies = [ + "binary-merkle-tree 16.1.0", + "docify", + "either", + "hash256-std-hasher", + "impl-trait-for-tuples", + "log", + "num-traits", + "parity-scale-codec", + "paste", + "rand 0.8.5", + "scale-info", + "serde", + "simple-mermaid", + "sp-application-crypto 41.0.0", + "sp-arithmetic 27.0.0", + "sp-core 37.0.0", + "sp-io 41.0.1", + "sp-std 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-trie 40.0.0", + "sp-weights 32.0.0", "tracing", "tuplex", ] @@ -19409,12 +19675,32 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "polkavm-derive 0.26.0", - "sp-externalities", - "sp-runtime-interface-proc-macro", - "sp-std", - "sp-storage", - "sp-tracing", - "sp-wasm-interface", + "sp-externalities 0.25.0", + "sp-runtime-interface-proc-macro 17.0.0", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-storage 19.0.0", + "sp-tracing 16.0.0", + "sp-wasm-interface 20.0.0", + "static_assertions", +] + +[[package]] +name = "sp-runtime-interface" +version = "30.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fcd9c219da8c85d45d5ae1ce80e73863a872ac27424880322903c6ac893c06e" +dependencies = [ + "bytes", + "impl-trait-for-tuples", + "parity-scale-codec", + "polkavm-derive 0.24.0", + "primitive-types 0.13.1", + "sp-externalities 0.30.0", + "sp-runtime-interface-proc-macro 19.0.0", + "sp-std 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-storage 22.0.0", + "sp-tracing 17.1.0", + "sp-wasm-interface 22.0.0", "static_assertions", ] @@ -19431,6 +19717,20 @@ dependencies = [ "syn 2.0.104", ] +[[package]] +name = "sp-runtime-interface-proc-macro" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca35431af10a450787ebfdcb6d7a91c23fa91eafe73a3f9d37db05c9ab36154b" +dependencies = [ + "Inflector", + "expander", + "proc-macro-crate 3.3.0", + "proc-macro2", + "quote", + "syn 2.0.104", +] + [[package]] name = "sp-session" version = "27.0.0" @@ -19439,9 +19739,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-core", - "sp-keystore", - "sp-runtime", + "sp-core 28.0.0", + "sp-keystore 0.34.0", + "sp-runtime 31.0.1", "sp-staking", ] @@ -19454,8 +19754,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -19469,10 +19769,31 @@ dependencies = [ "parking_lot 0.12.4", "rand 0.8.5", "smallvec", - "sp-core", - "sp-externalities", - "sp-panic-handler", - "sp-trie", + "sp-core 28.0.0", + "sp-externalities 0.25.0", + "sp-panic-handler 13.0.0", + "sp-trie 29.0.0", + "thiserror 1.0.69", + "tracing", + "trie-db", +] + +[[package]] +name = "sp-state-machine" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "483422b016ee9ddba949db6d3092961ed58526520f0586df74dc07defd922a58" +dependencies = [ + "hash-db", + "log", + "parity-scale-codec", + "parking_lot 0.12.4", + "rand 0.8.5", + "smallvec", + "sp-core 37.0.0", + "sp-externalities 0.30.0", + "sp-panic-handler 13.0.2", + "sp-trie 40.0.0", "thiserror 1.0.69", "tracing", "trie-db", @@ -19492,16 +19813,22 @@ dependencies = [ "scale-info", "sha2 0.10.9", "sp-api", - "sp-application-crypto", - "sp-core", + "sp-application-crypto 30.0.0", + "sp-core 28.0.0", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - 
"sp-externalities", - "sp-runtime", - "sp-runtime-interface", + "sp-externalities 0.25.0", + "sp-runtime 31.0.1", + "sp-runtime-interface 24.0.0", "thiserror 1.0.69", "x25519-dalek", ] +[[package]] +name = "sp-std" +version = "14.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12f8ee986414b0a9ad741776762f4083cd3a5128449b982a3919c4df36874834" + [[package]] name = "sp-std" version = "14.0.0" @@ -19516,7 +19843,20 @@ dependencies = [ "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", +] + +[[package]] +name = "sp-storage" +version = "22.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee3b70ca340e41cde9d2e069d354508a6e37a6573d66f7cc38f11549002f64ec" +dependencies = [ + "impl-serde", + "parity-scale-codec", + "ref-cast", + "serde", + "sp-debug-derive 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -19527,7 +19867,7 @@ dependencies = [ "async-trait", "parity-scale-codec", "sp-inherents", - "sp-runtime", + "sp-runtime 31.0.1", "thiserror 1.0.69", ] @@ -19543,13 +19883,25 @@ dependencies = [ "tracing-subscriber 0.3.20", ] +[[package]] +name = "sp-tracing" +version = "17.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6147a5b8c98b9ed4bf99dc033fab97a468b4645515460974c8784daeb7c35433" +dependencies = [ + "parity-scale-codec", + "tracing", + "tracing-core", + "tracing-subscriber 0.3.20", +] + [[package]] name = "sp-transaction-pool" version = "26.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" dependencies = [ "sp-api", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -19560,10 +19912,10 @@ dependencies = [ "async-trait", "parity-scale-codec", "scale-info", - "sp-core", + "sp-core 28.0.0", "sp-inherents", - "sp-runtime", - "sp-trie", + "sp-runtime 31.0.1", + "sp-trie 29.0.0", ] [[package]] @@ -19582,9 +19934,35 @@ dependencies = [ "rand 0.8.5", "scale-info", "schnellru", - "sp-core", - "sp-externalities", - "substrate-prometheus-endpoint", + "sp-core 28.0.0", + "sp-externalities 0.25.0", + "substrate-prometheus-endpoint 0.17.0", + "thiserror 1.0.69", + "tracing", + "trie-db", + "trie-root", +] + +[[package]] +name = "sp-trie" +version = "40.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b2e157c9cf44a1a9d20f3c69322e302db70399bf3f218211387fe009dd4041c" +dependencies = [ + "ahash", + "foldhash 0.1.5", + "hash-db", + "hashbrown 0.15.4", + "memory-db", + "nohash-hasher", + "parity-scale-codec", + "parking_lot 0.12.4", + "rand 0.8.5", + "scale-info", + "schnellru", + "sp-core 37.0.0", + "sp-externalities 0.30.0", + "substrate-prometheus-endpoint 0.17.7", "thiserror 1.0.69", "tracing", "trie-db", @@ -19602,8 +19980,8 @@ dependencies = [ "scale-info", "serde", "sp-crypto-hashing-proc-macro", - "sp-runtime", - "sp-std", + "sp-runtime 31.0.1", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "sp-version-proc-macro", "thiserror 1.0.69", ] @@ -19632,6 +20010,18 @@ dependencies = [ "wasmtime", ] +[[package]] +name = "sp-wasm-interface" +version = "22.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdbc579c72fc03263894a0077383f543a093020d75741092511bb05a440ada6" +dependencies = [ + "anyhow", + "impl-trait-for-tuples", + "log", + "parity-scale-codec", +] + [[package]] name = 
"sp-weights" version = "27.0.0" @@ -19642,8 +20032,23 @@ dependencies = [ "scale-info", "serde", "smallvec", - "sp-arithmetic", - "sp-debug-derive", + "sp-arithmetic 23.0.0", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", +] + +[[package]] +name = "sp-weights" +version = "32.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8a1d448faceb064bb114df31fc45ff86ea2ee8fd17810c4357a578d081f7732" +dependencies = [ + "bounded-collections 0.2.4", + "parity-scale-codec", + "scale-info", + "serde", + "smallvec", + "sp-arithmetic 27.0.0", + "sp-debug-derive 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -19922,7 +20327,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -19940,8 +20345,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-runtime", - "sp-weights", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", "tracing", "xcm-procedural", ] @@ -19960,11 +20365,11 @@ dependencies = [ "parity-scale-codec", "polkadot-parachain-primitives", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", - "sp-weights", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", "staging-xcm", "staging-xcm-executor", "tracing", @@ -19981,11 +20386,11 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-runtime", - "sp-weights", + "sp-arithmetic 23.0.0", + "sp-core 28.0.0", + "sp-io 30.0.0", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", "staging-xcm", "tracing", ] @@ -20166,6 +20571,19 @@ dependencies = [ "zeroize", ] +[[package]] +name = "substrate-bip39" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca58ffd742f693dc13d69bdbb2e642ae239e0053f6aab3b104252892f856700a" +dependencies = [ + "hmac 0.12.1", + "pbkdf2 0.12.2", + "schnorrkel 0.11.5", + "sha2 0.10.9", + "zeroize", +] + [[package]] name = "substrate-bn" version = "0.6.0" @@ -20190,7 +20608,7 @@ dependencies = [ "sc-rpc-api", "scale-info", "serde", - "sp-storage", + "sp-storage 19.0.0", ] [[package]] @@ -20209,8 +20627,8 @@ dependencies = [ "sp-api", "sp-block-builder", "sp-blockchain", - "sp-core", - "sp-runtime", + "sp-core 28.0.0", + "sp-runtime 31.0.1", ] [[package]] @@ -20227,6 +20645,21 @@ dependencies = [ "tokio", ] +[[package]] +name = "substrate-prometheus-endpoint" +version = "0.17.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d23e4bc8e910a312820d589047ab683928b761242dbe31dee081fbdb37cbe0be" +dependencies = [ + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "log", + "prometheus", + "thiserror 1.0.69", + "tokio", +] + [[package]] name = "substrate-rpc-client" version = "0.33.0" @@ -20237,7 +20670,7 @@ dependencies = [ "log", "sc-rpc-api", "serde", - "sp-runtime", + "sp-runtime 31.0.1", ] [[package]] @@ -20797,10 +21230,10 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514 dependencies = [ "cumulus-primitives-core", "frame-support", - "polkadot-core-primitives", + "polkadot-core-primitives 7.0.0", "rococo-runtime-constants", "smallvec", - "sp-runtime", + "sp-runtime 31.0.1", "staging-xcm", "westend-runtime-constants", ] @@ -22672,9 +23105,9 @@ dependencies = [ "polkadot-primitives", "polkadot-runtime-common", "smallvec", - "sp-core", - "sp-runtime", - "sp-weights", + "sp-core 
28.0.0", + "sp-runtime 31.0.1", + "sp-weights 27.0.0", "staging-xcm", "staging-xcm-builder", ] @@ -23364,7 +23797,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-weights", + "sp-weights 27.0.0", "staging-xcm", "staging-xcm-executor", ] @@ -23378,13 +23811,13 @@ dependencies = [ "frame-system", "parity-scale-codec", "paste", - "polkadot-core-primitives", + "polkadot-core-primitives 7.0.0", "polkadot-parachain-primitives", "polkadot-primitives", "polkadot-runtime-parachains", "scale-info", - "sp-io", - "sp-runtime", + "sp-io 30.0.0", + "sp-runtime 31.0.1", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", diff --git a/crates/anvil-polkadot/Cargo.toml b/crates/anvil-polkadot/Cargo.toml index cc845db6cf96e..49a6636dea8b7 100644 --- a/crates/anvil-polkadot/Cargo.toml +++ b/crates/anvil-polkadot/Cargo.toml @@ -129,6 +129,9 @@ rand_08.workspace = true eyre.workspace = true lru = "0.16.0" indexmap = "2.0" +log = { version = "0.4.21", default-features = false } +hex-literal = { version = "0.4.1", default-features = false } +polkadot-core-primitives = "18.0.0" # cli clap = { version = "4", features = [ diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs new file mode 100644 index 0000000000000..da68b05c9d672 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -0,0 +1,1874 @@ +use super::{ + rpc_client::RPCClient, +}; +use polkadot_sdk::{ + sc_client_api::{ + StorageKey, TrieCacheContext, UsageInfo, + backend::{self, NewBlockState}, + blockchain::{self, BlockStatus, HeaderBackend}, + leaves::LeafSet, + }, + sc_service::Error, + sp_blockchain::{self, CachedHeaderMetadata, HeaderMetadata}, + sp_core::{self, H256, offchain::storage::InMemOffchainStorage, storage::well_known_keys}, + sp_runtime::{ + Justification, Justifications, StateVersion, Storage, + generic::BlockId, + traits::{Block as BlockT, HashingFor, Header as HeaderT, NumberFor, Zero}, + }, + sp_state_machine::{ + self, BackendTransaction, ChildStorageCollection, IndexOperation, StorageCollection, + TrieBackend, + }, + sp_storage::{self, ChildInfo}, + sp_trie::{self, PrefixedMemoryDB}, +}; +use serde::de::DeserializeOwned; +use std::{ + collections::{HashMap, HashSet}, + marker::PhantomData, + ptr, + sync::Arc, +}; +use alloy_primitives::hex; + +struct PendingBlock { + block: StoredBlock, + state: NewBlockState, +} + +#[derive(PartialEq, Eq, Clone)] +enum StoredBlock { + Header(B::Header, Option), + Full(B, Option), +} + +impl StoredBlock { + fn new( + header: B::Header, + body: Option>, + just: Option, + ) -> Self { + match body { + Some(body) => StoredBlock::Full(B::new(header, body), just), + None => StoredBlock::Header(header, just), + } + } + + fn header(&self) -> &B::Header { + match *self { + StoredBlock::Header(ref h, _) => h, + StoredBlock::Full(ref b, _) => b.header(), + } + } + + fn justifications(&self) -> Option<&Justifications> { + match *self { + StoredBlock::Header(_, ref j) | StoredBlock::Full(_, ref j) => j.as_ref(), + } + } + + fn extrinsics(&self) -> Option<&[B::Extrinsic]> { + match *self { + StoredBlock::Header(_, _) => None, + StoredBlock::Full(ref b, _) => Some(b.extrinsics()), + } + } + + fn into_inner(self) -> (B::Header, Option>, Option) { + match self { + StoredBlock::Header(header, just) => (header, None, just), + StoredBlock::Full(block, just) => { + let (header, body) = block.deconstruct(); + (header, Some(body), just) + } + } + } +} + 
+#[derive(Clone)] +struct BlockchainStorage { + blocks: HashMap>, + hashes: HashMap, Block::Hash>, + best_hash: Block::Hash, + best_number: NumberFor, + finalized_hash: Block::Hash, + finalized_number: NumberFor, + genesis_hash: Block::Hash, + header_cht_roots: HashMap, Block::Hash>, + leaves: LeafSet>, + aux: HashMap, Vec>, +} + +/// In-memory blockchain. Supports concurrent reads. +#[derive(Clone)] +pub struct Blockchain { + rpc_client: Option>>, + storage: Arc>>, +} + +impl Blockchain { + /// Create new in-memory blockchain storage. + fn new(rpc_client: Option>>) -> Blockchain { + log::info!( + target: super::LAZY_LOADING_LOG_TARGET, + "🏗️ Creating new Blockchain storage (empty)" + ); + + let storage = Arc::new(parking_lot::RwLock::new(BlockchainStorage { + blocks: HashMap::new(), + hashes: HashMap::new(), + best_hash: Default::default(), + best_number: Zero::zero(), + finalized_hash: Default::default(), + finalized_number: Zero::zero(), + genesis_hash: Default::default(), + header_cht_roots: HashMap::new(), + leaves: LeafSet::new(), + aux: HashMap::new(), + })); + Blockchain { rpc_client, storage } + } + #[inline] + fn rpc(&self) -> Option<&dyn RPCClient> { + self.rpc_client.as_deref() + } + + /// Get header hash of given block. + pub fn id(&self, id: BlockId) -> Option { + match id { + BlockId::Hash(h) => Some(h), + BlockId::Number(n) => { + log::info!( + target: super::LAZY_LOADING_LOG_TARGET, + "Looking up block hash for number={}", n + ); + + let block_hash = self.storage.read().hashes.get(&n).cloned(); + + log::info!( + target: super::LAZY_LOADING_LOG_TARGET, + "Lookup result: number={}, found={}, total_hashes={}", + n, + block_hash.is_some(), + self.storage.read().hashes.len() + ); + + match block_hash { + None => { + log::info!( + target: super::LAZY_LOADING_LOG_TARGET, + "Block hash not found locally, trying RPC for number={}", n + ); + let block_hash = + self.rpc().and_then(|rpc| rpc.block_hash(Some(n)).ok().flatten()); + if let Some(h) = block_hash { + self.storage.write().hashes.insert(n, h); + } + block_hash + } + block_hash => block_hash, + } + } + } + } + + /// Insert a block header and associated data. + pub fn insert( + &self, + hash: Block::Hash, + header: ::Header, + justifications: Option, + body: Option::Extrinsic>>, + new_state: NewBlockState, + ) -> sp_blockchain::Result<()> { + let number = *header.number(); + + log::info!( + target: super::LAZY_LOADING_LOG_TARGET, + "Inserting block: number={}, hash={:?}, new_state={:?}", + number, + hash, + new_state + ); + + if new_state.is_best() { + self.apply_head(&header)?; + } + + let mut storage = self.storage.write(); + + // Always insert the block into blocks and hashes storage + storage.blocks.insert(hash, StoredBlock::new(header.clone(), body, justifications)); + storage.hashes.insert(number, hash); + + log::info!( + target: super::LAZY_LOADING_LOG_TARGET, + "Block inserted successfully: number={}, hash={:?}. Total blocks={}, Total hashes={}", + number, + hash, + storage.blocks.len(), + storage.hashes.len() + ); + + // Set genesis_hash only for the first block inserted + if storage.blocks.len() == 1 { + storage.genesis_hash = hash; + } + + // Update leaves for non-genesis blocks + if storage.blocks.len() > 1 { + storage.leaves.import(hash, number, *header.parent_hash()); + } + + // Finalize block only if explicitly requested via new_state + if let NewBlockState::Final = new_state { + storage.finalized_hash = hash; + storage.finalized_number = number; + } + + Ok(()) + } + + /// Get total number of blocks. 
+ pub fn blocks_count(&self) -> usize { + let count = self.storage.read().blocks.len(); + + log::debug!( + target: super::LAZY_LOADING_LOG_TARGET, + "Total number of blocks: {:?}", + count + ); + + count + } + + /// Compare this blockchain with another in-mem blockchain + pub fn equals_to(&self, other: &Self) -> bool { + // Check ptr equality first to avoid double read locks. + if ptr::eq(self, other) { + return true; + } + self.canon_equals_to(other) && self.storage.read().blocks == other.storage.read().blocks + } + + /// Compare canonical chain to other canonical chain. + pub fn canon_equals_to(&self, other: &Self) -> bool { + // Check ptr equality first to avoid double read locks. + if ptr::eq(self, other) { + return true; + } + let this = self.storage.read(); + let other = other.storage.read(); + this.hashes == other.hashes + && this.best_hash == other.best_hash + && this.best_number == other.best_number + && this.genesis_hash == other.genesis_hash + } + + /// Insert header CHT root. + pub fn insert_cht_root(&self, block: NumberFor, cht_root: Block::Hash) { + self.storage.write().header_cht_roots.insert(block, cht_root); + } + + /// Set an existing block as head. + pub fn set_head(&self, hash: Block::Hash) -> sp_blockchain::Result<()> { + let header = self + .header(hash)? + .ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{}", hash)))?; + + self.apply_head(&header) + } + + fn apply_head(&self, header: &::Header) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + + let hash = header.hash(); + let number = header.number(); + + storage.best_hash = hash; + storage.best_number = *number; + storage.hashes.insert(*number, hash); + + Ok(()) + } + + fn finalize_header( + &self, + block: Block::Hash, + justification: Option, + ) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + storage.finalized_hash = block; + + if justification.is_some() { + let block = storage + .blocks + .get_mut(&block) + .expect("hash was fetched from a block in the db; qed"); + + let block_justifications = match block { + StoredBlock::Header(_, j) | StoredBlock::Full(_, j) => j, + }; + + *block_justifications = justification.map(Justifications::from); + } + + Ok(()) + } + + fn append_justification( + &self, + hash: Block::Hash, + justification: Justification, + ) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + + let block = + storage.blocks.get_mut(&hash).expect("hash was fetched from a block in the db; qed"); + + let block_justifications = match block { + StoredBlock::Header(_, j) | StoredBlock::Full(_, j) => j, + }; + + if let Some(stored_justifications) = block_justifications { + if !stored_justifications.append(justification) { + return Err(sp_blockchain::Error::BadJustification( + "Duplicate consensus engine ID".into(), + )); + } + } else { + *block_justifications = Some(Justifications::from(justification)); + }; + + Ok(()) + } + + fn write_aux(&self, ops: Vec<(Vec, Option>)>) { + let mut storage = self.storage.write(); + for (k, v) in ops { + match v { + Some(v) => storage.aux.insert(k, v), + None => storage.aux.remove(&k), + }; + } + } +} + +impl HeaderBackend for Blockchain { + fn header( + &self, + hash: Block::Hash, + ) -> sp_blockchain::Result::Header>> { + // First, try to get the header from local storage + if let Some(header) = self.storage.read().blocks.get(&hash).map(|b| b.header().clone()) { + return Ok(Some(header)); + } + + // If not found in local storage, fetch from RPC client + let header = if let Some(rpc) = 
self.rpc() { + rpc.block(Some(hash)).ok().flatten().map(|full| { + let block = full.block.clone(); + self.storage + .write() + .blocks + .insert(hash, StoredBlock::Full(block.clone(), full.justifications)); + block.header().clone() + }) + } else { + None + }; + + if header.is_none() { + log::warn!( + target: super::LAZY_LOADING_LOG_TARGET, + "Expected block {:x?} to exist.", + &hash + ); + } + + Ok(header) + } + + fn info(&self) -> blockchain::Info { + let storage = self.storage.read(); + // Return None for finalized_state when blockchain is empty or only has genesis block + // This allows Client::new to properly initialize and complete genesis setup + // finalized_state should only be Some() when there are blocks beyond genesis + let finalized_state = if storage.blocks.len() <= 1 { + None + } else { + Some((storage.finalized_hash, storage.finalized_number)) + }; + + log::info!( + target: super::LAZY_LOADING_LOG_TARGET, + "📊 Blockchain::info() - blocks={}, best_hash={:?}, best_number={}, genesis_hash={:?}, finalized_hash={:?}, finalized_number={}, finalized_state={:?}", + storage.blocks.len(), + storage.best_hash, + storage.best_number, + storage.genesis_hash, + storage.finalized_hash, + storage.finalized_number, + finalized_state + ); + + blockchain::Info { + best_hash: storage.best_hash, + best_number: storage.best_number, + genesis_hash: storage.genesis_hash, + finalized_hash: storage.finalized_hash, + finalized_number: storage.finalized_number, + finalized_state, + number_leaves: storage.leaves.count(), + block_gap: None, + } + } + + fn status(&self, hash: Block::Hash) -> sp_blockchain::Result { + match self.storage.read().blocks.contains_key(&hash) { + true => Ok(BlockStatus::InChain), + false => Ok(BlockStatus::Unknown), + } + } + + fn number(&self, hash: Block::Hash) -> sp_blockchain::Result>> { + if let Some(b) = self.storage.read().blocks.get(&hash) { + return Ok(Some(*b.header().number())); + } + match self.rpc() { + Some(rpc) => match rpc.block(Some(hash)) { + Ok(Some(block)) => Ok(Some(*block.block.header().number())), + err => Err(sp_blockchain::Error::UnknownBlock( + format!("Failed to fetch block number from RPC: {:?}", err).into(), + )), + }, + None => Err(sp_blockchain::Error::UnknownBlock( + "RPC not configured to resolve block number".into(), + )), + } + } + + fn hash( + &self, + number: <::Header as HeaderT>::Number, + ) -> sp_blockchain::Result> { + Ok(self.id(BlockId::Number(number))) + } +} + +impl HeaderMetadata for Blockchain { + type Error = sp_blockchain::Error; + + fn header_metadata( + &self, + hash: Block::Hash, + ) -> Result, Self::Error> { + self.header(hash)?.map(|header| CachedHeaderMetadata::from(&header)).ok_or_else(|| { + sp_blockchain::Error::UnknownBlock(format!("header not found: {}", hash)) + }) + } + + fn insert_header_metadata(&self, _hash: Block::Hash, _metadata: CachedHeaderMetadata) { + // No need to implement. + unimplemented!("insert_header_metadata") + } + fn remove_header_metadata(&self, _hash: Block::Hash) { + // No need to implement. 
+ unimplemented!("remove_header_metadata") + } +} + +impl blockchain::Backend for Blockchain { + fn body( + &self, + hash: Block::Hash, + ) -> sp_blockchain::Result::Extrinsic>>> { + if let Some(xs) = + self.storage.read().blocks.get(&hash).and_then(|b| b.extrinsics().map(|x| x.to_vec())) + { + return Ok(Some(xs)); + } + let extrinsics = self.rpc().and_then(|rpc| { + rpc.block(Some(hash)).ok().flatten().map(|b| b.block.extrinsics().to_vec()) + }); + Ok(extrinsics) + } + + fn justifications(&self, hash: Block::Hash) -> sp_blockchain::Result> { + Ok(self.storage.read().blocks.get(&hash).and_then(|b| b.justifications().cloned())) + } + + fn last_finalized(&self) -> sp_blockchain::Result { + let last_finalized = self.storage.read().finalized_hash; + + Ok(last_finalized) + } + + fn leaves(&self) -> sp_blockchain::Result> { + let leaves = self.storage.read().leaves.hashes(); + + Ok(leaves) + } + + fn children(&self, _parent_hash: Block::Hash) -> sp_blockchain::Result> { + unimplemented!("Not supported by the `lazy-loading` backend.") + } + + fn indexed_transaction(&self, _hash: Block::Hash) -> sp_blockchain::Result>> { + unimplemented!("Not supported by the `lazy-loading` backend.") + } + + fn block_indexed_body( + &self, + _hash: Block::Hash, + ) -> sp_blockchain::Result>>> { + unimplemented!("Not supported by the `lazy-loading` backend.") + } +} + +impl backend::AuxStore for Blockchain { + fn insert_aux< + 'a, + 'b: 'a, + 'c: 'a, + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + for (k, v) in insert { + storage.aux.insert(k.to_vec(), v.to_vec()); + } + for k in delete { + storage.aux.remove(*k); + } + Ok(()) + } + + fn get_aux(&self, key: &[u8]) -> sp_blockchain::Result>> { + Ok(self.storage.read().aux.get(key).cloned()) + } +} + +pub struct BlockImportOperation { + pending_block: Option>, + old_state: ForkedLazyBackend, + new_state: Option>>, + aux: Vec<(Vec, Option>)>, + storage_updates: StorageCollection, + finalized_blocks: Vec<(Block::Hash, Option)>, + set_head: Option, + pub(crate) before_fork: bool, +} + +impl BlockImportOperation { + fn apply_storage( + &mut self, + storage: Storage, + commit: bool, + state_version: StateVersion, + ) -> sp_blockchain::Result { + use sp_state_machine::Backend; + check_genesis_storage(&storage)?; + + let child_delta = storage.children_default.values().map(|child_content| { + ( + &child_content.child_info, + child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), + ) + }); + + let (root, transaction) = self.old_state.full_storage_root( + storage.top.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), + child_delta, + state_version, + ); + + if commit { + self.new_state = Some(transaction); + self.storage_updates = + storage + .top + .iter() + .map(|(k, v)| { + if v.is_empty() { (k.clone(), None) } else { (k.clone(), Some(v.clone())) } + }) + .collect(); + } + Ok(root) + } +} + +impl backend::BlockImportOperation + for BlockImportOperation +{ + type State = ForkedLazyBackend; + + fn state(&self) -> sp_blockchain::Result> { + Ok(Some(&self.old_state)) + } + + fn set_block_data( + &mut self, + header: ::Header, + body: Option::Extrinsic>>, + _indexed_body: Option>>, + justifications: Option, + state: NewBlockState, + ) -> sp_blockchain::Result<()> { + assert!(self.pending_block.is_none(), "Only one block per operation is allowed"); + self.pending_block = + Some(PendingBlock { block: StoredBlock::new(header, body, justifications), state 
}); + Ok(()) + } + + fn update_db_storage( + &mut self, + update: BackendTransaction>, + ) -> sp_blockchain::Result<()> { + self.new_state = Some(update); + Ok(()) + } + + fn set_genesis_state( + &mut self, + storage: Storage, + commit: bool, + state_version: StateVersion, + ) -> sp_blockchain::Result { + self.apply_storage(storage, commit, state_version) + } + + fn reset_storage( + &mut self, + storage: Storage, + state_version: StateVersion, + ) -> sp_blockchain::Result { + self.apply_storage(storage, true, state_version) + } + + fn insert_aux(&mut self, ops: I) -> sp_blockchain::Result<()> + where + I: IntoIterator, Option>)>, + { + self.aux.append(&mut ops.into_iter().collect()); + Ok(()) + } + + fn update_storage( + &mut self, + update: StorageCollection, + _child_update: ChildStorageCollection, + ) -> sp_blockchain::Result<()> { + self.storage_updates = update.clone(); + Ok(()) + } + + fn mark_finalized( + &mut self, + hash: Block::Hash, + justification: Option, + ) -> sp_blockchain::Result<()> { + self.finalized_blocks.push((hash, justification)); + Ok(()) + } + + fn mark_head(&mut self, hash: Block::Hash) -> sp_blockchain::Result<()> { + assert!(self.pending_block.is_none(), "Only one set block per operation is allowed"); + self.set_head = Some(hash); + Ok(()) + } + + fn update_transaction_index( + &mut self, + _index: Vec, + ) -> sp_blockchain::Result<()> { + Ok(()) + } + + fn set_create_gap(&mut self, _create_gap: bool) { + // Intentionally a no-op: the lazy-loading backend does not track block gaps. + } +} + +/// DB-backed Patricia trie state; the transaction type is an overlay of changes to commit. +pub type DbState = TrieBackend>>, HashingFor>; + +/// A struct containing arguments for iterating over the storage. +#[derive(Default)] +pub struct RawIterArgs { + /// The prefix of the keys over which to iterate. + pub prefix: Option>, + + /// The prefix from which to start the iteration. + /// + /// This is inclusive and the iteration will include the key which is specified here. + pub start_at: Option>, + + /// If this is `true` then the iteration will *not* include + /// the key specified in `start_at`, if there is such a key. + pub start_at_exclusive: bool, +} + +/// A raw iterator over the lazy-loading backend state. 
+pub struct RawIter { + pub(crate) args: RawIterArgs, + complete: bool, + _phantom: PhantomData, +} + +impl sp_state_machine::StorageIterator> + for RawIter +{ + type Backend = ForkedLazyBackend; + type Error = String; + + fn next_key( + &mut self, + backend: &Self::Backend, + ) -> Option> { + use sp_state_machine::Backend; + + let remote_fetch = + |key: Option, start_key: Option, block: Option| { + backend.rpc().and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()).and_then(|keys| keys.first().map(|key| key.clone())) + }; + + let prefix = self.args.prefix.clone().map(|k| StorageKey(k)); + let start_key = self.args.start_at.clone().map(|k| StorageKey(k)); + + let maybe_next_key = if backend.before_fork { + // If RPC client is available, fetch remotely + if backend.rpc().is_some() { + remote_fetch(prefix, start_key, backend.block_hash) + } else { + // No RPC client, use local DB + let mut iter_args = sp_state_machine::backend::IterArgs::default(); + iter_args.prefix = self.args.prefix.as_deref(); + iter_args.start_at = self.args.start_at.as_deref(); + iter_args.start_at_exclusive = true; + iter_args.stop_on_incomplete_database = true; + + let readable_db = backend.db.read(); + readable_db + .raw_iter(iter_args) + .map(|mut iter| iter.next_key(&readable_db)) + .map(|op| op.and_then(|result| result.ok())) + .ok() + .flatten() + } + } else { + let mut iter_args = sp_state_machine::backend::IterArgs::default(); + iter_args.prefix = self.args.prefix.as_deref(); + iter_args.start_at = self.args.start_at.as_deref(); + iter_args.start_at_exclusive = true; + iter_args.stop_on_incomplete_database = true; + + let readable_db = backend.db.read(); + let next_storage_key = readable_db + .raw_iter(iter_args) + .map(|mut iter| iter.next_key(&readable_db)) + .map(|op| op.and_then(|result| result.ok())) + .ok() + .flatten(); + + // IMPORTANT: free storage read lock + drop(readable_db); + + let removed_key = start_key + .clone() + .or(prefix.clone()) + .map(|key| backend.removed_keys.read().contains_key(&key.0)) + .unwrap_or(false); + if next_storage_key.is_none() && !removed_key { + let maybe_next_key = if backend.rpc().is_some() { + remote_fetch(prefix, start_key, Some(backend.fork_block)) + } else { + None + }; + match maybe_next_key { + Some(key) if !backend.removed_keys.read().contains_key(&key) => Some(key), + _ => None, + } + } else { + next_storage_key + } + }; + + log::trace!( + target: super::LAZY_LOADING_LOG_TARGET, + "next_key: (prefix: {:?}, start_at: {:?}, next_key: {:?})", + self.args.prefix.clone().map(|key| hex::encode(key)), + self.args.start_at.clone().map(|key| hex::encode(key)), + maybe_next_key.clone().map(|key| hex::encode(key)) + ); + + if let Some(next_key) = maybe_next_key { + if self + .args + .prefix + .clone() + .map(|filter_key| next_key.starts_with(&filter_key)) + .unwrap_or(false) + { + self.args.start_at = Some(next_key.clone()); + Some(Ok(next_key)) + } else { + self.complete = true; + None + } + } else { + self.complete = true; + None + } + } + + fn next_pair( + &mut self, + backend: &Self::Backend, + ) -> Option> + { + use sp_state_machine::Backend; + + let remote_fetch = + |key: Option, start_key: Option, block: Option| { + backend.rpc().and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()).and_then(|keys| keys.first().map(|key| key.clone())) + }; + + let prefix = self.args.prefix.clone().map(|k| StorageKey(k)); + let start_key = self.args.start_at.clone().map(|k| StorageKey(k)); + + let maybe_next_key = if backend.before_fork { + // If RPC 
client is available, fetch remotely + if backend.rpc().is_some() { + remote_fetch(prefix, start_key, backend.block_hash) + } else { + // No RPC client, use local DB + let mut iter_args = sp_state_machine::backend::IterArgs::default(); + iter_args.prefix = self.args.prefix.as_deref(); + iter_args.start_at = self.args.start_at.as_deref(); + iter_args.start_at_exclusive = true; + iter_args.stop_on_incomplete_database = true; + + let readable_db = backend.db.read(); + readable_db + .raw_iter(iter_args) + .map(|mut iter| iter.next_key(&readable_db)) + .map(|op| op.and_then(|result| result.ok())) + .ok() + .flatten() + } + } else { + let mut iter_args = sp_state_machine::backend::IterArgs::default(); + iter_args.prefix = self.args.prefix.as_deref(); + iter_args.start_at = self.args.start_at.as_deref(); + iter_args.start_at_exclusive = true; + iter_args.stop_on_incomplete_database = true; + + let readable_db = backend.db.read(); + let next_storage_key = readable_db + .raw_iter(iter_args) + .map(|mut iter| iter.next_key(&readable_db)) + .map(|op| op.and_then(|result| result.ok())) + .ok() + .flatten(); + + // IMPORTANT: free storage read lock + drop(readable_db); + + let removed_key = start_key + .clone() + .or(prefix.clone()) + .map(|key| backend.removed_keys.read().contains_key(&key.0)) + .unwrap_or(false); + if next_storage_key.is_none() && !removed_key { + let maybe_next_key = if backend.rpc().is_some() { + remote_fetch(prefix, start_key, Some(backend.fork_block)) + } else { + None + }; + match maybe_next_key { + Some(key) if !backend.removed_keys.read().contains_key(&key) => Some(key), + _ => None, + } + } else { + next_storage_key + } + }; + + log::trace!( + target: super::LAZY_LOADING_LOG_TARGET, + "next_pair: (prefix: {:?}, start_at: {:?}, next_key: {:?})", + self.args.prefix.clone().map(|key| hex::encode(key)), + self.args.start_at.clone().map(|key| hex::encode(key)), + maybe_next_key.clone().map(|key| hex::encode(key)) + ); + + let maybe_value = maybe_next_key + .clone() + .and_then(|key| (*backend).storage(key.as_slice()).ok()) + .flatten(); + + if let Some(next_key) = maybe_next_key { + if self + .args + .prefix + .clone() + .map(|filter_key| next_key.starts_with(&filter_key)) + .unwrap_or(false) + { + self.args.start_at = Some(next_key.clone()); + + match maybe_value { + Some(value) => Some(Ok((next_key, value))), + _ => None, + } + } else { + self.complete = true; + None + } + } else { + self.complete = true; + None + } + } + + fn was_complete(&self) -> bool { + self.complete + } +} + +#[derive(Debug, Clone)] +pub struct ForkedLazyBackend { + rpc_client: Option>>, + block_hash: Option, + fork_block: Block::Hash, + pub(crate) db: Arc>>>, + pub(crate) removed_keys: Arc, ()>>>, + before_fork: bool, +} + +impl ForkedLazyBackend { + fn update_storage(&self, key: &[u8], value: &Option>) { + if let Some(val) = value { + let mut entries: HashMap, StorageCollection> = Default::default(); + entries.insert(None, vec![(key.to_vec(), Some(val.clone()))]); + + self.db.write().insert(entries, StateVersion::V1); + } + } + + #[inline] + fn rpc(&self) -> Option<&dyn RPCClient> { + self.rpc_client.as_deref() + } +} + +impl sp_state_machine::Backend> + for ForkedLazyBackend +{ + type Error = as sp_state_machine::Backend>>::Error; + type TrieBackendStorage = PrefixedMemoryDB>; + type RawIter = RawIter; + + fn storage(&self, key: &[u8]) -> Result, Self::Error> { + let remote_fetch = |block: Option| -> Option> { + self.rpc() + .and_then(|rpc| rpc.storage(StorageKey(key.to_vec()), block).ok()) + .flatten() 
+ .map(|v| v.0) + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return Ok(remote_fetch(self.block_hash)); + } else { + // No RPC client, try to read from local DB + // This allows reading genesis state that has been committed + let readable_db = self.db.read(); + return Ok(readable_db.storage(key).ok().flatten()); + } + } + + let readable_db = self.db.read(); + let maybe_storage = readable_db.storage(key); + let value = match maybe_storage { + Ok(Some(data)) => Some(data), + _ if !self.removed_keys.read().contains_key(key) => { + // Only try remote fetch if RPC client is available + let result = if self.rpc().is_some() { + remote_fetch(Some(self.fork_block)) + } else { + None + }; + + // Cache state + drop(readable_db); + self.update_storage(key, &result); + + result + } + _ => None, + }; + + Ok(value) + } + + fn storage_hash( + &self, + key: &[u8], + ) -> Result as sp_core::Hasher>::Out>, Self::Error> { + let remote_fetch = |block: Option| -> Result< + Option< as sp_core::Hasher>::Out>, + Self::Error, + > { + match self.rpc() { + Some(rpc) => rpc + .storage_hash(StorageKey(key.to_vec()), block) + .map_err(|e| format!("Failed to fetch storage hash from RPC: {:?}", e).into()), + None => Ok(None), + } + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return remote_fetch(self.block_hash); + } else { + // No RPC client, try to read from local DB + return Ok(self.db.read().storage_hash(key).ok().flatten()); + } + } + + let storage_hash = self.db.read().storage_hash(key); + match storage_hash { + Ok(Some(hash)) => Ok(Some(hash)), + _ if !self.removed_keys.read().contains_key(key) => { + if self.rpc().is_some() { + remote_fetch(Some(self.fork_block)) + } else { + Ok(None) + } + } + _ => Ok(None), + } + } + + fn closest_merkle_value( + &self, + _key: &[u8], + ) -> Result< + Option as sp_core::Hasher>::Out>>, + Self::Error, + > { + unimplemented!("closest_merkle_value: unsupported feature for lazy loading") + } + + fn child_closest_merkle_value( + &self, + _child_info: &sp_storage::ChildInfo, + _key: &[u8], + ) -> Result< + Option as sp_core::Hasher>::Out>>, + Self::Error, + > { + unimplemented!("child_closest_merkle_value: unsupported feature for lazy loading") + } + + fn child_storage( + &self, + _child_info: &sp_storage::ChildInfo, + _key: &[u8], + ) -> Result, Self::Error> { + unimplemented!("child_storage: unsupported feature for lazy loading"); + } + + fn child_storage_hash( + &self, + _child_info: &sp_storage::ChildInfo, + _key: &[u8], + ) -> Result as sp_core::Hasher>::Out>, Self::Error> { + unimplemented!("child_storage_hash: unsupported feature for lazy loading"); + } + + fn next_storage_key( + &self, + key: &[u8], + ) -> Result, Self::Error> { + let remote_fetch = |block: Option| { + let start_key = Some(StorageKey(key.to_vec())); + self.rpc().and_then(|rpc| rpc.storage_keys_paged(start_key.clone(), 2, None, block).ok()).and_then(|keys| keys.last().map(|key| key.clone())) + }; + + let maybe_next_key = if self.before_fork { + // Before the fork checkpoint, try RPC first, then fall back to local DB + if self.rpc().is_some() { + remote_fetch(self.block_hash) + } else { + // No RPC client, try local DB + self.db.read().next_storage_key(key).ok().flatten() + } + } else { + // Try to get the next storage key from the local DB + let next_storage_key = self.db.read().next_storage_key(key); + match next_storage_key { + Ok(Some(next_key)) => 
Some(next_key), + // If not found locally and key is not marked as removed, fetch remotely + _ if !self.removed_keys.read().contains_key(key) => { + if self.rpc().is_some() { + remote_fetch(Some(self.fork_block)) + } else { + None + } + } + // Otherwise, there's no next key + _ => None, + } + } + .filter(|next_key| next_key != key); + + log::trace!( + target: super::LAZY_LOADING_LOG_TARGET, + "next_storage_key: (key: {:?}, next_key: {:?})", + hex::encode(key), + maybe_next_key.clone().map(|key| hex::encode(key)) + ); + + Ok(maybe_next_key) + } + + fn next_child_storage_key( + &self, + _child_info: &sp_storage::ChildInfo, + _key: &[u8], + ) -> Result, Self::Error> { + unimplemented!("next_child_storage_key: unsupported feature for lazy loading"); + } + + fn storage_root<'a>( + &self, + delta: impl Iterator)>, + state_version: StateVersion, + ) -> ( as sp_core::Hasher>::Out, BackendTransaction>) + where + as sp_core::Hasher>::Out: Ord, + { + self.db.read().storage_root(delta, state_version) + } + + fn child_storage_root<'a>( + &self, + _child_info: &sp_storage::ChildInfo, + _delta: impl Iterator)>, + _state_version: StateVersion, + ) -> ( as sp_core::Hasher>::Out, bool, BackendTransaction>) + where + as sp_core::Hasher>::Out: Ord, + { + unimplemented!("child_storage_root: unsupported in lazy loading") + } + + fn raw_iter(&self, args: sp_state_machine::IterArgs<'_>) -> Result { + let mut clone: RawIterArgs = Default::default(); + clone.start_at_exclusive = args.start_at_exclusive.clone(); + clone.prefix = args.prefix.map(|v| v.to_vec()); + clone.start_at = args.start_at.map(|v| v.to_vec()); + + Ok(RawIter:: { args: clone, complete: false, _phantom: Default::default() }) + } + + fn register_overlay_stats(&self, stats: &sp_state_machine::StateMachineStats) { + self.db.read().register_overlay_stats(stats) + } + + fn usage_info(&self) -> sp_state_machine::UsageInfo { + self.db.read().usage_info() + } +} + +impl sp_state_machine::backend::AsTrieBackend> + for ForkedLazyBackend +{ + type TrieBackendStorage = PrefixedMemoryDB>; + + fn as_trie_backend( + &self, + ) -> &sp_state_machine::TrieBackend> { + unimplemented!("`as_trie_backend` is not supported in lazy loading mode.") + } +} + +/// Lazy loading (In-memory) backend. Keeps all states and blocks in memory. 
+pub struct Backend { + pub(crate) rpc_client: Option>>, + pub(crate) fork_checkpoint: Block::Header, + states: parking_lot::RwLock>>, + pub(crate) blockchain: Blockchain, + import_lock: parking_lot::RwLock<()>, + pinned_blocks: parking_lot::RwLock>, +} + +impl Backend { + fn new(rpc_client: Option>>, fork_checkpoint: Block::Header) -> Self { + Backend { + rpc_client: rpc_client.clone(), + states: Default::default(), + blockchain: Blockchain::new(rpc_client), + import_lock: Default::default(), + pinned_blocks: Default::default(), + fork_checkpoint, + } + } + + #[inline] + pub fn rpc(&self) -> Option<&dyn RPCClient> { + self.rpc_client.as_deref() + } +} + +impl backend::AuxStore for Backend { + fn insert_aux< + 'a, + 'b: 'a, + 'c: 'a, + I: IntoIterator, + D: IntoIterator, + >( + &self, + _insert: I, + _delete: D, + ) -> sp_blockchain::Result<()> { + unimplemented!("`insert_aux` is not supported in lazy loading mode.") + } + + fn get_aux(&self, _key: &[u8]) -> sp_blockchain::Result>> { + unimplemented!("`get_aux` is not supported in lazy loading mode.") + } +} + +impl backend::Backend for Backend { + type BlockImportOperation = BlockImportOperation; + type Blockchain = Blockchain; + type State = ForkedLazyBackend; + type OffchainStorage = InMemOffchainStorage; + + fn begin_operation(&self) -> sp_blockchain::Result { + let old_state = self.state_at(Default::default(), TrieCacheContext::Trusted)?; + Ok(BlockImportOperation { + pending_block: None, + old_state, + new_state: None, + aux: Default::default(), + storage_updates: Default::default(), + finalized_blocks: Default::default(), + set_head: None, + before_fork: false, + }) + } + + fn begin_state_operation( + &self, + operation: &mut Self::BlockImportOperation, + block: Block::Hash, + ) -> sp_blockchain::Result<()> { + operation.old_state = self.state_at(block, TrieCacheContext::Trusted)?; + Ok(()) + } + + fn commit_operation(&self, operation: Self::BlockImportOperation) -> sp_blockchain::Result<()> { + for (block, justification) in operation.finalized_blocks { + self.blockchain.finalize_header(block, justification)?; + } + + if let Some(pending_block) = operation.pending_block { + let old_state = &operation.old_state; + let (header, body, justification) = pending_block.block.into_inner(); + let hash = header.hash(); + + let new_removed_keys = old_state.removed_keys.clone(); + for (key, value) in operation.storage_updates.clone() { + if value.is_some() { + new_removed_keys.write().remove(&key.clone()); + } else { + new_removed_keys.write().insert(key.clone(), ()); + } + } + + let new_db = old_state.db.clone(); + new_db + .write() + .insert(vec![(None::, operation.storage_updates)], StateVersion::V1); + let new_state = ForkedLazyBackend { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash.clone()), + fork_block: self.fork_checkpoint.hash(), + db: new_db, + removed_keys: new_removed_keys, + before_fork: operation.before_fork, + }; + self.states.write().insert(hash, new_state); + + self.blockchain.insert(hash, header, justification, body, pending_block.state)?; + } + + if !operation.aux.is_empty() { + self.blockchain.write_aux(operation.aux); + } + + if let Some(set_head) = operation.set_head { + self.blockchain.set_head(set_head)?; + } + + Ok(()) + } + + fn finalize_block( + &self, + hash: Block::Hash, + justification: Option, + ) -> sp_blockchain::Result<()> { + self.blockchain.finalize_header(hash, justification) + } + + fn append_justification( + &self, + hash: Block::Hash, + justification: Justification, + ) -> 
sp_blockchain::Result<()> { + self.blockchain.append_justification(hash, justification) + } + + fn blockchain(&self) -> &Self::Blockchain { + &self.blockchain + } + + fn usage_info(&self) -> Option { + None + } + + fn offchain_storage(&self) -> Option { + None + } + + fn state_at( + &self, + hash: Block::Hash, + _trie_cache_context: TrieCacheContext, + ) -> sp_blockchain::Result { + if hash == Default::default() { + return Ok(ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: self.fork_checkpoint.hash(), + db: Default::default(), + removed_keys: Default::default(), + before_fork: true, + }); + } + + let (backend, should_write) = + self.states.read().get(&hash).cloned().map(|state| Ok((state, false))).unwrap_or_else( + || { + self.rpc().and_then(|rpc| rpc.header(Some(hash)).ok()).flatten() + .ok_or(sp_blockchain::Error::UnknownBlock( + format!("Failed to fetch block header: {:?}", hash).into(), + )) + .map(|header| { + let checkpoint = self.fork_checkpoint.clone(); + let state = if header.number().gt(checkpoint.number()) { + let parent = self + .state_at(*header.parent_hash(), TrieCacheContext::Trusted) + .ok(); + + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: checkpoint.hash(), + db: parent.clone().map_or(Default::default(), |p| p.db), + removed_keys: parent + .map_or(Default::default(), |p| p.removed_keys), + before_fork: false, + } + } else { + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: checkpoint.hash(), + db: Default::default(), + removed_keys: Default::default(), + before_fork: true, + } + }; + + (state, true) + }) + }, + )?; + + if should_write { + self.states.write().insert(hash, backend.clone()); + } + + Ok(backend) + } + + fn revert( + &self, + _n: NumberFor, + _revert_finalized: bool, + ) -> sp_blockchain::Result<(NumberFor, HashSet)> { + Ok((Zero::zero(), HashSet::new())) + } + + fn remove_leaf_block(&self, _hash: Block::Hash) -> sp_blockchain::Result<()> { + Ok(()) + } + + fn get_import_lock(&self) -> &parking_lot::RwLock<()> { + &self.import_lock + } + + fn requires_full_sync(&self) -> bool { + false + } + + fn pin_block(&self, hash: ::Hash) -> blockchain::Result<()> { + let mut blocks = self.pinned_blocks.write(); + *blocks.entry(hash).or_default() += 1; + Ok(()) + } + + fn unpin_block(&self, hash: ::Hash) { + let mut blocks = self.pinned_blocks.write(); + blocks.entry(hash).and_modify(|counter| *counter -= 1).or_insert(-1); + } +} + +impl backend::LocalBackend for Backend {} + +/// Check that genesis storage is valid. 
+pub fn check_genesis_storage(storage: &Storage) -> sp_blockchain::Result<()> { + if storage.top.iter().any(|(k, _)| well_known_keys::is_child_storage_key(k)) { + return Err(sp_blockchain::Error::InvalidState); + } + + if storage + .children_default + .keys() + .any(|child_key| !well_known_keys::is_child_storage_key(child_key)) + { + return Err(sp_blockchain::Error::InvalidState); + } + + Ok(()) +} + +pub fn new_backend( + rpc_client: Option>>, + checkpoint: Block::Header, +) -> Result>, Error> +where + Block: BlockT + DeserializeOwned, + Block::Hash: From, +{ + let backend = Arc::new(Backend::new(rpc_client, checkpoint)); + Ok(backend) +} + +#[cfg(test)] +mod tests { + use super::*; + use mock_rpc::{RPC, TestBlock, TestHeader}; + use polkadot_sdk::{ + sc_client_api::{Backend as BackendT, StateBackend}, + sp_runtime::{ + OpaqueExtrinsic, + traits::{BlakeTwo256, Header as HeaderT}, + }, + sp_state_machine::StorageIterator, + sp_storage::StorageData, + }; + use std::{ + collections::BTreeMap, + sync::atomic::{AtomicUsize, Ordering}, + }; + + mod mock_rpc { + use super::*; + use crate::substrate_node::lazy_loading::rpc_client; + use polkadot_sdk::sp_runtime::{ + generic::{Block as GenericBlock, Header, SignedBlock}, + traits::Header as HeaderT, + }; + + pub type TestHashing = BlakeTwo256; + pub type TestHeader = Header; + pub type TestExtrinsic = OpaqueExtrinsic; + pub type TestBlock = GenericBlock, TestExtrinsic>; + + #[derive(Default, Debug)] + pub struct Counters { + pub storage_calls: AtomicUsize, + pub storage_hash_calls: AtomicUsize, + pub storage_keys_paged_calls: AtomicUsize, + pub header_calls: AtomicUsize, + pub block_calls: AtomicUsize, + pub block_hash_calls: AtomicUsize, + } + + /// Mockable RPC with interior mutability. + #[derive(Clone, Default, Debug)] + pub struct RPC { + pub counters: std::sync::Arc, + /// storage[(block_hash, key)] = value + pub storage: std::sync::Arc< + parking_lot::RwLock>, + >, + /// storage_hash[(block_hash, key)] = hash + pub storage_hashes: std::sync::Arc< + parking_lot::RwLock>, + >, + /// storage_keys_paged[(block_hash, (prefix,start))] = Vec + pub storage_keys_pages: std::sync::Arc< + parking_lot::RwLock), Vec>>, + >, + /// headers[hash] = header + pub headers: std::sync::Arc>>, + /// blocks[hash] = SignedBlock + pub blocks: + std::sync::Arc>>>, + /// block_hash_by_number[n] = hash + pub block_hash_by_number: + std::sync::Arc>>, + } + + impl RPC { + pub fn new() -> Self { + Self { + counters: std::sync::Arc::new(Counters::default()), + storage: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + storage_hashes: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + storage_keys_pages: std::sync::Arc::new(parking_lot::RwLock::new( + BTreeMap::new(), + )), + headers: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + blocks: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + block_hash_by_number: std::sync::Arc::new(parking_lot::RwLock::new( + BTreeMap::new(), + )), + } + } + + pub fn put_storage(&self, at: Block::Hash, key: StorageKey, val: StorageData) { + self.storage.write().insert((at, key), val); + } + pub fn put_storage_keys_page( + &self, + at: Block::Hash, + prefix: Vec, + keys: Vec, + ) { + self.storage_keys_pages.write().insert((at, prefix), keys); + } + pub fn put_header(&self, h: Block::Header) { + self.headers.write().insert(h.hash(), h); + } + pub fn put_block(&self, block: Block, just: Option) { + let full = SignedBlock { block, justifications: just }; + 
self.blocks.write().insert(full.block.header().hash(), full); + } + } + + impl RPCClient for RPC { + fn storage( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.storage_calls.fetch_add(1, Ordering::Relaxed); + let map = self.storage.read(); + Ok(map.get(&(at.unwrap_or_default(), key.clone())).cloned()) + } + + fn storage_hash( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.storage_hash_calls.fetch_add(1, Ordering::Relaxed); + let bh = at.unwrap_or_default(); + let map = self.storage_hashes.read(); + Ok(map.get(&(bh, key.clone())).cloned()) + } + + fn storage_keys_paged( + &self, + key: Option, + count: u32, + start_key: Option, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> + { + self.counters.storage_keys_paged_calls.fetch_add(1, Ordering::Relaxed); + + use std::cmp::min; + + let bh = at.unwrap_or_default(); + let prefix = key.map(|k| k.0).unwrap_or_default(); // ✅ use the correct prefix + let start = start_key.map(|k| k.0); + + let map = self.storage_keys_pages.read(); + let mut all = map.get(&(bh, prefix.clone())).cloned().unwrap_or_default(); + + // Ensure deterministic ordering (lexicographic by bytes) + all.sort_by(|a, b| a.0.cmp(&b.0)); + + // Filter by prefix (defensive) + let mut filtered: Vec = + all.into_iter().filter(|k| k.0.starts_with(&prefix)).collect(); + + // Apply start_key as EXCLUSIVE: return only keys > start + if let Some(s) = start { + // look for the exact position, if it exists + if let Some(pos) = filtered.iter().position(|k| k.0 == s) { + filtered = filtered.into_iter().skip(pos + 1).collect(); + } else { + // if it is not present, keep only the keys greater than start + filtered = filtered.into_iter().filter(|k| k.0 > s).collect(); + } + } + + // Apply count + let take = min(filtered.len(), count as usize); + Ok(filtered.into_iter().take(take).map(|k| k.0).collect()) + } + + fn header( + &self, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.header_calls.fetch_add(1, Ordering::Relaxed); + let key = at.unwrap_or_default(); + let raw = self.headers.read().get(&key).cloned(); + Ok(raw) + } + + fn block( + &self, + hash: Option, + ) -> Result>, jsonrpsee::core::ClientError> { + self.counters.block_calls.fetch_add(1, Ordering::Relaxed); + let key = hash.unwrap_or_default(); + let raw = self.blocks.read().get(&key).cloned(); + Ok(raw) + } + + fn block_hash( + &self, + num: Option>, + ) -> Result, jsonrpsee::core::ClientError> { + todo!() + } + + fn system_chain(&self) -> Result { + todo!() + } + + fn system_properties( + &self, + ) -> Result + { + todo!() + } + } + } + + type N = u32; + type TestBlockT = TestBlock; + + fn make_header(number: N, parent: ::Hash) -> TestHeader { + TestHeader::new( + number.into(), + Default::default(), + Default::default(), + parent, + Default::default(), + ) + } + + fn make_block( + number: N, + parent: ::Hash, + xts: Vec, + ) -> TestBlock { + let header = make_header(number, parent); + TestBlock::new(header, xts) + } + + fn checkpoint(n: N) -> TestHeader { + make_header(n, Default::default()) + } + + #[test] + fn before_fork_reads_remote_only() { + let rpc = std::sync::Arc::new(RPC::new()); + // fork checkpoint at #100 + let cp = checkpoint(100); + let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + + // state_at(Default::default()) => before_fork=true + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + let key = b":foo".to_vec(); + // prepare remote value 
at "block_hash = Default::default()" + let at = Default::default(); + rpc.put_storage(at, StorageKey(key.clone()), StorageData(b"bar".to_vec())); + + // read storage + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"bar".to_vec())); + + // not cached in DB: second read still goes to RPC + let v2 = state.storage(&key).unwrap(); + assert_eq!(v2, Some(b"bar".to_vec())); + assert!(rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed) >= 2); + } + + #[test] + fn after_fork_first_fetch_caches_subsequent_hits_local() { + let rpc = std::sync::Arc::new(RPC::new()); + let cp = checkpoint(10); + let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + + // Build a block #11 > checkpoint (#10), with parent #10 + let parent = cp.hash(); + let b11 = make_block(11, parent, vec![]); + let h11 = b11.header.hash(); + + rpc.put_header(b11.header.clone()); + rpc.put_block(b11.clone(), None); + + // remote storage at fork block (checkpoint hash) + let fork_hash = cp.hash(); + let key = b":k".to_vec(); + rpc.put_storage(fork_hash, StorageKey(key.clone()), StorageData(b"v".to_vec())); + + // Grab state_at(#11): after_fork=false; local DB empty + let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); + + // First read fetches remote and caches + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"v".to_vec())); + + // Mutate RPC to detect second call (remove remote value) + // If second read still tries RPC, it would return None; but it should come from cache. + // So we do not change the mock; instead, assert RPC call count increases only once. + let calls_before = rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed); + let _ = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed); + assert_eq!(calls_before, calls_after, "second hit should be served from cache"); + } + + #[test] + fn removed_keys_prevents_remote_fetch() { + let rpc = std::sync::Arc::new(RPC::new()); + let cp = checkpoint(5); + let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + + // make block #6 + let b6 = make_block(6, cp.hash(), vec![]); + rpc.put_header(b6.header.clone()); + rpc.put_block(b6.clone(), None); + let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); + + // mark key as removed + let key = b":dead".to_vec(); + state.removed_keys.write().insert(key.clone(), ()); + + // Even if remote has a value, backend must not fetch it + rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"ghost".to_vec())); + let calls_before = rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed); + let v = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed); + + assert!(v.is_none()); + assert_eq!(calls_before, calls_after, "should not call RPC for removed keys"); + } + + #[test] + fn raw_iter_merges_local_then_remote() { + let rpc = std::sync::Arc::new(RPC::new()); + let cp = checkpoint(7); + let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + + // block #8 + let b8 = make_block(8, cp.hash(), vec![]); + rpc.put_header(b8.header.clone()); + rpc.put_block(b8.clone(), None); + let state = backend.state_at(b8.header.hash(), TrieCacheContext::Trusted).unwrap(); + + // Preload local DB with key "a1" + state.update_storage(b"a1", &Some(b"v1".to_vec())); + + // Remote has "a2" under same prefix at fork block + rpc.put_storage_keys_page( + cp.hash(), + b"a".to_vec(), + 
vec![StorageKey(b"a1".to_vec()), StorageKey(b"a2".to_vec())], + ); + rpc.put_storage(cp.hash(), StorageKey(b"a2".to_vec()), StorageData(b"v2".to_vec())); + + let mut args = sp_state_machine::IterArgs::default(); + args.prefix = Some(&b"a"[..]); + let mut it = state.raw_iter(args).unwrap(); + + // next_pair should return ("a1","v1") from local + let p1 = it.next_pair(&state).unwrap().unwrap(); + assert_eq!(p1.0, b"a1".to_vec()); + assert_eq!(p1.1, b"v1".to_vec()); + + // next_pair should now bring remote ("a2","v2") + let p2 = it.next_pair(&state).unwrap().unwrap(); + assert_eq!(p2.0, b"a2".to_vec()); + assert_eq!(p2.1, b"v2".to_vec()); + + // done + assert!(it.next_pair(&state).is_none()); + assert!(it.was_complete()); + } + + #[test] + fn blockchain_header_and_number_are_cached() { + let rpc = std::sync::Arc::new(RPC::new()); + let cp = checkpoint(3); + let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + let chain = backend.blockchain(); + + // prepare one block w/ extrinsics + let xts: Vec = vec![]; + let b4 = make_block(4, cp.hash(), xts.clone()); + let h4 = b4.header().hash(); + rpc.put_block(b4.clone(), None); + + // first header() fetches RPC and caches as Full + let h = chain.header(h4).unwrap().unwrap(); + assert_eq!(h.hash(), h4); + + // number() should now return from cache (no extra RPC needed) + let calls_before = rpc.counters.block_calls.load(std::sync::atomic::Ordering::Relaxed); + let number = chain.number(h4).unwrap().unwrap(); + let calls_after = rpc.counters.block_calls.load(std::sync::atomic::Ordering::Relaxed); + + assert_eq!(number, 4); + assert_eq!( + calls_before, calls_after, + "number() should be served from cache after header()" + ); + } +} \ No newline at end of file diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/mod.rs new file mode 100644 index 0000000000000..f551e7b1f13e0 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/mod.rs @@ -0,0 +1,4 @@ +pub mod backend; +pub mod rpc_client; + +pub const LAZY_LOADING_LOG_TARGET: &str = "lazy-loading"; \ No newline at end of file diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs new file mode 100644 index 0000000000000..c20ffb4e34069 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs @@ -0,0 +1,50 @@ +use jsonrpsee::core::ClientError; +use polkadot_sdk::{ + sc_chain_spec, + sp_api::__private::HeaderT, + sp_runtime::{generic::SignedBlock, traits::Block as BlockT}, + sp_state_machine, + sp_storage::{StorageData, StorageKey}, +}; +use serde::de::DeserializeOwned; + +pub trait RPCClient: Send + Sync + std::fmt::Debug { + fn system_chain(&self) -> Result; + + fn system_properties(&self) -> Result; + + fn block( + &self, + hash: Option, + ) -> Result>, jsonrpsee::core::ClientError>; + + fn block_hash( + &self, + block_number: Option<::Number>, + ) -> Result, jsonrpsee::core::ClientError>; + + fn header( + &self, + hash: Option, + ) -> Result, jsonrpsee::core::ClientError>; + + fn storage( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError>; + + fn storage_hash( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError>; + + fn storage_keys_paged( + &self, + key: Option, + count: u32, + start_key: Option, + at: Option, + ) -> Result, ClientError>; +} \ No newline at end of file diff --git 
a/crates/anvil-polkadot/src/substrate_node/mod.rs b/crates/anvil-polkadot/src/substrate_node/mod.rs index 28ae7c067acc7..052df0efc7eb9 100644 --- a/crates/anvil-polkadot/src/substrate_node/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/mod.rs @@ -7,3 +7,4 @@ pub mod mining_engine; pub mod rpc; pub mod service; pub mod snapshot; +mod lazy_loading; diff --git a/crates/anvil-polkadot/src/substrate_node/service/backend.rs b/crates/anvil-polkadot/src/substrate_node/service/backend.rs index 331bedf7b748c..169295e88e597 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/backend.rs @@ -1,6 +1,9 @@ -use crate::substrate_node::service::{ - Backend, - storage::{CodeInfo, ReviveAccountInfo, SystemAccountInfo, well_known_keys}, +use crate::substrate_node::{ + service::{ + Backend, + storage::{CodeInfo, ReviveAccountInfo, SystemAccountInfo, well_known_keys}, + }, + lazy_loading::backend::Blockchain }; use alloy_primitives::{Address, Bytes}; use codec::{Decode, Encode}; @@ -9,7 +12,6 @@ use parking_lot::Mutex; use polkadot_sdk::{ parachains_common::{AccountId, Hash, opaque::Block}, sc_client_api::{Backend as BackendT, StateBackend, TrieCacheContext}, - sc_client_db::BlockchainDb, sp_blockchain, sp_core::{H160, H256}, sp_io::hashing::blake2_256, @@ -60,7 +62,7 @@ impl BackendWithOverlay { Self { backend, overrides } } - pub fn blockchain(&self) -> &BlockchainDb { + pub fn blockchain(&self) -> &Blockchain { self.backend.blockchain() } diff --git a/crates/anvil-polkadot/src/substrate_node/service/client.rs b/crates/anvil-polkadot/src/substrate_node/service/client.rs index c334c4a17c79f..af6007c169c7e 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/client.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/client.rs @@ -5,13 +5,15 @@ use crate::substrate_node::{ backend::StorageOverrides, executor::{Executor, WasmExecutor}, }, + lazy_loading::backend::new_backend as new_lazy_loading_backend, }; use parking_lot::Mutex; use polkadot_sdk::{ - parachains_common::opaque::Block, + sp_runtime::traits::Header as HeaderT, + parachains_common::opaque::{Block, Header}, sc_chain_spec::get_extension, sc_client_api::{BadBlocks, ForkBlocks, execution_extensions::ExecutionExtensions}, - sc_service::{self, KeystoreContainer, LocalCallExecutor, TaskManager, new_db_backend}, + sc_service::{self, KeystoreContainer, LocalCallExecutor, TaskManager}, sp_keystore::KeystorePtr, }; use std::{collections::HashMap, sync::Arc}; @@ -25,7 +27,10 @@ pub fn new_client( executor: WasmExecutor, storage_overrides: Arc>, ) -> Result<(Arc, Arc, KeystorePtr, TaskManager), sc_service::error::Error> { - let backend = new_db_backend(config.db_config())?; + + let checkpoint = Header::new(genesis_block_number.try_into().unwrap_or(0), Default::default(), Default::default(), Default::default(), Default::default()); + + let backend = new_lazy_loading_backend(None, checkpoint)?; let genesis_block_builder = DevelopmentGenesisBlockBuilder::new( genesis_block_number, diff --git a/crates/anvil-polkadot/src/substrate_node/service/mod.rs b/crates/anvil-polkadot/src/substrate_node/service/mod.rs index fa9a36f1cfa6e..d6cdf5d036271 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/mod.rs @@ -4,6 +4,7 @@ use crate::{ mining_engine::{MiningEngine, MiningMode, run_mining_engine}, rpc::spawn_rpc_server, service::consensus::SameSlotConsensusDataProvider, + lazy_loading::backend::Backend as 
LazyLoadingBackend, }, }; use anvil::eth::backend::time::TimeManager; @@ -30,7 +31,7 @@ mod consensus; mod executor; pub mod storage; -pub type Backend = sc_service::TFullBackend; +pub type Backend = LazyLoadingBackend; pub type TransactionPoolHandle = sc_transaction_pool::TransactionPoolHandle; From 891d2f2b6147993b2ea90844a0969465e4bc25fa Mon Sep 17 00:00:00 2001 From: Diego Date: Mon, 3 Nov 2025 17:07:51 -0300 Subject: [PATCH 02/44] Fix fmt --- .../substrate_node/lazy_loading/backend.rs | 59 ++++++++++--------- .../src/substrate_node/lazy_loading/mod.rs | 2 +- .../substrate_node/lazy_loading/rpc_client.rs | 2 +- .../anvil-polkadot/src/substrate_node/mod.rs | 2 +- .../src/substrate_node/service/backend.rs | 2 +- .../src/substrate_node/service/client.rs | 13 ++-- .../src/substrate_node/service/mod.rs | 2 +- 7 files changed, 46 insertions(+), 36 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs index da68b05c9d672..d7f8c0f7b6abe 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -1,6 +1,5 @@ -use super::{ - rpc_client::RPCClient, -}; +use super::rpc_client::RPCClient; +use alloy_primitives::hex; use polkadot_sdk::{ sc_client_api::{ StorageKey, TrieCacheContext, UsageInfo, @@ -30,7 +29,6 @@ use std::{ ptr, sync::Arc, }; -use alloy_primitives::hex; struct PendingBlock { block: StoredBlock, @@ -114,7 +112,7 @@ impl Blockchain { target: super::LAZY_LOADING_LOG_TARGET, "🏗️ Creating new Blockchain storage (empty)" ); - + let storage = Arc::new(parking_lot::RwLock::new(BlockchainStorage { blocks: HashMap::new(), hashes: HashMap::new(), @@ -143,9 +141,9 @@ impl Blockchain { target: super::LAZY_LOADING_LOG_TARGET, "Looking up block hash for number={}", n ); - + let block_hash = self.storage.read().hashes.get(&n).cloned(); - + log::info!( target: super::LAZY_LOADING_LOG_TARGET, "Lookup result: number={}, found={}, total_hashes={}", @@ -153,7 +151,7 @@ impl Blockchain { block_hash.is_some(), self.storage.read().hashes.len() ); - + match block_hash { None => { log::info!( @@ -183,7 +181,7 @@ impl Blockchain { new_state: NewBlockState, ) -> sp_blockchain::Result<()> { let number = *header.number(); - + log::info!( target: super::LAZY_LOADING_LOG_TARGET, "Inserting block: number={}, hash={:?}, new_state={:?}", @@ -191,17 +189,17 @@ impl Blockchain { hash, new_state ); - + if new_state.is_best() { self.apply_head(&header)?; } let mut storage = self.storage.write(); - + // Always insert the block into blocks and hashes storage storage.blocks.insert(hash, StoredBlock::new(header.clone(), body, justifications)); storage.hashes.insert(number, hash); - + log::info!( target: super::LAZY_LOADING_LOG_TARGET, "Block inserted successfully: number={}, hash={:?}. 
Total blocks={}, Total hashes={}", @@ -210,17 +208,17 @@ impl Blockchain { storage.blocks.len(), storage.hashes.len() ); - + // Set genesis_hash only for the first block inserted if storage.blocks.len() == 1 { storage.genesis_hash = hash; } - + // Update leaves for non-genesis blocks if storage.blocks.len() > 1 { storage.leaves.import(hash, number, *header.parent_hash()); } - + // Finalize block only if explicitly requested via new_state if let NewBlockState::Final = new_state { storage.finalized_hash = hash; @@ -400,7 +398,7 @@ impl HeaderBackend for Blockchain HeaderBackend for Blockchain sp_state_machine::StorageIterator, start_key: Option, block: Option| { - backend.rpc().and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()).and_then(|keys| keys.first().map(|key| key.clone())) + backend + .rpc() + .and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()) + .and_then(|keys| keys.first().map(|key| key.clone())) }; let prefix = self.args.prefix.clone().map(|k| StorageKey(k)); @@ -837,7 +838,10 @@ impl sp_state_machine::StorageIterator, start_key: Option, block: Option| { - backend.rpc().and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()).and_then(|keys| keys.first().map(|key| key.clone())) + backend + .rpc() + .and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()) + .and_then(|keys| keys.first().map(|key| key.clone())) }; let prefix = self.args.prefix.clone().map(|k| StorageKey(k)); @@ -1002,11 +1006,8 @@ impl sp_state_machine::Backend Some(data), _ if !self.removed_keys.read().contains_key(key) => { // Only try remote fetch if RPC client is available - let result = if self.rpc().is_some() { - remote_fetch(Some(self.fork_block)) - } else { - None - }; + let result = + if self.rpc().is_some() { remote_fetch(Some(self.fork_block)) } else { None }; // Cache state drop(readable_db); @@ -1103,7 +1104,9 @@ impl sp_state_machine::Backend Result, Self::Error> { let remote_fetch = |block: Option| { let start_key = Some(StorageKey(key.to_vec())); - self.rpc().and_then(|rpc| rpc.storage_keys_paged(start_key.clone(), 2, None, block).ok()).and_then(|keys| keys.last().map(|key| key.clone())) + self.rpc() + .and_then(|rpc| rpc.storage_keys_paged(start_key.clone(), 2, None, block).ok()) + .and_then(|keys| keys.last().map(|key| key.clone())) }; let maybe_next_key = if self.before_fork { @@ -1375,7 +1378,9 @@ impl backend::Backend for Backend: Send + Sync + std::fmt::D start_key: Option, at: Option, ) -> Result, ClientError>; -} \ No newline at end of file +} diff --git a/crates/anvil-polkadot/src/substrate_node/mod.rs b/crates/anvil-polkadot/src/substrate_node/mod.rs index 052df0efc7eb9..a4df2de2b63a9 100644 --- a/crates/anvil-polkadot/src/substrate_node/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/mod.rs @@ -3,8 +3,8 @@ pub mod genesis; pub mod host; pub mod impersonation; pub mod in_mem_rpc; +mod lazy_loading; pub mod mining_engine; pub mod rpc; pub mod service; pub mod snapshot; -mod lazy_loading; diff --git a/crates/anvil-polkadot/src/substrate_node/service/backend.rs b/crates/anvil-polkadot/src/substrate_node/service/backend.rs index 169295e88e597..bd111707d72ca 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/backend.rs @@ -1,9 +1,9 @@ use crate::substrate_node::{ + lazy_loading::backend::Blockchain, service::{ Backend, storage::{CodeInfo, ReviveAccountInfo, SystemAccountInfo, well_known_keys}, }, - lazy_loading::backend::Blockchain }; use 
alloy_primitives::{Address, Bytes}; use codec::{Decode, Encode}; diff --git a/crates/anvil-polkadot/src/substrate_node/service/client.rs b/crates/anvil-polkadot/src/substrate_node/service/client.rs index af6007c169c7e..a0b4a6a162a84 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/client.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/client.rs @@ -1,20 +1,20 @@ use crate::substrate_node::{ genesis::DevelopmentGenesisBlockBuilder, + lazy_loading::backend::new_backend as new_lazy_loading_backend, service::{ Backend, backend::StorageOverrides, executor::{Executor, WasmExecutor}, }, - lazy_loading::backend::new_backend as new_lazy_loading_backend, }; use parking_lot::Mutex; use polkadot_sdk::{ - sp_runtime::traits::Header as HeaderT, parachains_common::opaque::{Block, Header}, sc_chain_spec::get_extension, sc_client_api::{BadBlocks, ForkBlocks, execution_extensions::ExecutionExtensions}, sc_service::{self, KeystoreContainer, LocalCallExecutor, TaskManager}, sp_keystore::KeystorePtr, + sp_runtime::traits::Header as HeaderT, }; use std::{collections::HashMap, sync::Arc}; use substrate_runtime::RuntimeApi; @@ -27,8 +27,13 @@ pub fn new_client( executor: WasmExecutor, storage_overrides: Arc>, ) -> Result<(Arc, Arc, KeystorePtr, TaskManager), sc_service::error::Error> { - - let checkpoint = Header::new(genesis_block_number.try_into().unwrap_or(0), Default::default(), Default::default(), Default::default(), Default::default()); + let checkpoint = Header::new( + genesis_block_number.try_into().unwrap_or(0), + Default::default(), + Default::default(), + Default::default(), + Default::default(), + ); let backend = new_lazy_loading_backend(None, checkpoint)?; diff --git a/crates/anvil-polkadot/src/substrate_node/service/mod.rs b/crates/anvil-polkadot/src/substrate_node/service/mod.rs index d6cdf5d036271..e8f7c95acc2f8 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/mod.rs @@ -1,10 +1,10 @@ use crate::{ AnvilNodeConfig, substrate_node::{ + lazy_loading::backend::Backend as LazyLoadingBackend, mining_engine::{MiningEngine, MiningMode, run_mining_engine}, rpc::spawn_rpc_server, service::consensus::SameSlotConsensusDataProvider, - lazy_loading::backend::Backend as LazyLoadingBackend, }, }; use anvil::eth::backend::time::TimeManager; From bc5bddf9cb247654b40bfda4ca695acb7f0efe12 Mon Sep 17 00:00:00 2001 From: Alexandru Gheorghe <49718502+alexggh@users.noreply.github.com> Date: Tue, 4 Nov 2025 14:41:51 +0200 Subject: [PATCH 03/44] fix fuzzer (#374) * fix fuzzer Signed-off-by: Alexandru Gheorghe * update snapshots as temporary solution * make the CI runs a bit more stable Signed-off-by: Alexandru Gheorghe --------- Signed-off-by: Alexandru Gheorghe Co-authored-by: Pavlo Khrystenko --- .config/nextest.toml | 4 +- crates/evm/evm/src/executors/fuzz/mod.rs | 4 +- crates/forge/tests/cli/revive_vm.rs | 182 +++++++++++------------ 3 files changed, 96 insertions(+), 94 deletions(-) diff --git a/.config/nextest.toml b/.config/nextest.toml index ad567c11943e6..dfee93312a4dc 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -3,8 +3,8 @@ chisel-serial = { max-threads = 1 } polkadot-localnode-serial = { max-threads = 1 } [profile.default] -retries = { backoff = "exponential", count = 2, delay = "5s", jitter = true } -slow-timeout = { period = "1m", terminate-after = 3 } +retries = { backoff = "exponential", count = 3, delay = "10s", jitter = true } +slow-timeout = { period = "5m", terminate-after = 4 } 
[[profile.default.overrides]] filter = "test(/ext_integration|can_test_forge_std/)" diff --git a/crates/evm/evm/src/executors/fuzz/mod.rs b/crates/evm/evm/src/executors/fuzz/mod.rs index bf5004bd497bb..93fef3eeb0fc8 100644 --- a/crates/evm/evm/src/executors/fuzz/mod.rs +++ b/crates/evm/evm/src/executors/fuzz/mod.rs @@ -107,9 +107,11 @@ impl FuzzedExecutor { return Err(TestCaseError::fail(TEST_TIMEOUT)); } self.executor.strategy.runner.checkpoint(); - let fuzz_res = self.single_fuzz(address, calldata)?; + let fuzz_res = self.single_fuzz(address, calldata); self.executor.strategy.runner.reload_checkpoint(); + let fuzz_res = fuzz_res?; + // If running with progress then increment current run. if let Some(progress) = progress { progress.inc(1); diff --git a/crates/forge/tests/cli/revive_vm.rs b/crates/forge/tests/cli/revive_vm.rs index ef6d68101a6d1..f8481659acfa9 100644 --- a/crates/forge/tests/cli/revive_vm.rs +++ b/crates/forge/tests/cli/revive_vm.rs @@ -472,53 +472,53 @@ Compiler run successful! Ran 2 tests for src/CounterTest.t.sol:CounterTest [PASS] test_Increment() ([GAS]) Traces: - [765075403] CounterTest::setUp() - ├─ [262294819] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 7404 bytes of code + [762904767] CounterTest::setUp() + ├─ [260881139] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 7349 bytes of code ├─ [0] VM::expectEmit() │ └─ ← [Return] ├─ emit SetNumber(result: 5) - ├─ [385250826] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) + ├─ [384529579] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) │ ├─ emit SetNumber(result: 5) │ └─ ← [Stop] - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← [Return] 5 └─ ← [Stop] - [737726031] CounterTest::test_Increment() - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + [736989721] CounterTest::test_Increment() + ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← [Return] 5 - ├─ [385250826] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(55) + ├─ [384621643] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(55) │ ├─ emit SetNumber(result: 55) │ └─ ← [Stop] - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← [Return] 55 ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::increment() │ ├─ emit Increment(result: 56) │ └─ ← [Stop] - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← [Return] 56 └─ ← [Stop] [PASS] test_expectRevert() ([GAS]) Traces: - [765075403] CounterTest::setUp() - ├─ [262294819] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 7404 bytes of code + [762904767] CounterTest::setUp() + ├─ [260881139] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 7349 bytes of code ├─ [0] VM::expectEmit() │ └─ ← [Return] ├─ emit SetNumber(result: 5) - ├─ [385250826] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) + ├─ [384529579] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) │ ├─ emit SetNumber(result: 5) │ └─ ← [Stop] - ├─ [117489011] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← 
[Return] 5 └─ ← [Stop] - [56930227] CounterTest::test_expectRevert() + [57445601] CounterTest::test_expectRevert() ├─ [0] VM::expectRevert(custom error 0xf28dceb3: 0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000006456941a80000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000076661696c7572650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000) │ └─ ← [Return] - ├─ [56921388] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::failed_call() [staticcall] + ├─ [57436762] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::failed_call() [staticcall] │ └─ ← [Revert] Revert("failure") └─ ← [Stop] @@ -654,14 +654,14 @@ Compiler run successful! Ran 2 tests for src/Test.t.sol:RecordTest [PASS] testRecordAccess() ([GAS]) Traces: - [961089406] RecordTest::testRecordAccess() - ├─ [16788608] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 4095 bytes of code - ├─ [16788608] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 2182 bytes of code + [957517995] RecordTest::testRecordAccess() + ├─ [15686442] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 4043 bytes of code + ├─ [15686442] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] 2137 bytes of code ├─ [0] VM::record() │ └─ ← [Return] - ├─ [927440089] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) + ├─ [926073010] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() │ │ └─ ← [Return] │ └─ ← [Stop] @@ -673,14 +673,14 @@ Traces: [PASS] testStopRecordAccess() ([GAS]) Traces: - [961093272] RecordTest::testStopRecordAccess() - ├─ [16788608] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 4095 bytes of code - ├─ [16788608] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 2182 bytes of code + [957521861] RecordTest::testStopRecordAccess() + ├─ [15686442] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 4043 bytes of code + ├─ [15686442] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] 2137 bytes of code ├─ [0] VM::record() │ └─ ← [Return] - ├─ [927440089] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) + ├─ [926073010] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() │ │ └─ ← [Return] │ └─ ← [Stop] @@ -971,18 +971,18 @@ Compiler run successful! 
Ran 7 tests for src/Test.t.sol:RecordLogsTest [PASS] testEmitRecordEmit() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [15766576] RecordLogsTest::setUp() + ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 12544 bytes of code └─ ← [Stop] - [357757177] RecordLogsTest::testEmitRecordEmit() - ├─ [183812741] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) + [359596403] RecordLogsTest::testEmitRecordEmit() + ├─ [184732354] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ ├─ emit LogTopic12(topic1: 1, topic2: 2, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ └─ ← [Stop] ├─ [0] VM::recordLogs() │ └─ ← [Return] - ├─ [173888857] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(3, 0x2e38edeff9493e0004540e975027a429) + ├─ [174808470] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(3, 0x2e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic1(topic1: 3, data: 0x2e38edeff9493e0004540e975027a429) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -993,13 +993,13 @@ Traces: [PASS] testRecordOffGetsNothing() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [15766576] RecordLogsTest::setUp() + ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 12544 bytes of code └─ ← [Stop] - [202674284] RecordLogsTest::testRecordOffGetsNothing() - ├─ [202625294] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) + [203593897] RecordLogsTest::testRecordOffGetsNothing() + ├─ [203544907] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic123(topic1: 1, topic2: 2, topic3: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -1010,20 +1010,20 @@ Traces: [PASS] testRecordOnEmitDifferentDepths() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [15766576] RecordLogsTest::setUp() + ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 12544 bytes of code └─ ← [Stop] - [999237291] RecordLogsTest::testRecordOnEmitDifferentDepths() + [997770019] RecordLogsTest::testRecordOnEmitDifferentDepths() ├─ [0] VM::recordLogs() │ └─ ← [Return] ├─ emit LogTopic(topic1: 1, data: 0x43a26051362b8040b289abe93334a5e3) - ├─ [180758801] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa) + ├─ [181678414] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa) │ ├─ emit LogTopic12(topic1: 2, topic2: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa) │ └─ ← [Stop] - ├─ [818371229] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 10554 bytes of code + ├─ [815984344] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] 10517 bytes of code ├─ [0] 
0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ ├─ [0] 0x104fBc016F4bb334D775a19E8A6510109AC63E00::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ │ ├─ emit LogTopic123(topic1: 4, topic2: 5, topic3: 6, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) @@ -1037,9 +1037,9 @@ Traces: [PASS] testRecordOnNoLogs() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [15766576] RecordLogsTest::setUp() + ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 12544 bytes of code └─ ← [Stop] [4118] RecordLogsTest::testRecordOnNoLogs() @@ -1051,15 +1051,15 @@ Traces: [PASS] testRecordOnSingleLog() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [15766576] RecordLogsTest::setUp() + ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 12544 bytes of code └─ ← [Stop] - [187093023] RecordLogsTest::testRecordOnSingleLog() + [188012636] RecordLogsTest::testRecordOnSingleLog() ├─ [0] VM::recordLogs() │ └─ ← [Return] - ├─ [187077066] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x4576656e74204461746120696e20537472696e67) + ├─ [187996679] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x4576656e74204461746120696e20537472696e67) │ ├─ emit LogTopic123(topic1: 1, topic2: 2, topic3: 3, data: 0x4576656e74204461746120696e20537472696e67) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -1068,15 +1068,15 @@ Traces: [PASS] testRecordOnSingleLogTopic0() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [15766576] RecordLogsTest::setUp() + ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 12544 bytes of code └─ ← [Stop] - [184656340] RecordLogsTest::testRecordOnSingleLogTopic0() + [185575953] RecordLogsTest::testRecordOnSingleLogTopic0() ├─ [0] VM::recordLogs() │ └─ ← [Return] - ├─ [184603101] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) + ├─ [185522714] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic0(data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -1087,33 +1087,33 @@ Traces: [PASS] testRecordsConsumednAsRead() ([GAS]) Traces: - [16868742] RecordLogsTest::setUp() - ├─ [16830999] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12583 bytes of code + [15766576] RecordLogsTest::setUp() + ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 12544 bytes of code └─ ← [Stop] - [903065419] RecordLogsTest::testRecordsConsumednAsRead() - ├─ [173888857] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 0x43a26051362b8040b289abe93334a5e3) + [907663484] RecordLogsTest::testRecordsConsumednAsRead() + ├─ [174808470] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 0x43a26051362b8040b289abe93334a5e3) │ ├─ emit LogTopic1(topic1: 1, data: 0x43a26051362b8040b289abe93334a5e3) │ └─ ← [Stop] ├─ [0] 
VM::recordLogs() │ └─ ← [Return] ├─ [0] VM::getRecordedLogs() │ └─ ← [Return] [] - ├─ [181776781] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) + ├─ [182696394] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ ├─ emit LogTopic12(topic1: 2, topic2: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() │ └─ ← [Return] [([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] - ├─ [187077066] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa) + ├─ [187996679] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa) │ ├─ emit LogTopic123(topic1: 4, topic2: 5, topic3: 6, data: 0x43a26051362b8040b289abe93334a5e3662751aa) │ └─ ← [Stop] - ├─ [172108813] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) + ├─ [173028426] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ ├─ emit LogTopic0(data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC), ([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] - ├─ [188095046] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(7, 8, 9, 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) + ├─ [189014659] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(7, 8, 9, 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) │ ├─ emit LogTopic123(topic1: 7, topic2: 8, topic3: 9, data: 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -1267,17 +1267,17 @@ Compiler run successful! 
Ran 3 tests for src/Test.t.sol:StateDiffTest [PASS] testCallProxyaccesses() ([GAS]) Traces: - [585251161] StateDiffTest::setUp() - ├─ [292049387] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 5531 bytes of code - ├─ [293109162] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 6405 bytes of code + [583131611] StateDiffTest::setUp() + ├─ [290989612] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 5485 bytes of code + ├─ [292049387] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] 6359 bytes of code └─ ← [Stop] - [728077974] StateDiffTest::testCallProxyaccesses() + [727692988] StateDiffTest::testCallProxyaccesses() ├─ [0] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [728040641] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::proxyCall(55) + ├─ [727655655] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::proxyCall(55) │ ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) │ │ └─ ← [Return] │ └─ ← [Stop] @@ -1287,17 +1287,17 @@ Traces: [PASS] testCallaccesses() ([GAS]) Traces: - [585251161] StateDiffTest::setUp() - ├─ [292049387] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 5531 bytes of code - ├─ [293109162] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 6405 bytes of code + [583131611] StateDiffTest::setUp() + ├─ [290989612] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 5485 bytes of code + ├─ [292049387] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] 6359 bytes of code └─ ← [Stop] - [276825754] StateDiffTest::testCallaccesses() + [276251742] StateDiffTest::testCallaccesses() ├─ [0] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [276796934] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) + ├─ [276222922] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) │ └─ ← [Stop] ├─ [0] VM::stopAndReturnStateDiff() │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 1)] @@ -1305,18 +1305,18 @@ Traces: [PASS] testCreateaccesses() ([GAS]) Traces: - [585251161] StateDiffTest::setUp() - ├─ [292049387] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 5531 bytes of code - ├─ [293109162] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 6405 bytes of code + [583131611] StateDiffTest::setUp() + ├─ [290989612] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] 5485 bytes of code + ├─ [292049387] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] 6359 bytes of code └─ ← [Stop] - [292103665] StateDiffTest::testCreateaccesses() + [291043890] StateDiffTest::testCreateaccesses() ├─ [0] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [292049387] → new @0x2e234DAe75C793f67A35089C9d99245E1C58470b - │ └─ ← [Return] 5531 
bytes of code + ├─ [290989612] → new @0x2e234DAe75C793f67A35089C9d99245E1C58470b + │ └─ ← [Return] 5485 bytes of code ├─ [0] VM::stopAndReturnStateDiff() │ └─ ← [Return] [((0, 31337 [3.133e4]), 4, 0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 1000000000000000000 [1e18], 0x0000000000000000000000000000000000000000000000000000000000000064, false, [(0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000000, false), (0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000064, false)], 1)] └─ ← [Stop] From e34ed8392834f95eb1890bdc3553d91602792f15 Mon Sep 17 00:00:00 2001 From: Alexandru Gheorghe <49718502+alexggh@users.noreply.github.com> Date: Tue, 4 Nov 2025 17:14:34 +0200 Subject: [PATCH 04/44] fixup invariant fuzzer (#376) Signed-off-by: Alexandru Gheorghe --- crates/evm/evm/src/executors/invariant/mod.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/crates/evm/evm/src/executors/invariant/mod.rs b/crates/evm/evm/src/executors/invariant/mod.rs index 54a73cf2733b8..3c284a4a0a70d 100644 --- a/crates/evm/evm/src/executors/invariant/mod.rs +++ b/crates/evm/evm/src/executors/invariant/mod.rs @@ -392,9 +392,10 @@ impl<'a> InvariantExecutor<'a> { .last() .ok_or_else(|| eyre!("no input generated to call fuzzed target."))?; + self.executor.strategy.runner.checkpoint(); // Execute call from the randomly generated sequence without committing state. // State is committed only if call is not a magic assume. 
- let mut call_result = current_run + let call_result = current_run .executor .call_raw( tx.sender, @@ -402,7 +403,10 @@ impl<'a> InvariantExecutor<'a> { tx.call_details.calldata.clone(), U256::ZERO, ) - .map_err(|e| eyre!(format!("Could not make raw evm call: {e}")))?; + .map_err(|e| eyre!(format!("Could not make raw evm call: {e}"))); + self.executor.strategy.runner.reload_checkpoint(); + + let mut call_result = call_result?; let discarded = call_result.result.as_ref() == MAGIC_ASSUME; if self.config.show_metrics { From 09f86d94e5d73c7e72026468d4b098c95f3ab8c2 Mon Sep 17 00:00:00 2001 From: Alexandru Gheorghe <49718502+alexggh@users.noreply.github.com> Date: Tue, 4 Nov 2025 17:15:16 +0200 Subject: [PATCH 05/44] fixup calling test contract (#377) Signed-off-by: Alexandru Gheorghe --- crates/revive-strategy/src/cheatcodes/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/revive-strategy/src/cheatcodes/mod.rs b/crates/revive-strategy/src/cheatcodes/mod.rs index b31660617ae7f..5fe5c4d81af6e 100644 --- a/crates/revive-strategy/src/cheatcodes/mod.rs +++ b/crates/revive-strategy/src/cheatcodes/mod.rs @@ -878,7 +878,7 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector .journaled_state .database .get_test_contract_address() - .map(|addr| call.bytecode_address == addr) + .map(|addr| call.bytecode_address == addr || call.target_address == addr) .unwrap_or_default() { tracing::info!( From 52c324956924ec5d7a2f7abe27ef9a0825edb72b Mon Sep 17 00:00:00 2001 From: Pavlo Khrystenko <45178695+pkhry@users.noreply.github.com> Date: Tue, 4 Nov 2025 16:43:28 +0100 Subject: [PATCH 06/44] stabilise snapshots (#384) --- crates/forge/tests/cli/revive_vm.rs | 182 ++++++++++++++-------------- 1 file changed, 91 insertions(+), 91 deletions(-) diff --git a/crates/forge/tests/cli/revive_vm.rs b/crates/forge/tests/cli/revive_vm.rs index f8481659acfa9..e003c4c1870b1 100644 --- a/crates/forge/tests/cli/revive_vm.rs +++ b/crates/forge/tests/cli/revive_vm.rs @@ -472,53 +472,53 @@ Compiler run successful! Ran 2 tests for src/CounterTest.t.sol:CounterTest [PASS] test_Increment() ([GAS]) Traces: - [762904767] CounterTest::setUp() - ├─ [260881139] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 7349 bytes of code + [..] CounterTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code ├─ [0] VM::expectEmit() │ └─ ← [Return] ├─ emit SetNumber(result: 5) - ├─ [384529579] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) │ ├─ emit SetNumber(result: 5) │ └─ ← [Stop] - ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← [Return] 5 └─ ← [Stop] - [736989721] CounterTest::test_Increment() - ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + [..] CounterTest::test_Increment() + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← [Return] 5 - ├─ [384621643] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(55) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(55) │ ├─ emit SetNumber(result: 55) │ └─ ← [Stop] - ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 
0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← [Return] 55 ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::increment() │ ├─ emit Increment(result: 56) │ └─ ← [Stop] - ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← [Return] 56 └─ ← [Stop] [PASS] test_expectRevert() ([GAS]) Traces: - [762904767] CounterTest::setUp() - ├─ [260881139] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 7349 bytes of code + [..] CounterTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code ├─ [0] VM::expectEmit() │ └─ ← [Return] ├─ emit SetNumber(result: 5) - ├─ [384529579] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) │ ├─ emit SetNumber(result: 5) │ └─ ← [Stop] - ├─ [117453302] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] │ └─ ← [Return] 5 └─ ← [Stop] - [57445601] CounterTest::test_expectRevert() + [..] CounterTest::test_expectRevert() ├─ [0] VM::expectRevert(custom error 0xf28dceb3: 0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000006456941a80000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000076661696c7572650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000) │ └─ ← [Return] - ├─ [57436762] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::failed_call() [staticcall] + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::failed_call() [staticcall] │ └─ ← [Revert] Revert("failure") └─ ← [Stop] @@ -654,14 +654,14 @@ Compiler run successful! Ran 2 tests for src/Test.t.sol:RecordTest [PASS] testRecordAccess() ([GAS]) Traces: - [957517995] RecordTest::testRecordAccess() - ├─ [15686442] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 4043 bytes of code - ├─ [15686442] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 2137 bytes of code + [..] RecordTest::testRecordAccess() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] [..] bytes of code ├─ [0] VM::record() │ └─ ← [Return] - ├─ [926073010] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() │ │ └─ ← [Return] │ └─ ← [Stop] @@ -673,14 +673,14 @@ Traces: [PASS] testStopRecordAccess() ([GAS]) Traces: - [957521861] RecordTest::testStopRecordAccess() - ├─ [15686442] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 4043 bytes of code - ├─ [15686442] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 2137 bytes of code + [..] RecordTest::testStopRecordAccess() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] [..] 
bytes of code ├─ [0] VM::record() │ └─ ← [Return] - ├─ [926073010] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() │ │ └─ ← [Return] │ └─ ← [Stop] @@ -971,18 +971,18 @@ Compiler run successful! Ran 7 tests for src/Test.t.sol:RecordLogsTest [PASS] testEmitRecordEmit() ([GAS]) Traces: - [15766576] RecordLogsTest::setUp() - ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12544 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [359596403] RecordLogsTest::testEmitRecordEmit() - ├─ [184732354] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) + [..] RecordLogsTest::testEmitRecordEmit() + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ ├─ emit LogTopic12(topic1: 1, topic2: 2, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ └─ ← [Stop] ├─ [0] VM::recordLogs() │ └─ ← [Return] - ├─ [174808470] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(3, 0x2e38edeff9493e0004540e975027a429) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(3, 0x2e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic1(topic1: 3, data: 0x2e38edeff9493e0004540e975027a429) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -993,13 +993,13 @@ Traces: [PASS] testRecordOffGetsNothing() ([GAS]) Traces: - [15766576] RecordLogsTest::setUp() - ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12544 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [203593897] RecordLogsTest::testRecordOffGetsNothing() - ├─ [203544907] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) + [..] RecordLogsTest::testRecordOffGetsNothing() + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic123(topic1: 1, topic2: 2, topic3: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -1010,20 +1010,20 @@ Traces: [PASS] testRecordOnEmitDifferentDepths() ([GAS]) Traces: - [15766576] RecordLogsTest::setUp() - ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12544 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [997770019] RecordLogsTest::testRecordOnEmitDifferentDepths() + [..] RecordLogsTest::testRecordOnEmitDifferentDepths() ├─ [0] VM::recordLogs() │ └─ ← [Return] ├─ emit LogTopic(topic1: 1, data: 0x43a26051362b8040b289abe93334a5e3) - ├─ [181678414] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa) + ├─ [..] 
0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa) │ ├─ emit LogTopic12(topic1: 2, topic2: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa) │ └─ ← [Stop] - ├─ [815984344] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 10517 bytes of code + ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] [..] bytes of code ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ ├─ [0] 0x104fBc016F4bb334D775a19E8A6510109AC63E00::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ │ ├─ emit LogTopic123(topic1: 4, topic2: 5, topic3: 6, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) @@ -1037,9 +1037,9 @@ Traces: [PASS] testRecordOnNoLogs() ([GAS]) Traces: - [15766576] RecordLogsTest::setUp() - ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12544 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [4118] RecordLogsTest::testRecordOnNoLogs() @@ -1051,15 +1051,15 @@ Traces: [PASS] testRecordOnSingleLog() ([GAS]) Traces: - [15766576] RecordLogsTest::setUp() - ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12544 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [188012636] RecordLogsTest::testRecordOnSingleLog() + [..] RecordLogsTest::testRecordOnSingleLog() ├─ [0] VM::recordLogs() │ └─ ← [Return] - ├─ [187996679] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x4576656e74204461746120696e20537472696e67) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x4576656e74204461746120696e20537472696e67) │ ├─ emit LogTopic123(topic1: 1, topic2: 2, topic3: 3, data: 0x4576656e74204461746120696e20537472696e67) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -1068,15 +1068,15 @@ Traces: [PASS] testRecordOnSingleLogTopic0() ([GAS]) Traces: - [15766576] RecordLogsTest::setUp() - ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12544 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [185575953] RecordLogsTest::testRecordOnSingleLogTopic0() + [..] RecordLogsTest::testRecordOnSingleLogTopic0() ├─ [0] VM::recordLogs() │ └─ ← [Return] - ├─ [185522714] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic0(data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -1087,33 +1087,33 @@ Traces: [PASS] testRecordsConsumednAsRead() ([GAS]) Traces: - [15766576] RecordLogsTest::setUp() - ├─ [15728833] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 12544 bytes of code + [..] RecordLogsTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] 
bytes of code └─ ← [Stop] - [907663484] RecordLogsTest::testRecordsConsumednAsRead() - ├─ [174808470] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 0x43a26051362b8040b289abe93334a5e3) + [..] RecordLogsTest::testRecordsConsumednAsRead() + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 0x43a26051362b8040b289abe93334a5e3) │ ├─ emit LogTopic1(topic1: 1, data: 0x43a26051362b8040b289abe93334a5e3) │ └─ ← [Stop] ├─ [0] VM::recordLogs() │ └─ ← [Return] ├─ [0] VM::getRecordedLogs() │ └─ ← [Return] [] - ├─ [182696394] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ ├─ emit LogTopic12(topic1: 2, topic2: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() │ └─ ← [Return] [([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] - ├─ [187996679] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa) │ ├─ emit LogTopic123(topic1: 4, topic2: 5, topic3: 6, data: 0x43a26051362b8040b289abe93334a5e3662751aa) │ └─ ← [Stop] - ├─ [173028426] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ ├─ emit LogTopic0(data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC), ([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] - ├─ [189014659] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(7, 8, 9, 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(7, 8, 9, 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) │ ├─ emit LogTopic123(topic1: 7, topic2: 8, topic3: 9, data: 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() @@ -1267,17 +1267,17 @@ Compiler run successful! 
Ran 3 tests for src/Test.t.sol:StateDiffTest [PASS] testCallProxyaccesses() ([GAS]) Traces: - [583131611] StateDiffTest::setUp() - ├─ [290989612] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 5485 bytes of code - ├─ [292049387] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 6359 bytes of code + [..] StateDiffTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [727692988] StateDiffTest::testCallProxyaccesses() + [..] StateDiffTest::testCallProxyaccesses() ├─ [0] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [727655655] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::proxyCall(55) + ├─ [..] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::proxyCall(55) │ ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) │ │ └─ ← [Return] │ └─ ← [Stop] @@ -1287,17 +1287,17 @@ Traces: [PASS] testCallaccesses() ([GAS]) Traces: - [583131611] StateDiffTest::setUp() - ├─ [290989612] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 5485 bytes of code - ├─ [292049387] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 6359 bytes of code + [..] StateDiffTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [276251742] StateDiffTest::testCallaccesses() + [..] StateDiffTest::testCallaccesses() ├─ [0] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [276222922] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) + ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) │ └─ ← [Stop] ├─ [0] VM::stopAndReturnStateDiff() │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 1)] @@ -1305,18 +1305,18 @@ Traces: [PASS] testCreateaccesses() ([GAS]) Traces: - [583131611] StateDiffTest::setUp() - ├─ [290989612] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC - │ └─ ← [Return] 5485 bytes of code - ├─ [292049387] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f - │ └─ ← [Return] 6359 bytes of code + [..] StateDiffTest::setUp() + ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + │ └─ ← [Return] [..] bytes of code + ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [291043890] StateDiffTest::testCreateaccesses() + [..] StateDiffTest::testCreateaccesses() ├─ [0] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [290989612] → new @0x2e234DAe75C793f67A35089C9d99245E1C58470b - │ └─ ← [Return] 5485 bytes of code + ├─ [..] → new @0x2e234DAe75C793f67A35089C9d99245E1C58470b + │ └─ ← [Return] [..] 
bytes of code ├─ [0] VM::stopAndReturnStateDiff() │ └─ ← [Return] [((0, 31337 [3.133e4]), 4, 0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 1000000000000000000 [1e18], 0x0000000000000000000000000000000000000000000000000000000000000064, false, [(0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000000, false), (0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000064, false)], 1)] └─ ← [Stop] From 1d6e7fa1fd791696f6c049b249331e5b2bcfac41 Mon Sep 17 00:00:00 2001 From: Dragan Milosevic Date: Tue, 4 Nov 2025 17:59:51 +0100 Subject: [PATCH 07/44] feat(anvil-polkadot): add transaction pool RPCs 2/2 (#370) * implement tx_pool inspect * add txpool_inspect RPC test * implement txpool_content * add txpool_content RPC test * implement remove_pool_transactions * add remove_pool_transactions RPC test * Implements sender recovery logic for impersonated transactions in txpool * add impersonation support test for txpool RPCs --- crates/anvil-polkadot/src/api_server/mod.rs | 3 + .../anvil-polkadot/src/api_server/server.rs | 130 +++++--- .../src/api_server/txpool_helpers.rs | 200 ++++++++++++ .../anvil-polkadot/src/substrate_node/host.rs | 19 +- crates/anvil-polkadot/tests/it/txpool.rs | 290 +++++++++++++++++- 5 files changed, 596 insertions(+), 46 deletions(-) create mode 100644 crates/anvil-polkadot/src/api_server/txpool_helpers.rs diff --git a/crates/anvil-polkadot/src/api_server/mod.rs b/crates/anvil-polkadot/src/api_server/mod.rs index 38c4ac053fdaa..bda613cddce4f 100644 --- a/crates/anvil-polkadot/src/api_server/mod.rs +++ b/crates/anvil-polkadot/src/api_server/mod.rs @@ -15,6 +15,9 @@ pub mod error; pub mod revive_conversions; mod server; mod signer; +mod txpool_helpers; + +pub use txpool_helpers::TxpoolTransactionInfo; pub type ApiHandle = mpsc::Sender; diff --git a/crates/anvil-polkadot/src/api_server/server.rs b/crates/anvil-polkadot/src/api_server/server.rs index 59ae02862b794..3dc8332460da1 100644 --- a/crates/anvil-polkadot/src/api_server/server.rs +++ b/crates/anvil-polkadot/src/api_server/server.rs @@ -7,10 +7,15 @@ use crate::{ ReviveFilter, SubstrateU256, convert_to_generic_transaction, }, signer::DevSigner, + txpool_helpers::{ + TxpoolTransactionInfo, extract_sender, extract_tx_info, extract_tx_summary, + transaction_matches_eth_hash, + }, }, logging::LoggingManager, macros::node_info, substrate_node::{ + host::recover_maybe_impersonated_address, impersonation::ImpersonationManager, in_mem_rpc::InMemoryRpcClient, mining_engine::MiningEngine, @@ -30,13 +35,13 @@ use alloy_primitives::{Address, B256, U64, U256}; use alloy_rpc_types::{ Filter, TransactionRequest, anvil::{Metadata as AnvilMetadata, MineOptions, NodeEnvironment, NodeInfo}, - txpool::TxpoolStatus, + txpool::{TxpoolContent, TxpoolInspect, TxpoolStatus}, }; use alloy_serde::WithOtherFields; use alloy_trie::{EMPTY_ROOT_HASH, KECCAK_EMPTY, TrieAccount}; use anvil_core::eth::{EthRequest, Params as MineParams}; use anvil_rpc::response::ResponseResult; -use codec::{Decode, DecodeLimit, Encode}; +use codec::{Decode, Encode}; use futures::{StreamExt, channel::mpsc}; use 
indexmap::IndexMap; use pallet_revive_eth_rpc::{ @@ -65,7 +70,7 @@ use polkadot_sdk::{ use revm::primitives::hardfork::SpecId; use sqlx::sqlite::SqlitePoolOptions; use std::{collections::HashSet, sync::Arc, time::Duration}; -use substrate_runtime::{Balance, RuntimeCall, UncheckedExtrinsic}; +use substrate_runtime::Balance; use subxt::{ Metadata as SubxtMetadata, OnlineClient, backend::rpc::RpcClient, client::RuntimeVersion as SubxtRuntimeVersion, config::substrate::H256, @@ -75,7 +80,6 @@ use subxt_signer::eth::Keypair; use tokio::try_join; pub const CLIENT_VERSION: &str = concat!("anvil-polkadot/v", env!("CARGO_PKG_VERSION")); -const MAX_EXTRINSIC_DEPTH: u32 = 256; pub struct ApiServer { eth_rpc_client: EthRpcClient, @@ -339,18 +343,18 @@ impl ApiServer { self.get_account_info(addr, block).await.to_rpc_result() } //------- Transaction Pool --------- - EthRequest::TxPoolStatus(_) => { - node_info!("txpool_status"); - self.txpool_status().await.to_rpc_result() - } + EthRequest::TxPoolStatus(_) => self.txpool_status().await.to_rpc_result(), + EthRequest::TxPoolInspect(_) => self.txpool_inspect().await.to_rpc_result(), + EthRequest::TxPoolContent(_) => self.txpool_content().await.to_rpc_result(), EthRequest::DropAllTransactions() => { - node_info!("anvil_dropAllTransactions"); self.anvil_drop_all_transactions().await.to_rpc_result() } EthRequest::DropTransaction(eth_hash) => { - node_info!("anvil_dropTransaction"); self.anvil_drop_transaction(eth_hash).await.to_rpc_result() } + EthRequest::RemovePoolTransactions(address) => { + self.anvil_remove_pool_transactions(address).await.to_rpc_result() + } // --- Metadata --- EthRequest::NodeInfo(_) => self.anvil_node_info().await.to_rpc_result(), EthRequest::AnvilMetadata(_) => self.anvil_metadata().await.to_rpc_result(), @@ -1247,12 +1251,58 @@ impl ApiServer { /// Returns transaction pool status async fn txpool_status(&self) -> Result { + node_info!("txpool_status"); let pool_status = self.tx_pool.status(); Ok(TxpoolStatus { pending: pool_status.ready as u64, queued: pool_status.future as u64 }) } + /// Returns a summary of all transactions in the pool + async fn txpool_inspect(&self) -> Result { + node_info!("txpool_inspect"); + let mut inspect = TxpoolInspect::default(); + + for tx in self.tx_pool.ready() { + if let Some((sender, nonce, summary)) = extract_tx_summary(tx.data()) { + let entry = inspect.pending.entry(sender).or_default(); + entry.insert(nonce.to_string(), summary); + } + } + + for tx in self.tx_pool.futures() { + if let Some((sender, nonce, summary)) = extract_tx_summary(tx.data()) { + let entry = inspect.queued.entry(sender).or_default(); + entry.insert(nonce.to_string(), summary); + } + } + + Ok(inspect) + } + + /// Returns full transaction details for all transactions in the pool + async fn txpool_content(&self) -> Result> { + node_info!("txpool_content"); + let mut content = TxpoolContent::default(); + + for tx in self.tx_pool.ready() { + if let Some((sender, nonce, tx_info)) = extract_tx_info(tx.data()) { + let entry = content.pending.entry(sender).or_default(); + entry.insert(nonce.to_string(), tx_info); + } + } + + for tx in self.tx_pool.futures() { + if let Some((sender, nonce, tx_info)) = extract_tx_info(tx.data()) { + let entry = content.queued.entry(sender).or_default(); + entry.insert(nonce.to_string(), tx_info); + } + } + + Ok(content) + } + /// Drop all transactions from pool async fn anvil_drop_all_transactions(&self) -> Result<()> { + node_info!("anvil_dropAllTransactions"); let ready_txs = self.tx_pool.ready(); let 
future_txs = self.tx_pool.futures(); @@ -1273,7 +1323,7 @@ impl ApiServer { /// Drop a specific transaction from the pool by its ETH hash async fn anvil_drop_transaction(&self, eth_hash: B256) -> Result> { - // Search in ready transactions + node_info!("anvil_dropTransaction"); for tx in self.tx_pool.ready() { if transaction_matches_eth_hash(tx.data(), eth_hash) { let mut invalid_txs = IndexMap::new(); @@ -1283,7 +1333,6 @@ impl ApiServer { } } - // Search in future transactions for tx in self.tx_pool.futures() { if transaction_matches_eth_hash(tx.data(), eth_hash) { let mut invalid_txs = IndexMap::new(); @@ -1296,30 +1345,34 @@ impl ApiServer { // Transaction not found Ok(None) } -} -/// Helper function to check if transaction matches ETH hash -fn transaction_matches_eth_hash( - tx_data: &Arc, - target_eth_hash: B256, -) -> bool { - let encoded = tx_data.encode(); - let Ok(ext) = - UncheckedExtrinsic::decode_all_with_depth_limit(MAX_EXTRINSIC_DEPTH, &mut &encoded[..]) - else { - return false; - }; + /// Remove all transactions from a specific sender address + async fn anvil_remove_pool_transactions(&self, address: Address) -> Result<()> { + node_info!("anvil_removePoolTransactions"); + let mut invalid_txs = IndexMap::new(); - let polkadot_sdk::sp_runtime::generic::UncheckedExtrinsic { - function: RuntimeCall::Revive(polkadot_sdk::pallet_revive::Call::eth_transact { payload }), - .. - } = ext.0 - else { - return false; - }; + for tx in self.tx_pool.ready() { + if let Some(sender) = extract_sender(tx.data()) { + if sender == address { + invalid_txs.insert(*tx.hash(), None); + } + } + } + + for tx in self.tx_pool.futures() { + if let Some(sender) = extract_sender(tx.data()) { + if sender == address { + invalid_txs.insert(*tx.hash(), None); + } + } + } - let tx_eth_hash = keccak_256(&payload); - B256::from_slice(&tx_eth_hash) == target_eth_hash + if !invalid_txs.is_empty() { + self.tx_pool.report_invalid(None, invalid_txs).await; + } + + Ok(()) + } } fn new_contract_info(address: &Address, code_hash: H256, nonce: Nonce) -> ContractInfo { @@ -1437,16 +1490,7 @@ async fn create_revive_rpc_client( let receipt_extractor = ReceiptExtractor::new_with_custom_address_recovery( api.clone(), None, - Arc::new(|signed_tx: &TransactionSigned| { - let sig = signed_tx.raw_signature()?; - if sig[..12] == [0; 12] && sig[32..64] == [0; 32] { - let mut res = [0; 20]; - res.copy_from_slice(&sig[12..32]); - Ok(H160::from(res)) - } else { - signed_tx.recover_eth_address() - } - }), + Arc::new(recover_maybe_impersonated_address), ) .await .map_err(|err| Error::ReviveRpc(EthRpcError::ClientError(err)))?; diff --git a/crates/anvil-polkadot/src/api_server/txpool_helpers.rs b/crates/anvil-polkadot/src/api_server/txpool_helpers.rs new file mode 100644 index 0000000000000..d22eb94a14e01 --- /dev/null +++ b/crates/anvil-polkadot/src/api_server/txpool_helpers.rs @@ -0,0 +1,200 @@ +//! Helper functions for txpool RPC methods +//! +//! This module contains utilities for extracting transaction information from +//! Substrate extrinsics, including support for impersonated transactions with +//! fake signatures. 
+ +use alloy_primitives::{Address, B256, U256, keccak256}; +use alloy_rpc_types::txpool::TxpoolInspectSummary; +use codec::{DecodeLimit, Encode}; +use polkadot_sdk::{ + pallet_revive::evm::TransactionSigned, + sp_core::{self, H256}, +}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use substrate_runtime::{RuntimeCall, UncheckedExtrinsic}; + +use crate::substrate_node::host::recover_maybe_impersonated_address; + +const MAX_EXTRINSIC_DEPTH: u32 = 256; + +/// Transaction info for txpool RPCs with Option fields to match Anvil's null values +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TxpoolTransactionInfo { + pub hash: H256, + pub block_hash: Option, + pub block_number: Option, + pub transaction_index: Option, + pub from: sp_core::H160, + pub transaction_signed: TransactionSigned, +} + +/// Decode extrinsic into ETH transaction payload and signed transaction +pub(super) fn decode_eth_transaction( + tx_data: &Arc, +) -> Option<(Vec, TransactionSigned)> { + let encoded = tx_data.encode(); + let ext = + UncheckedExtrinsic::decode_all_with_depth_limit(MAX_EXTRINSIC_DEPTH, &mut &encoded[..]) + .ok()?; + + let polkadot_sdk::sp_runtime::generic::UncheckedExtrinsic { + function: RuntimeCall::Revive(polkadot_sdk::pallet_revive::Call::eth_transact { payload }), + .. + } = ext.0 + else { + return None; + }; + + let signed_tx = TransactionSigned::decode(&payload).ok()?; + + Some((payload, signed_tx)) +} + +/// Check if transaction matches ETH hash +pub(super) fn transaction_matches_eth_hash( + tx_data: &Arc, + target_eth_hash: B256, +) -> bool { + let Some((payload, _signed_tx)) = decode_eth_transaction(tx_data) else { + return false; + }; + + let tx_eth_hash = keccak256(&payload); + B256::from_slice(tx_eth_hash.as_ref()) == target_eth_hash +} + +/// Fields extracted from an Ethereum transaction +pub(super) struct TransactionFields { + pub nonce: sp_core::U256, + pub to: Option, + pub value: sp_core::U256, + pub gas: sp_core::U256, + pub gas_price: sp_core::U256, +} + +/// Extract fields from ETH transaction +fn extract_tx_fields(signed_tx: &TransactionSigned) -> TransactionFields { + match signed_tx { + TransactionSigned::TransactionLegacySigned(tx) => { + let t = &tx.transaction_legacy_unsigned; + TransactionFields { + nonce: t.nonce, + to: t.to, + value: t.value, + gas: t.gas, + gas_price: t.gas_price, + } + } + TransactionSigned::Transaction2930Signed(tx) => { + let t = &tx.transaction_2930_unsigned; + TransactionFields { + nonce: t.nonce, + to: t.to, + value: t.value, + gas: t.gas, + gas_price: t.gas_price, + } + } + TransactionSigned::Transaction1559Signed(tx) => { + let t = &tx.transaction_1559_unsigned; + TransactionFields { + nonce: t.nonce, + to: t.to, + value: t.value, + gas: t.gas, + gas_price: t.max_fee_per_gas, + } + } + TransactionSigned::Transaction4844Signed(tx) => { + let t = &tx.transaction_4844_unsigned; + TransactionFields { + nonce: t.nonce, + to: Some(t.to), + value: t.value, + gas: t.gas, + gas_price: t.max_fee_per_gas, + } + } + TransactionSigned::Transaction7702Signed(tx) => { + let t = &tx.transaction_7702_unsigned; + TransactionFields { + nonce: t.nonce, + to: Some(t.to), + value: t.value, + gas: t.gas, + gas_price: t.max_fee_per_gas, + } + } + } +} + +/// Extract transaction summary from extrinsic +pub(super) fn extract_tx_summary( + tx_data: &Arc, +) -> Option<(Address, u64, TxpoolInspectSummary)> { + let (_payload, signed_tx) = decode_eth_transaction(tx_data)?; + + let from = recover_maybe_impersonated_address(&signed_tx).ok()?; + let sender = 
Address::from_slice(from.as_bytes()); + + let fields = extract_tx_fields(&signed_tx); + + let to_addr = fields.to.map(|addr| Address::from_slice(addr.as_bytes())); + let value_u256 = U256::from_limbs(fields.value.0); + let gas_u64 = fields.gas.as_u64(); + let gas_price_u128 = fields.gas_price.as_u128(); + let nonce_u64 = fields.nonce.as_u64(); + + Some(( + sender, + nonce_u64, + TxpoolInspectSummary { + to: to_addr, + value: value_u256, + gas: gas_u64, + gas_price: gas_price_u128, + }, + )) +} + +/// Extract full transaction info from extrinsic +pub(super) fn extract_tx_info( + tx_data: &Arc, +) -> Option<(Address, u64, TxpoolTransactionInfo)> { + let (payload, signed_tx) = decode_eth_transaction(tx_data)?; + + let eth_hash = keccak256(&payload); + let eth_hash_h256 = H256::from_slice(eth_hash.as_ref()); + + let from = recover_maybe_impersonated_address(&signed_tx).ok()?; + let sender = Address::from_slice(from.as_bytes()); + + let fields = extract_tx_fields(&signed_tx); + let nonce_u64 = fields.nonce.as_u64(); + + let tx_info = TxpoolTransactionInfo { + hash: eth_hash_h256, + block_hash: None, + block_number: None, + transaction_index: None, + from, + transaction_signed: signed_tx, + }; + + Some((sender, nonce_u64, tx_info)) +} + +/// Extract sender address from extrinsic as Alloy Address type. +/// Helper for `anvil_remove_pool_transactions` to compare sender addresses. +pub(super) fn extract_sender( + tx_data: &Arc, +) -> Option
{ + let (_payload, signed_tx) = decode_eth_transaction(tx_data)?; + + let from = recover_maybe_impersonated_address(&signed_tx).ok()?; + let sender = Address::from_slice(from.as_bytes()); + + Some(sender) +} diff --git a/crates/anvil-polkadot/src/substrate_node/host.rs b/crates/anvil-polkadot/src/substrate_node/host.rs index 6e43bacf516b8..434c06a78931d 100644 --- a/crates/anvil-polkadot/src/substrate_node/host.rs +++ b/crates/anvil-polkadot/src/substrate_node/host.rs @@ -28,10 +28,27 @@ use sp_runtime_interface::{ // The host functions in this module expect transactions // with fake signatures conforming the format checked in this function. -fn is_impersonated(sig: &[u8]) -> bool { +pub fn is_impersonated(sig: &[u8]) -> bool { sig[..12] == [0; 12] && sig[32..64] == [0; 32] } +/// Recover sender address from signed transaction, handling impersonated transactions. +/// For impersonated transactions (fake signatures), extracts the address embedded in the signature. +/// For normal transactions, performs standard ECDSA recovery. +#[allow(clippy::result_unit_err)] +pub fn recover_maybe_impersonated_address( + signed_tx: &polkadot_sdk::pallet_revive::evm::TransactionSigned, +) -> Result { + let sig = signed_tx.raw_signature()?; + if is_impersonated(&sig) { + let mut res = [0; 20]; + res.copy_from_slice(&sig[12..32]); + Ok(polkadot_sdk::sp_core::H160::from(res)) + } else { + signed_tx.recover_eth_address() + } +} + #[runtime_interface] pub trait Crypto { #[version(1)] diff --git a/crates/anvil-polkadot/tests/it/txpool.rs b/crates/anvil-polkadot/tests/it/txpool.rs index 42c6d6cffe210..6ba5c51ec336a 100644 --- a/crates/anvil-polkadot/tests/it/txpool.rs +++ b/crates/anvil-polkadot/tests/it/txpool.rs @@ -1,9 +1,12 @@ use crate::utils::{TestNode, unwrap_response}; use alloy_primitives::{Address, B256, U256}; -use alloy_rpc_types::{TransactionRequest, txpool::TxpoolStatus}; +use alloy_rpc_types::{ + TransactionRequest, + txpool::{TxpoolContent, TxpoolInspect, TxpoolStatus}, +}; use anvil_core::eth::EthRequest; use anvil_polkadot::{ - api_server::revive_conversions::ReviveAddress, + api_server::{TxpoolTransactionInfo, revive_conversions::ReviveAddress}, config::{AnvilNodeConfig, SubstrateNodeConfig}, }; use polkadot_sdk::pallet_revive::evm::Account; @@ -146,3 +149,286 @@ async fn test_drop_all_transactions() { assert_eq!(status.pending, 0); assert_eq!(status.queued, 0); } + +#[tokio::test(flavor = "multi_thread")] +async fn test_txpool_inspect() { + let anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config, substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_addr = Address::from(ReviveAddress::new(alith.address())); + let recipient_addr = Address::repeat_byte(0x42); + + let inspect: TxpoolInspect = + unwrap_response(node.eth_rpc(EthRequest::TxPoolInspect(())).await.unwrap()).unwrap(); + assert!(inspect.pending.is_empty()); + assert!(inspect.queued.is_empty()); + + for i in 0..3 { + let tx = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(1000 * (i + 1))) + .nonce(i); + node.send_transaction(tx, None).await.unwrap(); + } + + let tx_future = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(5000)) + .nonce(5); + node.send_transaction(tx_future, None).await.unwrap(); + + let inspect: TxpoolInspect = + 
unwrap_response(node.eth_rpc(EthRequest::TxPoolInspect(())).await.unwrap()).unwrap(); + + assert_eq!(inspect.pending.len(), 1); + assert_eq!(inspect.queued.len(), 1); + + // Get current block to verify gas_price >= base_fee_per_gas + let block_number = node.best_block_number().await; + let block_hash = node.block_hash_by_number(block_number).await.unwrap(); + let block = node.get_block_by_hash(block_hash).await; + let base_fee = block.base_fee_per_gas.as_u128(); + + let pending_txs = inspect.pending.get(&alith_addr).unwrap(); + assert_eq!(pending_txs.len(), 3); + + for i in 0..3 { + let summary = pending_txs.get(&i.to_string()).unwrap(); + assert_eq!(summary.to.unwrap(), recipient_addr); + assert_eq!(summary.value, U256::from(1000 * (i + 1))); + assert!(summary.gas > 0); + assert!(summary.gas_price >= base_fee); + } + + let queued_txs = inspect.queued.get(&alith_addr).unwrap(); + assert_eq!(queued_txs.len(), 1); + + let summary = queued_txs.get("5").unwrap(); + assert_eq!(summary.to.unwrap(), recipient_addr); + assert_eq!(summary.value, U256::from(5000)); + assert!(summary.gas > 0); + assert!(summary.gas_price >= base_fee); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_txpool_content() { + let anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config, substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_addr = Address::from(ReviveAddress::new(alith.address())); + let recipient_addr = Address::repeat_byte(0x42); + + let content: TxpoolContent = + unwrap_response(node.eth_rpc(EthRequest::TxPoolContent(())).await.unwrap()).unwrap(); + assert!(content.pending.is_empty()); + assert!(content.queued.is_empty()); + + let mut pending_hashes = vec![]; + for i in 0..3 { + let tx = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(1000 * (i + 1))) + .nonce(i); + let hash = node.send_transaction(tx, None).await.unwrap(); + pending_hashes.push(hash); + } + + let tx_future = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(5000)) + .nonce(5); + let queued_hash = node.send_transaction(tx_future, None).await.unwrap(); + + let content: TxpoolContent = + unwrap_response(node.eth_rpc(EthRequest::TxPoolContent(())).await.unwrap()).unwrap(); + + assert_eq!(content.pending.len(), 1); + assert_eq!(content.queued.len(), 1); + + let pending_txs = content.pending.get(&alith_addr).unwrap(); + assert_eq!(pending_txs.len(), 3); + + for i in 0..3 { + let tx_info = pending_txs.get(&i.to_string()).unwrap(); + let from_addr = Address::from_slice(tx_info.from.as_bytes()); + assert_eq!(from_addr, alith_addr); + + let expected_hash = B256::from_slice(pending_hashes[i as usize].0.as_ref()); + let actual_hash = B256::from_slice(tx_info.hash.as_ref()); + assert_eq!(actual_hash, expected_hash); + + // Pending transactions should have None for block-related fields + assert_eq!(tx_info.block_hash, None); + assert_eq!(tx_info.block_number, None); + assert_eq!(tx_info.transaction_index, None); + } + + let queued_txs = content.queued.get(&alith_addr).unwrap(); + assert_eq!(queued_txs.len(), 1); + + let tx_info = queued_txs.get("5").unwrap(); + let from_addr = Address::from_slice(tx_info.from.as_bytes()); + assert_eq!(from_addr, alith_addr); + + let expected_hash = B256::from_slice(queued_hash.0.as_ref()); + let actual_hash = 
B256::from_slice(tx_info.hash.as_ref()); + assert_eq!(actual_hash, expected_hash); + + // Queued transactions should also have None for block-related fields + assert_eq!(tx_info.block_hash, None); + assert_eq!(tx_info.block_number, None); + assert_eq!(tx_info.transaction_index, None); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_remove_pool_transactions() { + let anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config, substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_addr = Address::from(ReviveAddress::new(alith.address())); + + let baltathar = Account::from(subxt_signer::eth::dev::baltathar()); + let baltathar_addr = Address::from(ReviveAddress::new(baltathar.address())); + + let recipient_addr = Address::repeat_byte(0x42); + + // Send 3 transactions from Alith + for i in 0..3 { + let tx = TransactionRequest::default() + .from(alith_addr) + .to(recipient_addr) + .value(U256::from(1000 * (i + 1))) + .nonce(i); + node.send_transaction(tx, None).await.unwrap(); + } + + // Send 2 transactions from Baltathar + for i in 0..2 { + let tx = TransactionRequest::default() + .from(baltathar_addr) + .to(recipient_addr) + .value(U256::from(2000 * (i + 1))) + .nonce(i); + node.send_transaction(tx, None).await.unwrap(); + } + + let status: TxpoolStatus = + unwrap_response(node.eth_rpc(EthRequest::TxPoolStatus(())).await.unwrap()).unwrap(); + assert_eq!(status.pending, 5); + assert_eq!(status.queued, 0); + + // Remove all transactions from Alith + unwrap_response::<()>( + node.eth_rpc(EthRequest::RemovePoolTransactions(alith_addr)).await.unwrap(), + ) + .unwrap(); + + let status: TxpoolStatus = + unwrap_response(node.eth_rpc(EthRequest::TxPoolStatus(())).await.unwrap()).unwrap(); + assert_eq!(status.pending, 2); + assert_eq!(status.queued, 0); + + // Verify only Baltathar's transactions remain + let content: TxpoolContent = + unwrap_response(node.eth_rpc(EthRequest::TxPoolContent(())).await.unwrap()).unwrap(); + + assert_eq!(content.pending.len(), 1); + assert!(content.pending.contains_key(&baltathar_addr)); + assert!(!content.pending.contains_key(&alith_addr)); + + let baltathar_txs = content.pending.get(&baltathar_addr).unwrap(); + assert_eq!(baltathar_txs.len(), 2); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_txpool_with_impersonated_transactions() { + let anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config, substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_addr = Address::from(ReviveAddress::new(alith.address())); + + let dorothy = Account::from(subxt_signer::eth::dev::dorothy()); + let impersonated_addr = Address::from(ReviveAddress::new(dorothy.address())); + let recipient_addr = Address::repeat_byte(0x42); + + // Fund dorothy account (dorothy is not initialized in genesis) + let fund_tx = TransactionRequest::default() + .from(alith_addr) + .to(impersonated_addr) + .value(U256::from(10000000000000000000u64)); + node.send_transaction(fund_tx, None).await.unwrap(); + + // Mine the funding transaction + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + + unwrap_response::<()>( + node.eth_rpc(EthRequest::ImpersonateAccount(impersonated_addr)).await.unwrap(), 
+ ) + .unwrap(); + + for i in 0..3 { + let tx = TransactionRequest::default() + .from(impersonated_addr) + .to(recipient_addr) + .value(U256::from(1000 * (i + 1))) + .nonce(i); + node.send_unsigned_transaction(tx, None).await.unwrap(); + } + + let status: TxpoolStatus = + unwrap_response(node.eth_rpc(EthRequest::TxPoolStatus(())).await.unwrap()).unwrap(); + assert_eq!(status.pending, 3); + assert_eq!(status.queued, 0); + + // Test txpool_inspect (uses extract_tx_summary with impersonation support) + let inspect: TxpoolInspect = + unwrap_response(node.eth_rpc(EthRequest::TxPoolInspect(())).await.unwrap()).unwrap(); + assert_eq!(inspect.pending.len(), 1); + assert!(inspect.pending.contains_key(&impersonated_addr)); + + let impersonated_txs = inspect.pending.get(&impersonated_addr).unwrap(); + assert_eq!(impersonated_txs.len(), 3); + + // Test txpool_content (uses extract_tx_info with impersonation support) + let content: TxpoolContent = + unwrap_response(node.eth_rpc(EthRequest::TxPoolContent(())).await.unwrap()).unwrap(); + assert_eq!(content.pending.len(), 1); + + let pending_txs = content.pending.get(&impersonated_addr).unwrap(); + assert_eq!(pending_txs.len(), 3); + + for i in 0..3 { + let tx_info = pending_txs.get(&i.to_string()).unwrap(); + let from_addr = Address::from_slice(tx_info.from.as_bytes()); + assert_eq!(from_addr, impersonated_addr); + assert!(tx_info.hash != Default::default()); + } + + // Test anvil_removePoolTransactions (uses extract_sender with impersonation support) + unwrap_response::<()>( + node.eth_rpc(EthRequest::RemovePoolTransactions(impersonated_addr)).await.unwrap(), + ) + .unwrap(); + + let status: TxpoolStatus = + unwrap_response(node.eth_rpc(EthRequest::TxPoolStatus(())).await.unwrap()).unwrap(); + assert_eq!(status.pending, 0); + assert_eq!(status.queued, 0); + + unwrap_response::<()>( + node.eth_rpc(EthRequest::StopImpersonatingAccount(impersonated_addr)).await.unwrap(), + ) + .unwrap(); +} From b6a247fb813492bfb84f1626e9b26455ac12b176 Mon Sep 17 00:00:00 2001 From: Diego Date: Tue, 4 Nov 2025 22:58:14 -0300 Subject: [PATCH 08/44] Fix clippy and fmt --- .../substrate_node/lazy_loading/backend.rs | 188 +++++++----------- crates/script/src/progress.rs | 2 +- 2 files changed, 75 insertions(+), 115 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs index d7f8c0f7b6abe..88b00b45ccc3d 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -48,35 +48,35 @@ impl StoredBlock { just: Option, ) -> Self { match body { - Some(body) => StoredBlock::Full(B::new(header, body), just), - None => StoredBlock::Header(header, just), + Some(body) => Self::Full(B::new(header, body), just), + None => Self::Header(header, just), } } fn header(&self) -> &B::Header { match *self { - StoredBlock::Header(ref h, _) => h, - StoredBlock::Full(ref b, _) => b.header(), + Self::Header(ref h, _) => h, + Self::Full(ref b, _) => b.header(), } } fn justifications(&self) -> Option<&Justifications> { match *self { - StoredBlock::Header(_, ref j) | StoredBlock::Full(_, ref j) => j.as_ref(), + Self::Header(_, ref j) | Self::Full(_, ref j) => j.as_ref(), } } fn extrinsics(&self) -> Option<&[B::Extrinsic]> { match *self { - StoredBlock::Header(_, _) => None, - StoredBlock::Full(ref b, _) => Some(b.extrinsics()), + Self::Header(_, _) => None, + Self::Full(ref b, _) => Some(b.extrinsics()), } } fn 
into_inner(self) -> (B::Header, Option>, Option) { match self { - StoredBlock::Header(header, just) => (header, None, just), - StoredBlock::Full(block, just) => { + Self::Header(header, just) => (header, None, just), + Self::Full(block, just) => { let (header, body) = block.deconstruct(); (header, Some(body), just) } @@ -107,7 +107,7 @@ pub struct Blockchain { impl Blockchain { /// Create new in-memory blockchain storage. - fn new(rpc_client: Option>>) -> Blockchain { + fn new(rpc_client: Option>>) -> Self { log::info!( target: super::LAZY_LOADING_LOG_TARGET, "🏗️ Creating new Blockchain storage (empty)" @@ -125,7 +125,7 @@ impl Blockchain { leaves: LeafSet::new(), aux: HashMap::new(), })); - Blockchain { rpc_client, storage } + Self { rpc_client, storage } } #[inline] fn rpc(&self) -> Option<&dyn RPCClient> { @@ -139,10 +139,10 @@ impl Blockchain { BlockId::Number(n) => { log::info!( target: super::LAZY_LOADING_LOG_TARGET, - "Looking up block hash for number={}", n + "Looking up block hash for number={n}", ); - let block_hash = self.storage.read().hashes.get(&n).cloned(); + let block_hash = self.storage.read().hashes.get(&n).copied(); log::info!( target: super::LAZY_LOADING_LOG_TARGET, @@ -156,7 +156,7 @@ impl Blockchain { None => { log::info!( target: super::LAZY_LOADING_LOG_TARGET, - "Block hash not found locally, trying RPC for number={}", n + "Block hash not found locally, trying RPC for number={n}", ); let block_hash = self.rpc().and_then(|rpc| rpc.block_hash(Some(n)).ok().flatten()); @@ -184,10 +184,7 @@ impl Blockchain { log::info!( target: super::LAZY_LOADING_LOG_TARGET, - "Inserting block: number={}, hash={:?}, new_state={:?}", - number, - hash, - new_state + "Inserting block: number={number}, hash={hash:?}, new_state={new_state:?}", ); if new_state.is_best() { @@ -234,8 +231,7 @@ impl Blockchain { log::debug!( target: super::LAZY_LOADING_LOG_TARGET, - "Total number of blocks: {:?}", - count + "Total number of blocks: {count:?}", ); count @@ -273,7 +269,7 @@ impl Blockchain { pub fn set_head(&self, hash: Block::Hash) -> sp_blockchain::Result<()> { let header = self .header(hash)? - .ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{}", hash)))?; + .ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{hash:?}")))?; self.apply_head(&header) } @@ -437,9 +433,9 @@ impl HeaderBackend for Blockchain match rpc.block(Some(hash)) { Ok(Some(block)) => Ok(Some(*block.block.header().number())), - err => Err(sp_blockchain::Error::UnknownBlock( - format!("Failed to fetch block number from RPC: {:?}", err).into(), - )), + err => Err(sp_blockchain::Error::UnknownBlock(format!( + "Failed to fetch block number from RPC: {err:?}" + ))), }, None => Err(sp_blockchain::Error::UnknownBlock( "RPC not configured to resolve block number".into(), @@ -463,7 +459,7 @@ impl HeaderMetadata for Blockchain Result, Self::Error> { self.header(hash)?.map(|header| CachedHeaderMetadata::from(&header)).ok_or_else(|| { - sp_blockchain::Error::UnknownBlock(format!("header not found: {}", hash)) + sp_blockchain::Error::UnknownBlock(format!("header not found: {hash:?}")) }) } @@ -662,7 +658,7 @@ impl backend::BlockImportOperation update: StorageCollection, _child_update: ChildStorageCollection, ) -> sp_blockchain::Result<()> { - self.storage_updates = update.clone(); + self.storage_updates = update; Ok(()) } @@ -707,10 +703,6 @@ pub struct RawIterArgs { /// /// This is inclusive and the iteration will include the key which is specified here. 
pub start_at: Option>, - - /// If this is `true` then the iteration will *not* include - /// the key specified in `start_at`, if there is such a key. - pub start_at_exclusive: bool, } /// A raw iterator over the `BenchmarkingState`. @@ -737,11 +729,11 @@ impl sp_state_machine::StorageIterator sp_state_machine::StorageIterator sp_state_machine::StorageIterator sp_state_machine::StorageIterator sp_state_machine::StorageIterator sp_state_machine::StorageIterator sp_state_machine::StorageIterator sp_state_machine::StorageIterator sp_state_machine::StorageIterator Some(Ok((next_key, value))), - _ => None, - } + maybe_value.map(|value| Ok((next_key, value))) } else { self.complete = true; None @@ -1032,7 +1016,7 @@ impl sp_state_machine::Backend rpc .storage_hash(StorageKey(key.to_vec()), block) - .map_err(|e| format!("Failed to fetch storage hash from RPC: {:?}", e).into()), + .map_err(|e| format!("Failed to fetch storage hash from RPC: {e:?}")), None => Ok(None), } }; @@ -1106,7 +1090,7 @@ impl sp_state_machine::Backend sp_state_machine::Backend sp_state_machine::Backend) -> Result { - let mut clone: RawIterArgs = Default::default(); - clone.start_at_exclusive = args.start_at_exclusive.clone(); - clone.prefix = args.prefix.map(|v| v.to_vec()); - clone.start_at = args.start_at.map(|v| v.to_vec()); + let clone = RawIterArgs { + prefix: args.prefix.map(|v| v.to_vec()), + start_at: args.start_at.map(|v| v.to_vec()), + }; Ok(RawIter:: { args: clone, complete: false, _phantom: Default::default() }) } @@ -1219,7 +1203,7 @@ pub struct Backend { impl Backend { fn new(rpc_client: Option>>, fork_checkpoint: Block::Header) -> Self { - Backend { + Self { rpc_client: rpc_client.clone(), states: Default::default(), blockchain: Blockchain::new(rpc_client), @@ -1309,7 +1293,7 @@ impl backend::Backend for Backend, operation.storage_updates)], StateVersion::V1); let new_state = ForkedLazyBackend { rpc_client: self.rpc_client.clone(), - block_hash: Some(hash.clone()), + block_hash: Some(hash), fork_block: self.fork_checkpoint.hash(), db: new_db, removed_keys: new_removed_keys, @@ -1381,9 +1365,9 @@ impl backend::Backend for Backend { + pub struct Rpc { pub counters: std::sync::Arc, /// storage[(block_hash, key)] = value - pub storage: std::sync::Arc< - parking_lot::RwLock>, - >, + pub storage: Arc>>, /// storage_hash[(block_hash, key)] = hash - pub storage_hashes: std::sync::Arc< - parking_lot::RwLock>, - >, + pub storage_hashes: + Arc>>, /// storage_keys_paged[(block_hash, (prefix,start))] = Vec - pub storage_keys_pages: std::sync::Arc< - parking_lot::RwLock), Vec>>, - >, + pub storage_keys_pages: + Arc), Vec>>>, /// headers[hash] = header - pub headers: std::sync::Arc>>, + pub headers: Arc>>, /// blocks[hash] = SignedBlock - pub blocks: - std::sync::Arc>>>, - /// block_hash_by_number[n] = hash - pub block_hash_by_number: - std::sync::Arc>>, + pub blocks: Arc>>>, } - impl RPC { + impl Rpc { pub fn new() -> Self { Self { counters: std::sync::Arc::new(Counters::default()), @@ -1564,9 +1539,6 @@ mod tests { )), headers: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), blocks: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), - block_hash_by_number: std::sync::Arc::new(parking_lot::RwLock::new( - BTreeMap::new(), - )), } } @@ -1590,7 +1562,7 @@ mod tests { } } - impl RPCClient for RPC { + impl RPCClient for Rpc { fn storage( &self, key: StorageKey, @@ -1598,7 +1570,7 @@ mod tests { ) -> Result, jsonrpsee::core::ClientError> { self.counters.storage_calls.fetch_add(1, Ordering::Relaxed); 
let map = self.storage.read(); - Ok(map.get(&(at.unwrap_or_default(), key.clone())).cloned()) + Ok(map.get(&(at.unwrap_or_default(), key)).cloned()) } fn storage_hash( @@ -1609,7 +1581,7 @@ mod tests { self.counters.storage_hash_calls.fetch_add(1, Ordering::Relaxed); let bh = at.unwrap_or_default(); let map = self.storage_hashes.read(); - Ok(map.get(&(bh, key.clone())).cloned()) + Ok(map.get(&(bh, key)).copied()) } fn storage_keys_paged( @@ -1625,31 +1597,25 @@ mod tests { use std::cmp::min; let bh = at.unwrap_or_default(); - let prefix = key.map(|k| k.0).unwrap_or_default(); // ✅ usar el prefix correcto + let prefix = key.map(|k| k.0).unwrap_or_default(); let start = start_key.map(|k| k.0); let map = self.storage_keys_pages.read(); let mut all = map.get(&(bh, prefix.clone())).cloned().unwrap_or_default(); - // Asegurar orden determinista (lexicográfico por bytes) all.sort_by(|a, b| a.0.cmp(&b.0)); - // Filtrar por prefix (defensivo) let mut filtered: Vec = all.into_iter().filter(|k| k.0.starts_with(&prefix)).collect(); - // Aplicar start_key EXCLUSIVO: devolver solo las > start if let Some(s) = start { - // buscar posición exacta, si existe if let Some(pos) = filtered.iter().position(|k| k.0 == s) { filtered = filtered.into_iter().skip(pos + 1).collect(); } else { - // si no está, devolver la primera mayor - filtered = filtered.into_iter().filter(|k| k.0 > s).collect(); + filtered.retain(|k| k.0 > s); } } - // Aplicar count let take = min(filtered.len(), count as usize); Ok(filtered.into_iter().take(take).map(|k| k.0).collect()) } @@ -1676,7 +1642,7 @@ mod tests { fn block_hash( &self, - num: Option>, + _num: Option>, ) -> Result, jsonrpsee::core::ClientError> { todo!() } @@ -1698,13 +1664,7 @@ mod tests { type TestBlockT = TestBlock; fn make_header(number: N, parent: ::Hash) -> TestHeader { - TestHeader::new( - number.into(), - Default::default(), - Default::default(), - parent, - Default::default(), - ) + TestHeader::new(number, Default::default(), Default::default(), parent, Default::default()) } fn make_block( @@ -1722,10 +1682,10 @@ mod tests { #[test] fn before_fork_reads_remote_only() { - let rpc = std::sync::Arc::new(RPC::new()); + let rpc = std::sync::Arc::new(Rpc::new()); // fork checkpoint at #100 let cp = checkpoint(100); - let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + let backend = Backend::::new(Some(rpc.clone()), cp); // state_at(Default::default()) => before_fork=true let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); @@ -1747,7 +1707,7 @@ mod tests { #[test] fn after_fork_first_fetch_caches_subsequent_hits_local() { - let rpc = std::sync::Arc::new(RPC::new()); + let rpc = std::sync::Arc::new(Rpc::new()); let cp = checkpoint(10); let backend = Backend::::new(Some(rpc.clone()), cp.clone()); @@ -1757,7 +1717,7 @@ mod tests { let h11 = b11.header.hash(); rpc.put_header(b11.header.clone()); - rpc.put_block(b11.clone(), None); + rpc.put_block(b11, None); // remote storage at fork block (checkpoint hash) let fork_hash = cp.hash(); @@ -1782,7 +1742,7 @@ mod tests { #[test] fn removed_keys_prevents_remote_fetch() { - let rpc = std::sync::Arc::new(RPC::new()); + let rpc = std::sync::Arc::new(Rpc::new()); let cp = checkpoint(5); let backend = Backend::::new(Some(rpc.clone()), cp.clone()); @@ -1808,7 +1768,7 @@ mod tests { #[test] fn raw_iter_merges_local_then_remote() { - let rpc = std::sync::Arc::new(RPC::new()); + let rpc = std::sync::Arc::new(Rpc::new()); let cp = checkpoint(7); let backend = Backend::::new(Some(rpc.clone()), 
cp.clone()); @@ -1850,16 +1810,16 @@ mod tests { #[test] fn blockchain_header_and_number_are_cached() { - let rpc = std::sync::Arc::new(RPC::new()); + let rpc = std::sync::Arc::new(Rpc::new()); let cp = checkpoint(3); let backend = Backend::::new(Some(rpc.clone()), cp.clone()); let chain = backend.blockchain(); // prepare one block w/ extrinsics let xts: Vec = vec![]; - let b4 = make_block(4, cp.hash(), xts.clone()); + let b4 = make_block(4, cp.hash(), xts); let h4 = b4.header().hash(); - rpc.put_block(b4.clone(), None); + rpc.put_block(b4, None); // first header() fetches RPC and caches as Full let h = chain.header(h4).unwrap().unwrap(); diff --git a/crates/script/src/progress.rs b/crates/script/src/progress.rs index 09329ad6b05ab..42cf51784e569 100644 --- a/crates/script/src/progress.rs +++ b/crates/script/src/progress.rs @@ -171,7 +171,7 @@ impl ScriptProgress { progress } - /// Traverses a set of pendings and either finds receipts, or clears them from + /// Traverses a set of pending and either finds receipts, or clears them from /// the deployment sequence. /// /// For each `tx_hash`, we check if it has confirmed. If it has From 760cc7df9e01bb165c6a4a01f376c1c3398284a1 Mon Sep 17 00:00:00 2001 From: Diego Date: Tue, 4 Nov 2025 23:32:38 -0300 Subject: [PATCH 09/44] Fix some tests --- .../substrate_node/lazy_loading/backend.rs | 86 ++++++++++++------- 1 file changed, 54 insertions(+), 32 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs index 88b00b45ccc3d..43b5a081830ee 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -755,21 +755,29 @@ impl sp_state_machine::StorageIterator sp_state_machine::StorageIterator Date: Wed, 5 Nov 2025 10:34:19 +0200 Subject: [PATCH 10/44] fix getStorage and setStorage RPCs (#385) - getStorage should return empty data for an inexistent account (we were previously returning error) - setStorage should also work for inexistent accounts or EOAs --- .github/workflows/test.yml | 10 +- .../anvil-polkadot/src/api_server/server.rs | 69 +++++++++-- .../anvil-polkadot/tests/it/standard_rpc.rs | 10 ++ .../anvil-polkadot/tests/it/state_injector.rs | 116 +++++++++++------- crates/script/src/progress.rs | 2 +- 5 files changed, 149 insertions(+), 58 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 29362d25d5746..11ae96ec88e33 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -53,19 +53,19 @@ jobs: force_orphan: true doctest: - runs-on: ubuntu-latest + runs-on: parity-large-new timeout-minutes: 60 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@1.88.0 + - name: Install build tools + run: | + sudo apt-get update + sudo apt-get install -y clang libclang-dev unzip build-essential - name: Install protobuf-compiler uses: arduino/setup-protoc@v3 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - - name: Install clang on ubuntu - run: | - sudo apt-get update - sudo apt-get install -y clang libclang-dev - uses: dtolnay/rust-toolchain@1.88.0 with: target: wasm32-unknown-unknown diff --git a/crates/anvil-polkadot/src/api_server/server.rs b/crates/anvil-polkadot/src/api_server/server.rs index 3dc8332460da1..019500ffe2a0c 100644 --- a/crates/anvil-polkadot/src/api_server/server.rs +++ b/crates/anvil-polkadot/src/api_server/server.rs @@ -564,13 +564,21 @@ impl ApiServer { addr: Address, slot: U256, 
block: Option, - ) -> Result { + ) -> Result { node_info!("eth_getStorageAt"); let hash = self.get_block_hash_for_tag(block).await?; let runtime_api = self.eth_rpc_client.runtime_api(hash); - let bytes = - runtime_api.get_storage(ReviveAddress::from(addr).inner(), slot.to_be_bytes()).await?; - Ok(bytes.unwrap_or_default().into()) + let bytes: B256 = match runtime_api + .get_storage(ReviveAddress::from(addr).inner(), slot.to_be_bytes()) + .await + { + Ok(Some(bytes)) => bytes.as_slice().try_into().map_err(|_| { + Error::InternalError("Unable to convert value to 32-byte value".to_string()) + })?, + Ok(None) | Err(ClientError::ContractNotFound) => Default::default(), + Err(err) => return Err(Error::ReviveRpc(EthRpcError::ClientError(err))), + }; + Ok(bytes) } async fn get_code(&self, address: Address, block: Option) -> Result { @@ -994,15 +1002,58 @@ impl ApiServer { let latest_block = self.latest_block(); - let Some(ReviveAccountInfo { account_type: AccountType::Contract(contract_info), .. }) = - self.backend.read_revive_account_info(latest_block, address)? - else { - return Ok(()); + let account_id = self.get_account_id(latest_block, address)?; + + let maybe_system_account_info = + self.backend.read_system_account_info(latest_block, account_id.clone())?; + let nonce = maybe_system_account_info.as_ref().map(|info| info.nonce).unwrap_or_default(); + + if maybe_system_account_info.is_none() { + self.set_frame_system_balance( + latest_block, + account_id, + substrate_runtime::currency::DOLLARS, + )?; + } + + let trie_id = match self.backend.read_revive_account_info(latest_block, address)? { + // If the account doesn't exist, create one. + None => { + let contract_info = new_contract_info(&address, (*KECCAK_EMPTY).into(), nonce); + let trie_id = contract_info.trie_id.clone(); + + self.backend.inject_revive_account_info( + latest_block, + address, + ReviveAccountInfo { + account_type: AccountType::Contract(contract_info), + dust: 0, + }, + ); + + trie_id + } + // If the account is not already a contract account, make it one. + Some(ReviveAccountInfo { account_type: AccountType::EOA, dust }) => { + let contract_info = new_contract_info(&address, (*KECCAK_EMPTY).into(), nonce); + let trie_id = contract_info.trie_id.clone(); + + self.backend.inject_revive_account_info( + latest_block, + address, + ReviveAccountInfo { account_type: AccountType::Contract(contract_info), dust }, + ); + + trie_id + } + Some(ReviveAccountInfo { + account_type: AccountType::Contract(contract_info), .. + }) => contract_info.trie_id, }; self.backend.inject_child_storage( latest_block, - contract_info.trie_id.to_vec(), + trie_id.to_vec(), key.to_be_bytes_vec(), value.to_vec(), ); diff --git a/crates/anvil-polkadot/tests/it/standard_rpc.rs b/crates/anvil-polkadot/tests/it/standard_rpc.rs index 39ad1bf3aa5e6..8874292e5facd 100644 --- a/crates/anvil-polkadot/tests/it/standard_rpc.rs +++ b/crates/anvil-polkadot/tests/it/standard_rpc.rs @@ -599,6 +599,16 @@ async fn test_get_storage() { unwrap_response::<()>(node.eth_rpc(EthRequest::SetAutomine(true)).await.unwrap()).unwrap(); let alith = Account::from(subxt_signer::eth::dev::alith()); + // Test retrieving the storage of an EOA account (alith) + let stored_value = node.get_storage_at(U256::from(0), alith.address()).await; + assert_eq!(stored_value, 0); + + // Test retrieving the storage of a non-existant account. 
+ let random_addr = Address::random(); + let stored_value = + node.get_storage_at(U256::from(0), ReviveAddress::from(random_addr).inner()).await; + assert_eq!(stored_value, 0); + let contract_code = get_contract_code("SimpleStorage"); let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), Some(1)).await; tokio::time::sleep(Duration::from_millis(400)).await; diff --git a/crates/anvil-polkadot/tests/it/state_injector.rs b/crates/anvil-polkadot/tests/it/state_injector.rs index 69b2930423fe0..77b270f709ae1 100644 --- a/crates/anvil-polkadot/tests/it/state_injector.rs +++ b/crates/anvil-polkadot/tests/it/state_injector.rs @@ -595,51 +595,81 @@ async fn test_set_storage() { let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); let alith = Account::from(subxt_signer::eth::dev::alith()); - let contract_code = get_contract_code("SimpleStorage"); - let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; - unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(std::time::Duration::from_millis(400)).await; - let receipt = node.get_transaction_receipt(tx_hash).await; - let contract_address = receipt.contract_address.unwrap(); - - // Check the default value for slot 0. - let result = node - .eth_rpc(EthRequest::EthGetStorageAt( - Address::from(ReviveAddress::new(contract_address)), - U256::from(0), - None, - )) - .await + // Set storage of a new random account. + { + let random_addr = Address::random(); + + let stored_value = + node.get_storage_at(U256::from(0), ReviveAddress::from(random_addr).inner()).await; + assert_eq!(stored_value, 0); + + // Set a new value for the slot 0. + unwrap_response::<()>( + node.eth_rpc(EthRequest::SetStorageAt( + random_addr, + U256::from(0), + B256::from(U256::from(511)), + )) + .await + .unwrap(), + ) .unwrap(); - let hex_string = unwrap_response::(result).unwrap(); - let hex_value = hex_string.strip_prefix("0x").unwrap_or(&hex_string); - let stored_value = U256::from_str_radix(hex_value, 16).unwrap(); - assert_eq!(stored_value, 0); - // Set a new value for the slot 0. - - unwrap_response::<()>( - node.eth_rpc(EthRequest::SetStorageAt( - Address::from(ReviveAddress::new(contract_address)), - U256::from(0), - B256::from(U256::from(511)), - )) - .await - .unwrap(), - ) - .unwrap(); + // Check that the value was updated + let stored_value = + node.get_storage_at(U256::from(0), ReviveAddress::from(random_addr).inner()).await; + assert_eq!(stored_value, 511); + } + + // Update the storage of an existing account + { + let contract_code = get_contract_code("SimpleStorage"); + let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(std::time::Duration::from_millis(400)).await; + let receipt = node.get_transaction_receipt(tx_hash).await; + let contract_address = receipt.contract_address.unwrap(); + + // Check the default value for slot 0. + let stored_value = node.get_storage_at(U256::from(0), contract_address).await; + assert_eq!(stored_value, 0); + + // Set a new value for the slot 0. 
+ unwrap_response::<()>(
+ node.eth_rpc(EthRequest::SetStorageAt(
+ Address::from(ReviveAddress::new(contract_address)),
+ U256::from(0),
+ B256::from(U256::from(511)),
+ ))
+ .await
+ .unwrap(),
+ )
+ .unwrap();
- // Check that the value was updated
- let result = node
- .eth_rpc(EthRequest::EthGetStorageAt(
- Address::from(ReviveAddress::new(contract_address)),
- U256::from(0),
- None,
- ))
- .await
+ // Check that the value was updated
+ let stored_value = node.get_storage_at(U256::from(0), contract_address).await;
+ assert_eq!(stored_value, 511);
+ }
+
+ // Set storage for an EOA account (Alith).
+ {
+ let stored_value = node.get_storage_at(U256::from(0), alith.address()).await;
+ assert_eq!(stored_value, 0);
+
+ // Set a new value for the slot 0.
+ unwrap_response::<()>(
+ node.eth_rpc(EthRequest::SetStorageAt(
+ Address::from(ReviveAddress::new(alith.address())),
+ U256::from(0),
+ B256::from(U256::from(511)),
+ ))
+ .await
+ .unwrap(),
+ )
.unwrap();
- let hex_string = unwrap_response::(result).unwrap();
- let hex_value = hex_string.strip_prefix("0x").unwrap_or(&hex_string);
- let stored_value = U256::from_str_radix(hex_value, 16).unwrap();
- assert_eq!(stored_value, 511);
+
+ // Check that the value was updated
+ let stored_value = node.get_storage_at(U256::from(0), alith.address()).await;
+ assert_eq!(stored_value, 511);
+ }
}
diff --git a/crates/script/src/progress.rs b/crates/script/src/progress.rs
index 09329ad6b05ab..2f97d895fd141 100644
--- a/crates/script/src/progress.rs
+++ b/crates/script/src/progress.rs
@@ -171,7 +171,7 @@ impl ScriptProgress {
progress
}
- /// Traverses a set of pendings and either finds receipts, or clears them from
+ /// Traverses a set of pending transactions and either finds receipts, or clears them from
/// the deployment sequence.
///
/// For each `tx_hash`, we check if it has confirmed.
If it has From 1935661a00ac466eaa52b8be9c1dbd1aca8f988a Mon Sep 17 00:00:00 2001 From: Alexandru Gheorghe <49718502+alexggh@users.noreply.github.com> Date: Wed, 5 Nov 2025 15:15:53 +0200 Subject: [PATCH 11/44] update to latest polkadot-sdk master (#363) Signed-off-by: Alexandru Gheorghe Signed-off-by: alindima --- .github/workflows/test.yml | 2 +- Cargo.lock | 790 +++++++++--------- crates/anvil-polkadot/Cargo.toml | 2 +- .../anvil-polkadot/src/api_server/server.rs | 87 +- crates/anvil-polkadot/src/config.rs | 15 +- .../src/substrate_node/service/storage.rs | 56 +- crates/anvil-polkadot/tests/it/snapshot.rs | 6 +- .../anvil-polkadot/tests/it/standard_rpc.rs | 15 +- .../anvil-polkadot/tests/it/state_injector.rs | 5 +- crates/anvil-polkadot/tests/it/utils.rs | 22 +- crates/forge/tests/cli/revive_vm.rs | 2 +- 11 files changed, 489 insertions(+), 513 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 11ae96ec88e33..cb976c8618b2b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -41,7 +41,7 @@ jobs: sudo apt-get update sudo apt-get install -y clang libclang-dev - name: Build documentation - run: cargo doc --workspace --all-features --no-deps --document-private-items + run: cargo +nightly doc --workspace --all-features --no-deps --document-private-items env: RUSTDOCFLAGS: --cfg docsrs -D warnings --show-type-layout --generate-link-to-definition --enable-index-page -Zunstable-options - name: Deploy documentation diff --git a/Cargo.lock b/Cargo.lock index 699efddd176c2..50177923b8e3b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1331,16 +1331,28 @@ dependencies = [ "ark-std 0.4.0", ] +[[package]] +name = "ark-bls12-377" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfedac3173d12820a5e0d6cd4de31b49719a74f4a41dc09b6652d0276a3b2cd4" +dependencies = [ + "ark-ec 0.5.0", + "ark-ff 0.5.0", + "ark-std 0.5.0", +] + [[package]] name = "ark-bls12-377-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20c7021f180a0cbea0380eba97c2af3c57074cdaffe0eef7e840e1c9f2841e55" +checksum = "e47f3bb6e4ef3c0edb795769fc11469767ce807ed1ccdc979ab101aea2dbf4b5" dependencies = [ - "ark-bls12-377", - "ark-ec 0.4.2", + "ark-bls12-377 0.5.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", "ark-models-ext", - "ark-std 0.4.0", + "ark-std 0.5.0", ] [[package]] @@ -1369,16 +1381,16 @@ dependencies = [ [[package]] name = "ark-bls12-381-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1dc4b3d08f19e8ec06e949712f95b8361e43f1391d94f65e4234df03480631c" +checksum = "0f1dbb23366825700828d373d5fc9c07b7f92253ffed47ab455003b7590d786d" dependencies = [ - "ark-bls12-381 0.4.0", - "ark-ec 0.4.2", - "ark-ff 0.4.2", + "ark-bls12-381 0.5.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", "ark-models-ext", - "ark-serialize 0.4.2", - "ark-std 0.4.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", ] [[package]] @@ -1395,27 +1407,27 @@ dependencies = [ [[package]] name = "ark-bw6-761" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e0605daf0cc5aa2034b78d008aaf159f56901d92a52ee4f6ecdfdac4f426700" +checksum = "1cc9cae367e0c3c0b52e3ef13371122752654f45d0212ec7306fb0c1c012cd98" dependencies = [ - "ark-bls12-377", - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-std 0.4.0", + "ark-bls12-377 0.5.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", + "ark-std 0.5.0", ] [[package]] name 
= "ark-bw6-761-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccee5fba47266f460067588ee1bf070a9c760bf2050c1c509982c5719aadb4f2" +checksum = "c6e1216f968e21c72fdaba53dbc9e547a8a60cc87b1dc74ac589727e906f9297" dependencies = [ "ark-bw6-761", - "ark-ec 0.4.2", - "ark-ff 0.4.2", + "ark-ec 0.5.0", + "ark-ff 0.5.0", "ark-models-ext", - "ark-std 0.4.0", + "ark-std 0.5.0", ] [[package]] @@ -1432,7 +1444,6 @@ dependencies = [ "hashbrown 0.13.2", "itertools 0.10.5", "num-traits", - "rayon", "zeroize", ] @@ -1460,39 +1471,27 @@ dependencies = [ [[package]] name = "ark-ed-on-bls12-377" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b10d901b9ac4b38f9c32beacedfadcdd64e46f8d7f8e88c1ae1060022cf6f6c6" +checksum = "ebbf817b2db27d2787009b2ff76304a5b90b4b01bb16aa8351701fd40f5f37b2" dependencies = [ - "ark-bls12-377", - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-std 0.4.0", + "ark-bls12-377 0.5.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", + "ark-std 0.5.0", ] [[package]] name = "ark-ed-on-bls12-377-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524a4fb7540df2e1a8c2e67a83ba1d1e6c3947f4f9342cc2359fc2e789ad731d" +checksum = "05093aa26f017411708e1271047852cc5f58686336f1f1a56fb2df747c3e173a" dependencies = [ - "ark-ec 0.4.2", + "ark-ec 0.5.0", "ark-ed-on-bls12-377", - "ark-ff 0.4.2", + "ark-ff 0.5.0", "ark-models-ext", - "ark-std 0.4.0", -] - -[[package]] -name = "ark-ed-on-bls12-381-bandersnatch" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9cde0f2aa063a2a5c28d39b47761aa102bda7c13c84fc118a61b87c7b2f785c" -dependencies = [ - "ark-bls12-381 0.4.0", - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-std 0.4.0", + "ark-std 0.5.0", ] [[package]] @@ -1509,15 +1508,15 @@ dependencies = [ [[package]] name = "ark-ed-on-bls12-381-bandersnatch-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d15185f1acb49a07ff8cbe5f11a1adc5a93b19e211e325d826ae98e98e124346" +checksum = "5e6dce0c47def6f25cf01022acded4f32732f577187dfcd1268510093ef16ea6" dependencies = [ - "ark-ec 0.4.2", - "ark-ed-on-bls12-381-bandersnatch 0.4.0", - "ark-ff 0.4.2", + "ark-ec 0.5.0", + "ark-ed-on-bls12-381-bandersnatch", + "ark-ff 0.5.0", "ark-models-ext", - "ark-std 0.4.0", + "ark-std 0.5.0", ] [[package]] @@ -1649,14 +1648,14 @@ dependencies = [ [[package]] name = "ark-models-ext" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9eab5d4b5ff2f228b763d38442adc9b084b0a465409b059fac5c2308835ec2" +checksum = "ff772c552d00e9c092eab0608632342c553abbf6bca984008b55100a9a78a3a6" dependencies = [ - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-serialize 0.4.2", - "ark-std 0.4.0", + "ark-ec 0.5.0", + "ark-ff 0.5.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", "derivative", ] @@ -1718,26 +1717,14 @@ dependencies = [ "tracing-subscriber 0.2.25", ] -[[package]] -name = "ark-scale" -version = "0.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f69c00b3b529be29528a6f2fd5fa7b1790f8bed81b9cdca17e326538545a179" -dependencies = [ - "ark-ec 0.4.2", - "ark-ff 0.4.2", - "ark-serialize 0.4.2", - "ark-std 0.4.0", - "parity-scale-codec", - "scale-info", -] - [[package]] name = "ark-scale" version = "0.0.13" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "985c81a9c7b23a72f62b7b20686d5326d2a9956806f37de9ee35cb1238faf0c0" dependencies = [ + "ark-ec 0.5.0", + "ark-ff 0.5.0", "ark-serialize 0.5.0", "ark-std 0.5.0", "parity-scale-codec", @@ -1820,7 +1807,6 @@ checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", "rand 0.8.5", - "rayon", ] [[package]] @@ -1856,7 +1842,7 @@ checksum = "9501da18569b2afe0eb934fb7afd5a247d238b94116155af4dd068f319adfe6d" dependencies = [ "ark-bls12-381 0.5.0", "ark-ec 0.5.0", - "ark-ed-on-bls12-381-bandersnatch 0.5.0", + "ark-ed-on-bls12-381-bandersnatch", "ark-ff 0.5.0", "ark-serialize 0.5.0", "ark-std 0.5.0", @@ -1983,7 +1969,7 @@ checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" [[package]] name = "asset-test-utils" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-pallet-parachain-system", "cumulus-pallet-xcmp-queue", @@ -2013,7 +1999,7 @@ dependencies = [ [[package]] name = "assets-common" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "ethereum-standards", @@ -2835,7 +2821,7 @@ checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" [[package]] name = "binary-merkle-tree" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "hash-db", "log", @@ -3156,7 +3142,7 @@ checksum = "26c4925bc979b677330a8c7fe7a8c94af2dbb4a2d37b4a20a80d884400f46baa" [[package]] name = "bp-header-chain" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-runtime", "finality-grandpa", @@ -3173,7 +3159,7 @@ dependencies = [ [[package]] name = "bp-messages" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-header-chain", "bp-runtime", @@ -3189,7 +3175,7 @@ dependencies = [ [[package]] name = "bp-parachains" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -3206,7 +3192,7 @@ dependencies = [ [[package]] name = "bp-polkadot-core" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" 
dependencies = [ "bp-messages", "bp-runtime", @@ -3223,7 +3209,7 @@ dependencies = [ [[package]] name = "bp-relayers" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-header-chain", "bp-messages", @@ -3241,7 +3227,7 @@ dependencies = [ [[package]] name = "bp-runtime" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -3264,7 +3250,7 @@ dependencies = [ [[package]] name = "bp-test-utils" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-header-chain", "bp-parachains", @@ -3284,7 +3270,7 @@ dependencies = [ [[package]] name = "bp-xcm-bridge-hub" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-messages", "bp-runtime", @@ -3301,7 +3287,7 @@ dependencies = [ [[package]] name = "bp-xcm-bridge-hub-router" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -3313,7 +3299,7 @@ dependencies = [ [[package]] name = "bridge-hub-common" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -3332,7 +3318,7 @@ dependencies = [ [[package]] name = "bridge-hub-test-utils" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "asset-test-utils", "bp-header-chain", @@ -3374,7 +3360,7 @@ dependencies = [ [[package]] name = "bridge-runtime-common" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-header-chain", "bp-messages", @@ -3993,7 +3979,7 @@ checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81" dependencies = [ "serde", "termcolor", - "unicode-width 0.2.0", + "unicode-width 0.1.14", ] [[package]] @@ -4685,7 +4671,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-aura-ext" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-pallet-parachain-system", "frame-support", @@ -4702,7 +4688,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-dmp-queue" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "frame-benchmarking", @@ -4719,8 +4705,9 @@ dependencies = [ [[package]] name = "cumulus-pallet-parachain-system" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ + "array-bytes", "bytes", "cumulus-pallet-parachain-system-proc-macro", "cumulus-primitives-core", @@ -4756,7 +4743,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-parachain-system-proc-macro" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -4767,7 +4754,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-session-benchmarking" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -4780,7 +4767,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-solo-to-para" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-pallet-parachain-system", "frame-support", @@ -4795,7 +4782,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-weight-reclaim" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-storage-weight-reclaim", "derive-where", @@ -4814,7 +4801,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-xcm" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -4829,7 +4816,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-xcmp-queue" version = "0.7.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "approx", "bounded-collections 0.3.2", @@ -4855,7 +4842,7 @@ dependencies = [ [[package]] name = 
"cumulus-ping" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-pallet-xcm", "cumulus-primitives-core", @@ -4870,7 +4857,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-aura" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-api", "sp-consensus-aura", @@ -4879,7 +4866,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-core" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "polkadot-core-primitives", @@ -4896,7 +4883,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-parachain-inherent" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "cumulus-primitives-core", @@ -4910,7 +4897,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-proof-size-hostfunction" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-externalities", "sp-runtime-interface", @@ -4920,7 +4907,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-storage-weight-reclaim" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "cumulus-primitives-proof-size-hostfunction", @@ -4937,7 +4924,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-timestamp" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "sp-inherents", @@ -4947,7 +4934,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-utility" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -4964,7 +4951,7 @@ dependencies = [ [[package]] name = "cumulus-test-relay-sproof-builder" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" 
dependencies = [ "cumulus-primitives-core", "parity-scale-codec", @@ -5951,7 +5938,7 @@ dependencies = [ [[package]] name = "ethereum-standards" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "alloy-core", ] @@ -6517,7 +6504,7 @@ dependencies = [ [[package]] name = "fork-tree" version = "12.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", ] @@ -7226,7 +7213,7 @@ checksum = "28dd6caf6059519a65843af8fe2a3ae298b14b80179855aeb4adc2c1934ee619" [[package]] name = "frame-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-support-procedural", @@ -7250,7 +7237,7 @@ dependencies = [ [[package]] name = "frame-benchmarking-pallet-pov" version = "18.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -7278,7 +7265,7 @@ dependencies = [ [[package]] name = "frame-election-provider-solution-type" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -7289,7 +7276,7 @@ dependencies = [ [[package]] name = "frame-election-provider-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-election-provider-solution-type", "frame-support", @@ -7306,7 +7293,7 @@ dependencies = [ [[package]] name = "frame-executive" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "aquamarine", "frame-support", @@ -7336,7 +7323,7 @@ dependencies = [ [[package]] name = "frame-metadata-hash-extension" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "const-hex", @@ -7352,7 +7339,7 @@ dependencies = [ [[package]] name = "frame-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "aquamarine", "array-bytes", @@ -7393,7 +7380,7 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "Inflector", "cfg-expr", @@ -7413,7 +7400,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.3.0", @@ -7425,7 +7412,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "proc-macro2", "quote", @@ -7435,7 +7422,7 @@ dependencies = [ [[package]] name = "frame-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cfg-if", "docify", @@ -7454,7 +7441,7 @@ dependencies = [ [[package]] name = "frame-system-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -7468,7 +7455,7 @@ dependencies = [ [[package]] name = "frame-system-rpc-runtime-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "parity-scale-codec", @@ -7478,7 +7465,7 @@ dependencies = [ [[package]] name = "frame-try-runtime" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "parity-scale-codec", @@ -9129,9 +9116,9 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b26c20e2178756451cfeb0661fb74c47dd5988cb7e3939de7e9241fd604d42" +checksum = "e281ae70cc3b98dac15fced3366a880949e65fc66e345ce857a5682d152f3e62" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -9147,9 +9134,9 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bacb85abf4117092455e1573625e21b8f8ef4dec8aff13361140b2dc266cdff2" +checksum = "cc4280b709ac3bb5e16cf3bad5056a0ec8df55fa89edfe996361219aadc2c7ea" dependencies = [ "base64 0.22.1", "futures-channel", @@ -9172,9 +9159,9 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456196007ca3a14db478346f58c7238028d55ee15c1df15115596e411ff27925" +checksum = "348ee569eaed52926b5e740aae20863762b16596476e943c9e415a6479021622" dependencies = [ "async-trait", "bytes", @@ -9199,9 +9186,9 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c872b6c9961a4ccc543e321bb5b89f6b2d2c7fe8b61906918273a3333c95400c" +checksum = "f50c389d6e6a52eb7c3548a6600c90cf74d9b71cb5912209833f00a5479e9a01" dependencies = [ "async-trait", "base64 0.22.1", @@ -9224,9 +9211,9 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e65763c942dfc9358146571911b0cd1c361c2d63e2d2305622d40d36376ca80" +checksum = "7398cddf5013cca4702862a2692b66c48a3bd6cf6ec681a47453c93d63cf8de5" dependencies = [ "heck 0.5.0", "proc-macro-crate 3.3.0", @@ -9237,9 +9224,9 @@ dependencies = [ [[package]] name = "jsonrpsee-server" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55e363146da18e50ad2b51a0a7925fc423137a0b1371af8235b1c231a0647328" +checksum = "21429bcdda37dcf2d43b68621b994adede0e28061f816b038b0f18c70c143d51" dependencies = [ "futures-util", "http 1.3.1", @@ -9264,9 +9251,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a8e70baf945b6b5752fc8eb38c918a48f1234daf11355e07106d963f860089" +checksum = "b0f05e0028e55b15dbd2107163b3c744cd3bb4474f193f95d9708acbf5677e44" dependencies = [ "http 1.3.1", "serde", @@ -9276,9 +9263,9 @@ dependencies = [ [[package]] name = "jsonrpsee-wasm-client" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6558a9586cad43019dafd0b6311d0938f46efc116b34b28c74778bc11a2edf6" +checksum = "e9d745e4f543fc10fc0e2b11aa1f3be506b1e475d412167e7191a65ecd239f1c" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -9287,9 +9274,9 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.24.9" +version = "0.24.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01b3323d890aa384f12148e8d2a1fd18eb66e9e7e825f9de4fa53bcc19b93eef" +checksum = "78fc744f17e7926d57f478cf9ca6e1ee5d8332bf0514860b1a3cdf1742e614cc" dependencies = [ "http 1.3.1", "jsonrpsee-client-transport", @@ -11462,7 +11449,7 @@ dependencies = [ [[package]] name = "pallet-alliance" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11481,7 +11468,7 @@ dependencies = [ [[package]] name = "pallet-asset-conversion" version = "10.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11499,7 +11486,7 @@ dependencies = [ [[package]] name = "pallet-asset-conversion-ops" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11517,7 +11504,7 @@ dependencies = [ [[package]] name = "pallet-asset-conversion-tx-payment" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11532,7 +11519,7 @@ dependencies = [ [[package]] name = "pallet-asset-rate" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11546,7 +11533,7 @@ dependencies = [ [[package]] name = "pallet-asset-rewards" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11564,7 +11551,7 @@ dependencies = [ [[package]] name = "pallet-asset-tx-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11580,7 +11567,7 @@ dependencies = [ [[package]] name = "pallet-assets" version = "29.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11596,7 +11583,7 @@ dependencies = [ [[package]] name = "pallet-assets-freezer" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "pallet-assets", @@ -11608,7 +11595,7 @@ dependencies = [ [[package]] name = "pallet-assets-holder" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11623,7 +11610,7 @@ dependencies = [ [[package]] name = "pallet-assets-precompiles" version = "0.1.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "ethereum-standards", "frame-support", @@ -11634,7 +11621,7 @@ dependencies = [ [[package]] name = "pallet-atomic-swap" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -11644,7 +11631,7 @@ dependencies = [ [[package]] name = "pallet-aura" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -11660,7 +11647,7 @@ dependencies = [ [[package]] name = "pallet-authority-discovery" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -11675,7 +11662,7 @@ dependencies = [ [[package]] name = "pallet-authorship" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -11688,7 +11675,7 @@ dependencies = [ [[package]] name = "pallet-babe" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11711,7 +11698,7 @@ dependencies = [ [[package]] name = "pallet-bags-list" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "aquamarine", "docify", @@ -11732,7 +11719,7 @@ dependencies = [ [[package]] name = "pallet-balances" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -11748,7 +11735,7 @@ dependencies = [ [[package]] name = "pallet-beefy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -11767,7 +11754,7 @@ dependencies = [ [[package]] name = "pallet-beefy-mmr" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "binary-merkle-tree", @@ -11792,7 +11779,7 @@ dependencies = [ [[package]] name = "pallet-bounties" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11809,7 +11796,7 @@ dependencies = [ [[package]] name = "pallet-bridge-grandpa" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-header-chain", "bp-runtime", @@ -11828,7 +11815,7 @@ dependencies = [ [[package]] name = "pallet-bridge-messages" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-header-chain", "bp-messages", @@ -11847,7 +11834,7 @@ dependencies = [ [[package]] name = "pallet-bridge-parachains" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-header-chain", "bp-parachains", @@ -11867,7 +11854,7 @@ dependencies = [ [[package]] name = "pallet-bridge-relayers" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-header-chain", "bp-messages", @@ -11890,7 +11877,7 @@ dependencies = [ [[package]] name = "pallet-broker" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bitvec", "frame-benchmarking", @@ -11908,7 +11895,7 @@ dependencies = [ [[package]] name = "pallet-child-bounties" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11926,7 +11913,7 @@ dependencies = [ [[package]] name = "pallet-collator-selection" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11945,7 +11932,7 @@ dependencies = [ [[package]] name = "pallet-collective" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -11962,7 +11949,7 @@ dependencies = [ [[package]] name = "pallet-collective-content" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -11976,7 +11963,7 @@ dependencies = [ [[package]] name = "pallet-contracts" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "environmental", "frame-benchmarking", @@ -12006,7 +11993,7 @@ dependencies = [ [[package]] name = "pallet-contracts-mock-network" version = "3.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -12037,7 +12024,7 @@ dependencies = [ [[package]] name = "pallet-contracts-proc-macro" version = "18.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "proc-macro2", "quote", @@ -12047,7 +12034,7 @@ dependencies = [ [[package]] name = "pallet-contracts-uapi" version = "5.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bitflags 1.3.2", "parity-scale-codec", @@ -12058,7 +12045,7 @@ dependencies = [ [[package]] name = "pallet-conviction-voting" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "assert_matches", "frame-benchmarking", @@ -12074,7 +12061,7 @@ dependencies = [ [[package]] name = "pallet-core-fellowship" version = "12.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12092,7 +12079,7 @@ dependencies = [ [[package]] name = "pallet-delegated-staking" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -12107,7 +12094,7 @@ dependencies = [ [[package]] name = "pallet-democracy" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12124,7 +12111,7 @@ dependencies = [ [[package]] name = "pallet-derivatives" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12144,7 +12131,7 @@ dependencies = [ [[package]] name = "pallet-dev-mode" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -12159,7 +12146,7 @@ dependencies = [ [[package]] name = "pallet-dummy-dim" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12177,7 +12164,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-multi-block" version = "0.9.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12198,7 +12185,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-multi-phase" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12219,7 +12206,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-support-benchmarking" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12232,7 +12219,7 @@ dependencies = [ [[package]] name = "pallet-elections-phragmen" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12250,7 +12237,7 @@ dependencies = [ [[package]] name = "pallet-fast-unstake" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -12268,7 +12255,7 @@ 
dependencies = [ [[package]] name = "pallet-glutton" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "blake2 0.10.6", "frame-benchmarking", @@ -12286,7 +12273,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12308,7 +12295,7 @@ dependencies = [ [[package]] name = "pallet-identity" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "enumflags2", "frame-benchmarking", @@ -12324,7 +12311,7 @@ dependencies = [ [[package]] name = "pallet-im-online" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12343,7 +12330,7 @@ dependencies = [ [[package]] name = "pallet-indices" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12358,7 +12345,7 @@ dependencies = [ [[package]] name = "pallet-insecure-randomness-collective-flip" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12369,7 +12356,7 @@ dependencies = [ [[package]] name = "pallet-lottery" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12382,7 +12369,7 @@ dependencies = [ [[package]] name = "pallet-membership" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12398,7 +12385,7 @@ dependencies = [ [[package]] name = "pallet-message-queue" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "environmental", "frame-benchmarking", @@ -12417,7 +12404,7 @@ dependencies = [ 
[[package]] name = "pallet-meta-tx" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -12435,7 +12422,7 @@ dependencies = [ [[package]] name = "pallet-migrations" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -12454,7 +12441,7 @@ dependencies = [ [[package]] name = "pallet-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "parity-scale-codec", @@ -12468,7 +12455,7 @@ dependencies = [ [[package]] name = "pallet-mmr" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "parity-scale-codec", @@ -12480,7 +12467,7 @@ dependencies = [ [[package]] name = "pallet-multi-asset-bounties" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -12497,7 +12484,7 @@ dependencies = [ [[package]] name = "pallet-multisig" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "parity-scale-codec", @@ -12508,7 +12495,7 @@ dependencies = [ [[package]] name = "pallet-nft-fractionalization" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "pallet-assets", @@ -12521,7 +12508,7 @@ dependencies = [ [[package]] name = "pallet-nfts" version = "22.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "enumflags2", "frame-benchmarking", @@ -12538,7 +12525,7 @@ dependencies = [ [[package]] name = "pallet-nfts-runtime-api" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "sp-api", @@ -12547,7 +12534,7 @@ dependencies = [ [[package]] name = "pallet-nis" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12557,7 +12544,7 @@ dependencies = [ [[package]] name = "pallet-node-authorization" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "parity-scale-codec", @@ -12568,7 +12555,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -12586,7 +12573,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools-benchmarking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12606,7 +12593,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools-runtime-api" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "pallet-nomination-pools", "parity-scale-codec", @@ -12616,7 +12603,7 @@ dependencies = [ [[package]] name = "pallet-offences" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -12631,7 +12618,7 @@ dependencies = [ [[package]] name = "pallet-offences-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12654,7 +12641,7 @@ dependencies = [ [[package]] name = "pallet-oracle" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12672,7 +12659,7 @@ dependencies = [ [[package]] name = "pallet-oracle-runtime-api" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -12683,7 +12670,7 @@ dependencies = [ [[package]] 
name = "pallet-origin-restriction" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12701,7 +12688,7 @@ dependencies = [ [[package]] name = "pallet-paged-list" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "parity-scale-codec", @@ -12713,7 +12700,7 @@ dependencies = [ [[package]] name = "pallet-parameters" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -12730,7 +12717,7 @@ dependencies = [ [[package]] name = "pallet-people" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12748,7 +12735,7 @@ dependencies = [ [[package]] name = "pallet-preimage" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12764,7 +12751,7 @@ dependencies = [ [[package]] name = "pallet-proxy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12774,7 +12761,7 @@ dependencies = [ [[package]] name = "pallet-ranked-collective" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12792,7 +12779,7 @@ dependencies = [ [[package]] name = "pallet-recovery" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12802,7 +12789,7 @@ dependencies = [ [[package]] name = "pallet-referenda" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12819,7 +12806,7 @@ dependencies = [ [[package]] name = "pallet-remark" version = "28.0.0" 
-source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -12835,7 +12822,7 @@ dependencies = [ [[package]] name = "pallet-revive" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "alloy-consensus", "alloy-core", @@ -12869,6 +12856,7 @@ dependencies = [ "rlp 0.6.1", "scale-info", "serde", + "serde_json", "sp-api", "sp-arithmetic", "sp-consensus-aura", @@ -12885,7 +12873,7 @@ dependencies = [ [[package]] name = "pallet-revive-eth-rpc" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "anyhow", "clap", @@ -12922,7 +12910,7 @@ dependencies = [ [[package]] name = "pallet-revive-fixtures" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "alloy-core", "anyhow", @@ -12939,7 +12927,7 @@ dependencies = [ [[package]] name = "pallet-revive-proc-macro" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "proc-macro2", "quote", @@ -12949,7 +12937,7 @@ dependencies = [ [[package]] name = "pallet-revive-uapi" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "alloy-core", "bitflags 1.3.2", @@ -12964,7 +12952,7 @@ dependencies = [ [[package]] name = "pallet-root-offences" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -12980,7 +12968,7 @@ dependencies = [ [[package]] name = "pallet-root-testing" version = "4.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -12993,7 +12981,7 @@ dependencies = [ [[package]] name = "pallet-safe-mode" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "pallet-balances", @@ -13007,7 +12995,7 @@ dependencies = [ [[package]] name = 
"pallet-salary" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "pallet-ranked-collective", @@ -13019,7 +13007,7 @@ dependencies = [ [[package]] name = "pallet-scheduler" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -13036,7 +13024,7 @@ dependencies = [ [[package]] name = "pallet-scored-pool" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -13049,7 +13037,7 @@ dependencies = [ [[package]] name = "pallet-session" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -13071,7 +13059,7 @@ dependencies = [ [[package]] name = "pallet-session-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13087,7 +13075,7 @@ dependencies = [ [[package]] name = "pallet-skip-feeless-payment" version = "3.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -13099,7 +13087,7 @@ dependencies = [ [[package]] name = "pallet-society" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13116,7 +13104,7 @@ dependencies = [ [[package]] name = "pallet-staking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -13137,7 +13125,7 @@ dependencies = [ [[package]] name = "pallet-staking-async" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -13161,7 +13149,7 @@ dependencies = [ [[package]] name = 
"pallet-staking-async-ah-client" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13181,7 +13169,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-rc-client" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -13198,7 +13186,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-reward-fn" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "sp-arithmetic", @@ -13207,7 +13195,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-runtime-api" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "sp-api", @@ -13217,7 +13205,7 @@ dependencies = [ [[package]] name = "pallet-staking-reward-fn" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "sp-arithmetic", @@ -13226,7 +13214,7 @@ dependencies = [ [[package]] name = "pallet-staking-runtime-api" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "sp-api", @@ -13236,7 +13224,7 @@ dependencies = [ [[package]] name = "pallet-state-trie-migration" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13252,7 +13240,7 @@ dependencies = [ [[package]] name = "pallet-statement" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", @@ -13269,7 +13257,7 @@ dependencies = [ [[package]] name = "pallet-sudo" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -13284,7 +13272,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version 
= "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -13302,7 +13290,7 @@ dependencies = [ [[package]] name = "pallet-tips" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13320,7 +13308,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13336,7 +13324,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -13348,7 +13336,7 @@ dependencies = [ [[package]] name = "pallet-transaction-storage" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13367,7 +13355,7 @@ dependencies = [ [[package]] name = "pallet-treasury" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -13386,7 +13374,7 @@ dependencies = [ [[package]] name = "pallet-tx-pause" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "parity-scale-codec", @@ -13397,7 +13385,7 @@ dependencies = [ [[package]] name = "pallet-uniques" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13411,7 +13399,7 @@ dependencies = [ [[package]] name = "pallet-utility" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13426,7 +13414,7 @@ dependencies = [ [[package]] name = "pallet-verify-signature" version = "1.0.0" 
-source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13441,7 +13429,7 @@ dependencies = [ [[package]] name = "pallet-vesting" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13455,7 +13443,7 @@ dependencies = [ [[package]] name = "pallet-whitelist" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -13465,7 +13453,7 @@ dependencies = [ [[package]] name = "pallet-xcm" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bounded-collections 0.3.2", "frame-benchmarking", @@ -13489,7 +13477,7 @@ dependencies = [ [[package]] name = "pallet-xcm-benchmarks" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-benchmarking", "frame-support", @@ -13506,7 +13494,7 @@ dependencies = [ [[package]] name = "pallet-xcm-bridge-hub" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-messages", "bp-runtime", @@ -13528,7 +13516,7 @@ dependencies = [ [[package]] name = "pallet-xcm-bridge-hub-router" version = "0.5.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-xcm-bridge-hub-router", "frame-benchmarking", @@ -13548,7 +13536,7 @@ dependencies = [ [[package]] name = "pallet-xcm-precompiles" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "pallet-revive", @@ -13562,7 +13550,7 @@ dependencies = [ [[package]] name = "parachains-common" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "cumulus-primitives-utility", @@ -13593,7 +13581,7 @@ dependencies = [ [[package]] name = "parachains-runtimes-test-utils" 
version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-pallet-parachain-system", "cumulus-pallet-xcmp-queue", @@ -14048,7 +14036,7 @@ dependencies = [ [[package]] name = "polkadot-core-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -14059,7 +14047,7 @@ dependencies = [ [[package]] name = "polkadot-parachain-primitives" version = "6.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "bounded-collections 0.3.2", @@ -14076,7 +14064,7 @@ dependencies = [ [[package]] name = "polkadot-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bitvec", "bounded-collections 0.3.2", @@ -14105,7 +14093,7 @@ dependencies = [ [[package]] name = "polkadot-runtime-common" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bitvec", "frame-benchmarking", @@ -14154,7 +14142,7 @@ dependencies = [ [[package]] name = "polkadot-runtime-metrics" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bs58", "frame-benchmarking", @@ -14166,7 +14154,7 @@ dependencies = [ [[package]] name = "polkadot-runtime-parachains" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bitflags 1.3.2", "bitvec", @@ -14213,7 +14201,7 @@ dependencies = [ [[package]] name = "polkadot-sdk" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "asset-test-utils", "assets-common", @@ -14488,7 +14476,7 @@ dependencies = [ [[package]] name = "polkadot-sdk-frame" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-benchmarking", @@ -15706,7 +15694,7 @@ dependencies = [ [[package]] name = "revive-dev-runtime" version = 
"0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "parity-scale-codec", @@ -16064,7 +16052,7 @@ dependencies = [ [[package]] name = "rococo-runtime-constants" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "polkadot-primitives", @@ -16577,7 +16565,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "sp-core", @@ -16588,7 +16576,7 @@ dependencies = [ [[package]] name = "sc-basic-authorship" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "futures", "log", @@ -16610,7 +16598,7 @@ dependencies = [ [[package]] name = "sc-block-builder" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "sp-api", @@ -16625,7 +16613,7 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "docify", @@ -16651,7 +16639,7 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -16662,7 +16650,7 @@ dependencies = [ [[package]] name = "sc-cli" version = "0.36.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "bip39", @@ -16704,7 +16692,7 @@ dependencies = [ [[package]] name = "sc-client-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "fnv", "futures", @@ -16730,7 +16718,7 @@ dependencies = [ [[package]] name = "sc-client-db" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "hash-db", "kvdb", @@ -16758,7 +16746,7 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "futures", @@ -16781,7 +16769,7 @@ dependencies = [ [[package]] name = "sc-consensus-aura" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "fork-tree", @@ -16812,7 +16800,7 @@ dependencies = [ [[package]] name = "sc-consensus-babe" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "fork-tree", @@ -16849,7 +16837,7 @@ dependencies = [ [[package]] name = "sc-consensus-epochs" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "fork-tree", "parity-scale-codec", @@ -16862,7 +16850,7 @@ dependencies = [ [[package]] name = "sc-consensus-manual-seal" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "assert_matches", "async-trait", @@ -16897,7 +16885,7 @@ dependencies = [ [[package]] name = "sc-consensus-slots" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "futures", @@ -16920,7 +16908,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "parking_lot 0.12.4", @@ -16943,7 +16931,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "polkavm 0.26.0", "sc-allocator", @@ -16956,7 +16944,7 @@ dependencies = [ [[package]] name = "sc-executor-polkavm" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" 
dependencies = [ "log", "polkavm 0.26.0", @@ -16967,7 +16955,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "anyhow", "log", @@ -16983,7 +16971,7 @@ dependencies = [ [[package]] name = "sc-informant" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "console", "futures", @@ -16999,7 +16987,7 @@ dependencies = [ [[package]] name = "sc-keystore" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "parking_lot 0.12.4", @@ -17013,7 +17001,7 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "arrayvec 0.7.6", @@ -17041,7 +17029,7 @@ dependencies = [ [[package]] name = "sc-network" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -17091,7 +17079,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bitflags 1.3.2", "parity-scale-codec", @@ -17101,7 +17089,7 @@ dependencies = [ [[package]] name = "sc-network-light" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -17122,7 +17110,7 @@ dependencies = [ [[package]] name = "sc-network-sync" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -17157,7 +17145,7 @@ dependencies = [ [[package]] name = "sc-network-transactions" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "futures", @@ -17176,7 +17164,7 @@ dependencies = [ [[package]] name = 
"sc-network-types" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bs58", "bytes", @@ -17197,7 +17185,7 @@ dependencies = [ [[package]] name = "sc-proposer-metrics" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "substrate-prometheus-endpoint", @@ -17206,7 +17194,7 @@ dependencies = [ [[package]] name = "sc-rpc" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "futures", "jsonrpsee", @@ -17238,7 +17226,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "jsonrpsee", "parity-scale-codec", @@ -17258,7 +17246,7 @@ dependencies = [ [[package]] name = "sc-rpc-server" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "dyn-clone", "forwarded-header-value", @@ -17282,7 +17270,7 @@ dependencies = [ [[package]] name = "sc-rpc-spec-v2" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "futures", @@ -17315,7 +17303,7 @@ dependencies = [ [[package]] name = "sc-runtime-utilities" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "sc-executor", @@ -17330,7 +17318,7 @@ dependencies = [ [[package]] name = "sc-service" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "directories", @@ -17394,7 +17382,7 @@ dependencies = [ [[package]] name = "sc-state-db" version = "0.30.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "parity-scale-codec", @@ -17405,7 +17393,7 @@ dependencies = [ [[package]] name = "sc-sysinfo" version = "27.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "derive_more 0.99.20", "futures", @@ -17425,7 +17413,7 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "chrono", "futures", @@ -17444,7 +17432,7 @@ dependencies = [ [[package]] name = "sc-tracing" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "chrono", "console", @@ -17472,7 +17460,7 @@ dependencies = [ [[package]] name = "sc-tracing-proc-macro" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -17483,7 +17471,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "futures", @@ -17514,7 +17502,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "futures", @@ -17531,7 +17519,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-channel 1.9.0", "futures", @@ -18436,7 +18424,7 @@ checksum = "826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7" [[package]] name = "slot-range-helper" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "enumn", "parity-scale-codec", @@ -18618,7 +18606,7 @@ dependencies = [ [[package]] name = "snowbridge-core" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bp-relayers", "frame-support", @@ -18853,7 +18841,7 @@ dependencies = [ [[package]] name = "sp-api" version = "26.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "hash-db", @@ -18875,7 +18863,7 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "Inflector", "blake2 0.10.6", @@ -18889,7 +18877,7 @@ dependencies = [ [[package]] name = "sp-application-crypto" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -18901,7 +18889,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "integer-sqrt", @@ -18915,7 +18903,7 @@ dependencies = [ [[package]] name = "sp-authority-discovery" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -18927,7 +18915,7 @@ dependencies = [ [[package]] name = "sp-block-builder" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-api", "sp-inherents", @@ -18937,7 +18925,7 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "futures", "parity-scale-codec", @@ -18956,7 +18944,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "futures", @@ -18970,7 +18958,7 @@ dependencies = [ [[package]] name = "sp-consensus-aura" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "parity-scale-codec", @@ -18986,7 +18974,7 @@ dependencies = [ [[package]] name = "sp-consensus-babe" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "parity-scale-codec", @@ -19004,7 +18992,7 @@ dependencies = [ [[package]] name = "sp-consensus-beefy" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -19024,7 +19012,7 @@ dependencies = [ [[package]] name = "sp-consensus-grandpa" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "finality-grandpa", "log", @@ -19041,7 +19029,7 @@ dependencies = [ [[package]] name = "sp-consensus-pow" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "sp-api", @@ -19052,7 +19040,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -19063,7 +19051,7 @@ dependencies = [ [[package]] name = "sp-core" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "ark-vrf", "array-bytes", @@ -19110,7 +19098,7 @@ dependencies = [ [[package]] name = "sp-core-hashing" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] @@ -19118,7 +19106,7 @@ dependencies = [ [[package]] name = "sp-core-hashing-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-crypto-hashing-proc-macro", ] @@ -19126,20 +19114,20 @@ dependencies = [ [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ - "ark-bls12-377", + "ark-bls12-377 0.5.0", "ark-bls12-377-ext", - "ark-bls12-381 0.4.0", + "ark-bls12-381 0.5.0", "ark-bls12-381-ext", "ark-bw6-761", "ark-bw6-761-ext", - "ark-ec 0.4.2", + "ark-ec 0.5.0", "ark-ed-on-bls12-377", "ark-ed-on-bls12-377-ext", - 
"ark-ed-on-bls12-381-bandersnatch 0.4.0", + "ark-ed-on-bls12-381-bandersnatch", "ark-ed-on-bls12-381-bandersnatch-ext", - "ark-scale 0.0.12", + "ark-scale", "sp-runtime-interface", ] @@ -19160,7 +19148,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "blake2b_simd", "byteorder", @@ -19173,7 +19161,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "quote", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", @@ -19183,7 +19171,7 @@ dependencies = [ [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "kvdb", "parking_lot 0.12.4", @@ -19192,7 +19180,7 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "proc-macro2", "quote", @@ -19202,7 +19190,7 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "environmental", "parity-scale-codec", @@ -19212,7 +19200,7 @@ dependencies = [ [[package]] name = "sp-genesis-builder" version = "0.8.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -19224,7 +19212,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -19237,7 +19225,7 @@ dependencies = [ [[package]] name = "sp-io" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bytes", "docify", @@ -19263,7 +19251,7 @@ dependencies = [ [[package]] name = "sp-keyring" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-core", "sp-runtime", @@ -19273,7 +19261,7 @@ dependencies = [ [[package]] name = "sp-keystore" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "parking_lot 0.12.4", @@ -19284,7 +19272,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "thiserror 1.0.69", "zstd 0.12.4", @@ -19293,7 +19281,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-metadata", "parity-scale-codec", @@ -19303,7 +19291,7 @@ dependencies = [ [[package]] name = "sp-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -19314,7 +19302,7 @@ dependencies = [ [[package]] name = "sp-mmr-primitives" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", "parity-scale-codec", @@ -19331,7 +19319,7 @@ dependencies = [ [[package]] name = "sp-npos-elections" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -19344,7 +19332,7 @@ dependencies = [ [[package]] name = "sp-offchain" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-api", "sp-core", @@ -19354,7 +19342,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "backtrace", "regex", @@ -19363,7 +19351,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ 
"rustc-hash 1.1.0", "serde", @@ -19373,7 +19361,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "binary-merkle-tree", "bytes", @@ -19403,7 +19391,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -19421,7 +19409,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "Inflector", "expander", @@ -19434,7 +19422,7 @@ dependencies = [ [[package]] name = "sp-session" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "scale-info", @@ -19448,7 +19436,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -19461,7 +19449,7 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "hash-db", "log", @@ -19481,7 +19469,7 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "aes-gcm", "curve25519-dalek", @@ -19505,12 +19493,12 @@ dependencies = [ [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "impl-serde", "parity-scale-codec", @@ -19522,7 +19510,7 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "26.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "parity-scale-codec", @@ -19534,7 +19522,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "regex", @@ -19546,7 +19534,7 @@ dependencies = [ [[package]] name = "sp-transaction-pool" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-api", "sp-runtime", @@ -19555,7 +19543,7 @@ dependencies = [ [[package]] name = "sp-transaction-storage-proof" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "parity-scale-codec", @@ -19569,7 +19557,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "ahash", "foldhash 0.1.5", @@ -19594,7 +19582,7 @@ dependencies = [ [[package]] name = "sp-version" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "impl-serde", "parity-scale-codec", @@ -19611,7 +19599,7 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", "proc-macro-warning", @@ -19623,7 +19611,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -19635,7 +19623,7 @@ dependencies = [ [[package]] name = "sp-weights" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "bounded-collections 0.3.2", "parity-scale-codec", @@ -19915,7 +19903,7 @@ dependencies = [ [[package]] name = "staging-parachain-info" version = "0.7.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -19928,7 +19916,7 @@ dependencies = [ [[package]] name = "staging-xcm" version = "7.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", "bounded-collections 0.3.2", @@ -19949,7 +19937,7 @@ dependencies = [ [[package]] name = "staging-xcm-builder" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "environmental", "frame-support", @@ -19973,7 +19961,7 @@ dependencies = [ [[package]] name = "staging-xcm-executor" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "environmental", "frame-benchmarking", @@ -20157,7 +20145,7 @@ dependencies = [ [[package]] name = "substrate-bip39" version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "hmac 0.12.1", "pbkdf2 0.12.2", @@ -20182,7 +20170,7 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-support" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "jsonrpsee", @@ -20196,7 +20184,7 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "docify", "frame-system-rpc-runtime-api", @@ -20216,7 +20204,7 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "http-body-util", "hyper 1.6.0", @@ -20230,7 +20218,7 @@ dependencies = [ [[package]] name = "substrate-rpc-client" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "async-trait", "jsonrpsee", @@ -20254,7 +20242,7 @@ dependencies = [ [[package]] name = "substrate-wasm-builder" version = "17.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "build-helper", "cargo_metadata 0.15.4", @@ -20793,7 +20781,7 @@ checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" [[package]] name = "testnet-parachains-constants" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -21857,7 +21845,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "225eaa083192400abfe78838e3089c539a361e0dd9b6884f61b5c6237676ec01" dependencies = [ - "ark-scale 0.0.13", + "ark-scale", "ark-serialize 0.5.0", "ark-vrf", "bounded-collections 0.1.9", @@ -21900,7 +21888,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6bfb937b3d12077654a9e43e32a4e9c20177dd9fea0f3aba673e7840bb54f32" dependencies = [ - "ark-bls12-377", + "ark-bls12-377 0.4.0", "ark-bls12-381 0.4.0", "ark-ec 0.4.2", "ark-ff 0.4.2", @@ -22666,7 +22654,7 @@ dependencies = [ [[package]] name = "westend-runtime-constants" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "polkadot-primitives", @@ -23347,7 +23335,7 @@ dependencies = [ [[package]] name = "xcm-procedural" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "Inflector", "proc-macro2", @@ -23358,7 +23346,7 @@ dependencies = [ [[package]] name = "xcm-runtime-apis" version = "0.1.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "parity-scale-codec", @@ -23372,7 +23360,7 @@ dependencies = [ [[package]] name = "xcm-simulator" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#2d514fa3e40718db64734df26086a2971f6d730d" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "frame-support", "frame-system", diff --git a/crates/anvil-polkadot/Cargo.toml b/crates/anvil-polkadot/Cargo.toml index cc845db6cf96e..c1abe8416719b 100644 --- a/crates/anvil-polkadot/Cargo.toml +++ b/crates/anvil-polkadot/Cargo.toml @@ -142,7 +142,7 @@ clap_complete_fig = "4" subxt = "0.43.0" subxt-signer = "0.43.0" tokio-stream = "0.1.17" -jsonrpsee = "0.24.9" +jsonrpsee = "0.24.10" sqlx = "0.8.6" revm.workspace = true diff --git a/crates/anvil-polkadot/src/api_server/server.rs b/crates/anvil-polkadot/src/api_server/server.rs index 019500ffe2a0c..4fd7662734c4a 100644 --- a/crates/anvil-polkadot/src/api_server/server.rs +++ b/crates/anvil-polkadot/src/api_server/server.rs @@ 
-22,7 +22,7 @@ use crate::{ service::{ BackendError, BackendWithOverlay, Client, Service, TransactionPoolHandle, storage::{ - AccountType, ByteCodeType, CodeInfo, ContractInfo, ReviveAccountInfo, + AccountType, BytecodeType, CodeInfo, ContractInfo, ReviveAccountInfo, SystemAccountInfo, }, }, @@ -47,14 +47,16 @@ use indexmap::IndexMap; use pallet_revive_eth_rpc::{ BlockInfoProvider, EthRpcError, ReceiptExtractor, ReceiptProvider, SubxtBlockInfoProvider, client::{Client as EthRpcClient, ClientError, SubscriptionType}, - subxt_client::{self, SrcChainConfig}, + subxt_client::{ + self, SrcChainConfig, runtime_types::bounded_collections::bounded_vec::BoundedVec, + }, }; use polkadot_sdk::{ pallet_revive::{ ReviveApi, evm::{ - Block, Bytes, FeeHistoryResult, FilterResults, ReceiptInfo, TransactionInfo, - TransactionSigned, + Block, BlockNumberOrTagOrHash, BlockTag, Bytes, FeeHistoryResult, FilterResults, + ReceiptInfo, TransactionInfo, TransactionSigned, }, }, parachains_common::{AccountId, Hash, Nonce}, @@ -62,7 +64,6 @@ use polkadot_sdk::{ sc_client_api::HeaderBackend, sc_service::{InPoolTransaction, SpawnTaskHandle, TransactionPool}, sp_api::{Metadata as _, ProvideRuntimeApi}, - sp_arithmetic::Permill, sp_blockchain::Info, sp_core::{self, Hasher, keccak_256}, sp_runtime::traits::BlakeTwo256, @@ -605,7 +606,7 @@ impl ApiServer { return Ok(None); }; let block = self.eth_rpc_client.evm_block(block, hydrated_transactions).await; - Ok(Some(block)) + Ok(block) } async fn estimate_gas( @@ -738,7 +739,7 @@ impl ApiServer { return Ok(None); }; let block = self.eth_rpc_client.evm_block(block, hydrated_transactions).await; - Ok(Some(block)) + Ok(block) } pub(crate) async fn snapshot(&mut self) -> Result { @@ -843,10 +844,13 @@ impl ApiServer { &self, block_number: BlockNumberOrTag, ) -> Result> { - let Some(block) = self.get_block_by_number(block_number, false).await? else { + let Some(hash) = + self.maybe_get_block_hash_for_tag(Some(BlockId::Number(block_number))).await? + else { return Ok(None); }; - Ok(self.eth_rpc_client.receipts_count_per_block(&block.hash).await.map(U256::from)) + + Ok(self.eth_rpc_client.receipts_count_per_block(&hash).await.map(U256::from)) } async fn get_transaction_by_block_hash_and_index( @@ -926,8 +930,9 @@ impl ApiServer { } async fn max_priority_fee_per_gas(&self) -> Result { - let gas_price = self.gas_price().await?; - Ok(Permill::from_percent(20).mul_ceil(gas_price)) + // We do not support tips. Hence the recommended priority fee is + // always zero. The effective gas price will always be the base price. + Ok(Default::default()) } pub fn accounts(&self) -> Result> { @@ -1020,7 +1025,7 @@ impl ApiServer { // If the account doesn't exist, create one. None => { let contract_info = new_contract_info(&address, (*KECCAK_EMPTY).into(), nonce); - let trie_id = contract_info.trie_id.clone(); + let trie_id = contract_info.trie_id.0.clone(); self.backend.inject_revive_account_info( latest_block, @@ -1036,7 +1041,7 @@ impl ApiServer { // If the account is not already a contract account, make it one. Some(ReviveAccountInfo { account_type: AccountType::EOA, dust }) => { let contract_info = new_contract_info(&address, (*KECCAK_EMPTY).into(), nonce); - let trie_id = contract_info.trie_id.clone(); + let trie_id = contract_info.trie_id.0.clone(); self.backend.inject_revive_account_info( latest_block, @@ -1048,12 +1053,12 @@ impl ApiServer { } Some(ReviveAccountInfo { account_type: AccountType::Contract(contract_info), .. 
- }) => contract_info.trie_id, + }) => contract_info.trie_id.0, }; self.backend.inject_child_storage( latest_block, - trie_id.to_vec(), + trie_id, key.to_be_bytes_vec(), value.to_vec(), ); @@ -1128,7 +1133,7 @@ impl ApiServer { let code_info = old_code_info .map(|mut code_info| { code_info.code_len = bytes.len() as u32; - code_info.code_type = ByteCodeType::Evm; + code_info.code_type = BytecodeType::Evm; code_info }) .unwrap_or_else(|| CodeInfo { @@ -1137,7 +1142,7 @@ impl ApiServer { refcount: 1, code_len: bytes.len() as u32, behaviour_version: 0, - code_type: ByteCodeType::Evm, + code_type: BytecodeType::Evm, }); self.backend.inject_pristine_code(latest_block, code_hash, Some(bytes)); @@ -1241,21 +1246,38 @@ impl ApiServer { self.update_block_provider_on_revert(&revert_info.info).await?; } - let hash = self - .get_block_hash_for_tag(Some(BlockId::Number(BlockNumberOrTag::Number( - revert_info.info.best_number.into(), - )))) - .await?; - self.update_time_on_revert(hash).await?; + self.update_time_on_revert(revert_info.info.best_hash).await?; Ok(()) } + async fn maybe_get_block_hash_for_tag( + &self, + block_id: Option, + ) -> Result> { + match ReviveBlockId::from(block_id).inner() { + BlockNumberOrTagOrHash::BlockHash(hash) => Ok(Some(hash)), + BlockNumberOrTagOrHash::BlockNumber(block_number) => { + let n = block_number.try_into().map_err(|_| { + Error::InvalidParams("Block number conversion failed".to_string()) + })?; + Ok(self.eth_rpc_client.get_block_hash(n).await?) + } + BlockNumberOrTagOrHash::BlockTag(BlockTag::Finalized | BlockTag::Safe) => { + let block = self.eth_rpc_client.latest_finalized_block().await; + Ok(Some(block.hash())) + } + BlockNumberOrTagOrHash::BlockTag(_) => { + let block = self.eth_rpc_client.latest_block().await; + Ok(Some(block.hash())) + } + } + } + async fn get_block_hash_for_tag(&self, block_id: Option) -> Result { - self.eth_rpc_client - .block_hash_for_tag(ReviveBlockId::from(block_id).inner()) - .await - .map_err(Error::from) + self.maybe_get_block_hash_for_tag(block_id) + .await? + .ok_or(Error::InvalidParams("Block number not found".to_string())) } fn get_account_id(&self, block: Hash, address: Address) -> Result { @@ -1429,16 +1451,11 @@ impl ApiServer { fn new_contract_info(address: &Address, code_hash: H256, nonce: Nonce) -> ContractInfo { let address = H160::from_slice(address.as_slice()); - let trie_id = { - let buf = ("bcontract_trie_v1", address, nonce).using_encoded(BlakeTwo256::hash); - buf.as_ref() - .to_vec() - .try_into() - .expect("Runtime uses a reasonable hash size. 
Hence sizeof(T::Hash) <= 128; qed") - }; + let trie_id = + ("bcontract_trie_v1", address, nonce).using_encoded(BlakeTwo256::hash).as_ref().to_vec(); ContractInfo { - trie_id, + trie_id: BoundedVec(trie_id), code_hash, storage_bytes: 0, storage_items: 0, diff --git a/crates/anvil-polkadot/src/config.rs b/crates/anvil-polkadot/src/config.rs index 13cee2f199f42..bc994c22978a0 100644 --- a/crates/anvil-polkadot/src/config.rs +++ b/crates/anvil-polkadot/src/config.rs @@ -1,6 +1,9 @@ -use crate::substrate_node::chain_spec::keypairs_from_private_keys; +use crate::{ + api_server::revive_conversions::ReviveAddress, + substrate_node::chain_spec::keypairs_from_private_keys, +}; use alloy_genesis::Genesis; -use alloy_primitives::{U256, hex, map::HashMap, utils::Unit}; +use alloy_primitives::{Address, U256, hex, map::HashMap, utils::Unit}; use alloy_signer::Signer; use alloy_signer_local::{ MnemonicBuilder, PrivateKeySigner, @@ -347,8 +350,12 @@ Available Accounts ); let balance = alloy_primitives::utils::format_ether(self.genesis_balance); for (idx, wallet) in self.genesis_accounts.iter().enumerate() { - write!(s, "\n({idx}) {} ({balance} ETH)", Account::from(wallet.clone()).address()) - .unwrap(); + write!( + s, + "\n({idx}) {} ({balance} ETH)", + Address::from(ReviveAddress::new(Account::from(wallet.clone()).address())) + ) + .unwrap(); } let _ = write!( diff --git a/crates/anvil-polkadot/src/substrate_node/service/storage.rs b/crates/anvil-polkadot/src/substrate_node/service/storage.rs index abca913a8a392..57c5bdad9de1a 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/storage.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/storage.rs @@ -1,54 +1,10 @@ -use codec::{Decode, Encode}; -use polkadot_sdk::{ - frame_support::BoundedVec, - frame_system, - pallet_balances::AccountData, - parachains_common::{AccountId, Nonce}, - sp_core::ConstU32, -}; -use substrate_runtime::{Balance, Hash}; - -#[derive(Encode, Decode)] -pub struct ReviveAccountInfo { - pub account_type: AccountType, - pub dust: u32, -} +use polkadot_sdk::{frame_system, pallet_balances::AccountData, parachains_common::Nonce}; +use substrate_runtime::Balance; -#[derive(Encode, Decode)] -pub enum AccountType { - Contract(ContractInfo), - EOA, -} - -#[derive(Encode, Decode)] -pub struct ContractInfo { - pub trie_id: BoundedVec>, - pub code_hash: Hash, - pub storage_bytes: u32, - pub storage_items: u32, - pub storage_byte_deposit: Balance, - pub storage_item_deposit: Balance, - pub storage_base_deposit: Balance, - pub immutable_data_len: u32, -} - -#[derive(Encode, Decode)] -pub struct CodeInfo { - pub owner: AccountId, - #[codec(compact)] - pub deposit: Balance, - #[codec(compact)] - pub refcount: u64, - pub code_len: u32, - pub code_type: ByteCodeType, - pub behaviour_version: u32, -} - -#[derive(Encode, Decode)] -pub enum ByteCodeType { - Pvm, - Evm, -} +pub use pallet_revive_eth_rpc::subxt_client::runtime_types::pallet_revive::{ + storage::{AccountInfo as ReviveAccountInfo, AccountType, ContractInfo}, + vm::{BytecodeType, CodeInfo}, +}; pub type SystemAccountInfo = frame_system::AccountInfo>; diff --git a/crates/anvil-polkadot/tests/it/snapshot.rs b/crates/anvil-polkadot/tests/it/snapshot.rs index 754eb6a40f0ec..60ade11d39e20 100644 --- a/crates/anvil-polkadot/tests/it/snapshot.rs +++ b/crates/anvil-polkadot/tests/it/snapshot.rs @@ -3,8 +3,7 @@ use std::time::Duration; use crate::{ abi::Multicall, utils::{ - BlockWaitTimeout, EXISTENTIAL_DEPOSIT, TestNode, assert_with_tolerance, get_contract_code, - unwrap_response, + 
BlockWaitTimeout, TestNode, assert_with_tolerance, get_contract_code, unwrap_response, }, }; use alloy_primitives::{Address, Bytes, U256}; @@ -241,8 +240,7 @@ async fn test_balances_and_txs_index_after_evm_revert() { alith_balance_after_tx0, alith_initial_balance - AlloyU256::from(receipt_info.effective_gas_price * receipt_info.gas_used).inner() - - transfer_amount - - U256::from(EXISTENTIAL_DEPOSIT), + - transfer_amount, "alith's balance should have changed" ); assert_eq!(dest_balance, transfer_amount, "dest's balance should have changed"); diff --git a/crates/anvil-polkadot/tests/it/standard_rpc.rs b/crates/anvil-polkadot/tests/it/standard_rpc.rs index 8874292e5facd..45d10367c8e0a 100644 --- a/crates/anvil-polkadot/tests/it/standard_rpc.rs +++ b/crates/anvil-polkadot/tests/it/standard_rpc.rs @@ -274,8 +274,8 @@ async fn test_eth_get_transaction_count() { .unwrap(), ) .unwrap_err(); - assert_eq!(err.code, ErrorCode::InternalError); - assert_eq!(err.message, "Revive call failed: Client error: hash not found"); + assert_eq!(err.code, ErrorCode::InvalidParams); + assert_eq!(err.message, "Block number not found"); assert_eq!( unwrap_response::( @@ -494,11 +494,12 @@ async fn test_get_transaction_by_hash_and_index() { .unwrap() .unwrap(); - assert_eq!(first_hash, transaction_info_1.block_hash); + let eth_first_hash = node.resolve_ethereum_hash(first_hash).unwrap(); + assert_eq!(eth_first_hash, transaction_info_1.block_hash); assert_eq!(transaction_info_1.from, alith.address()); assert_eq!(tx_hash0, transaction_info_1.hash); - assert_eq!(first_hash, transaction_info_2.block_hash); + assert_eq!(eth_first_hash, transaction_info_2.block_hash); assert_eq!(transaction_info_2.from, baltathar.address()); assert_eq!(tx_hash1, transaction_info_2.hash); } @@ -550,7 +551,7 @@ async fn test_get_transaction_by_number_and_index() { .unwrap() .unwrap(); - let first_hash = node.block_hash_by_number(1).await.unwrap(); + let first_hash = node.eth_block_hash_by_number(1).await.unwrap(); assert_eq!(first_hash, transaction_info_1.block_hash); assert_eq!(transaction_info_1.from, alith.address()); assert_eq!(tx_hash0, transaction_info_1.hash); @@ -585,7 +586,7 @@ async fn test_get_transaction_by_hash() { .unwrap() .unwrap(); - let first_hash = node.block_hash_by_number(1).await.unwrap(); + let first_hash = node.eth_block_hash_by_number(1).await.unwrap(); assert_eq!(first_hash, transaction_info.block_hash); assert_eq!(transaction_info.from, alith.address()); assert_eq!(tx_hash0, transaction_info.hash); @@ -704,7 +705,7 @@ async fn test_max_fee_per_gas() { let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); assert_eq!( - "0x30d40", + "0x0", unwrap_response::( node.eth_rpc(EthRequest::EthMaxPriorityFeePerGas(())).await.unwrap() ) diff --git a/crates/anvil-polkadot/tests/it/state_injector.rs b/crates/anvil-polkadot/tests/it/state_injector.rs index 77b270f709ae1..1b9945045ce83 100644 --- a/crates/anvil-polkadot/tests/it/state_injector.rs +++ b/crates/anvil-polkadot/tests/it/state_injector.rs @@ -209,8 +209,7 @@ async fn test_set_balance() { assert_eq!(node.get_balance(alith, None).await, new_balance); - // Send 2 dollars to another account. We'll actually send 3, to cover for the existential - // deposit of 1 dollar. + // Send 2 dollars to another account. 
let charleth = Account::from(subxt_signer::eth::dev::charleth()); let tx = TransactionRequest::default() .value(U256::from(2e18)) @@ -227,7 +226,7 @@ async fn test_set_balance() { assert_eq!(transaction_receipt.block_number, pallet_revive::U256::from(2)); assert_eq!(transaction_receipt.transaction_hash, tx_hash); - let alith_new_balance = U256::from(2e18) + let alith_new_balance = U256::from(3e18) - AlloyU256::from(transaction_receipt.effective_gas_price * transaction_receipt.gas_used) .inner(); assert_eq!(node.get_balance(alith, None).await, alith_new_balance); diff --git a/crates/anvil-polkadot/tests/it/utils.rs b/crates/anvil-polkadot/tests/it/utils.rs index 73a25c73397b0..e466530f0298d 100644 --- a/crates/anvil-polkadot/tests/it/utils.rs +++ b/crates/anvil-polkadot/tests/it/utils.rs @@ -27,11 +27,15 @@ use codec::Decode; use eyre::{Result, WrapErr}; use futures::{StreamExt, channel::oneshot}; use polkadot_sdk::{ - pallet_revive::evm::{Block, HashesOrTransactionInfos, ReceiptInfo}, + pallet_revive::{ + ReviveApi, + evm::{Block, HashesOrTransactionInfos, ReceiptInfo}, + }, polkadot_sdk_frame::traits::Header, sc_cli::CliConfiguration, sc_client_api::{BlockBackend, BlockchainEvents}, sc_service::TaskManager, + sp_api::ProvideRuntimeApi, sp_core::H256, sp_state_machine::StorageKey, }; @@ -42,9 +46,6 @@ use tempfile::TempDir; use crate::abi::Multicall; -const NATIVE_TO_ETH_RATIO: u128 = 1000000; -pub const EXISTENTIAL_DEPOSIT: u128 = substrate_runtime::currency::DOLLARS * NATIVE_TO_ETH_RATIO; - pub struct BlockWaitTimeout { pub block_number: u32, pub timeout: Duration, @@ -120,6 +121,12 @@ impl TestNode { .ok_or_else(|| eyre::eyre!("no hash for block {}", n)) } + pub async fn eth_block_hash_by_number(&self, n: u32) -> eyre::Result { + let substrate_block_hash = self.block_hash_by_number(n).await?; + + self.resolve_ethereum_hash(substrate_block_hash) + } + /// Execute an ethereum transaction. 
pub async fn send_transaction( &mut self, @@ -281,8 +288,7 @@ impl TestNode { from_initial_balance - AlloyU256::from(receipt_info.effective_gas_price * receipt_info.gas_used) .inner() - - transfer_amount - - U256::from(EXISTENTIAL_DEPOSIT), + - transfer_amount, "signer's balance should have changed" ); assert_eq!( @@ -325,6 +331,10 @@ impl TestNode { U256::from_str_radix(hex_value, 16).unwrap() } + pub fn resolve_ethereum_hash(&self, substrate_hash: H256) -> eyre::Result { + Ok(self.service.client.runtime_api().eth_block(substrate_hash)?.hash) + } + async fn wait_for_block_with_number(&self, n: u32) { let mut import_stream = self.service.client.import_notification_stream(); diff --git a/crates/forge/tests/cli/revive_vm.rs b/crates/forge/tests/cli/revive_vm.rs index e003c4c1870b1..b05744ddb4ac4 100644 --- a/crates/forge/tests/cli/revive_vm.rs +++ b/crates/forge/tests/cli/revive_vm.rs @@ -1031,7 +1031,7 @@ Traces: │ └─ ← [Stop] ├─ [0] VM::getRecordedLogs() │ └─ ← [Return] [([0x61fb7db3625c10432927a76bb32400c33a94e9bb6374137c4cd59f6e465bfdcb, 0x0000000000000000000000000000000000000000000000000000000000000001], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001043a26051362b8040b289abe93334a5e300000000000000000000000000000000, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496), ([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC), ([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0x104fBc016F4bb334D775a19E8A6510109AC63E00)] - ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::getEmitterAddr() [staticcall] + ├─ [..] 
0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::getEmitterAddr() [staticcall] │ └─ ← [Return] 0x104fBc016F4bb334D775a19E8A6510109AC63E00 └─ ← [Stop] From 3b12df56d22e0ecb568a2c5fb78a48b918f09482 Mon Sep 17 00:00:00 2001 From: Diego Date: Thu, 6 Nov 2025 10:57:16 -0300 Subject: [PATCH 12/44] Fix fmt --- .../substrate_node/lazy_loading/backend.rs | 20 +++++++------------ 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs index 43b5a081830ee..8e456589e91bc 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -775,9 +775,8 @@ impl sp_state_machine::StorageIterator sp_state_machine::StorageIterator Date: Fri, 7 Nov 2025 11:24:32 -0300 Subject: [PATCH 13/44] Fix child storage methods --- .../substrate_node/lazy_loading/backend.rs | 66 ++++++++++++------- 1 file changed, 41 insertions(+), 25 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs index 8e456589e91bc..bce55448ba96a 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -554,6 +554,7 @@ pub struct BlockImportOperation { new_state: Option>>, aux: Vec<(Vec, Option>)>, storage_updates: StorageCollection, + child_storage_updates: ChildStorageCollection, finalized_blocks: Vec<(Block::Hash, Option)>, set_head: Option, pub(crate) before_fork: bool, @@ -592,6 +593,21 @@ impl BlockImportOperation { if v.is_empty() { (k.clone(), None) } else { (k.clone(), Some(v.clone())) } }) .collect(); + + self.child_storage_updates = storage + .children_default + .iter() + .map(|(_, child_content)| { + let child_storage: StorageCollection = child_content + .data + .iter() + .map(|(k, v)| { + if v.is_empty() { (k.clone(), None) } else { (k.clone(), Some(v.clone())) } + }) + .collect(); + (child_content.child_info.storage_key().to_vec(), child_storage) + }) + .collect(); } Ok(root) } @@ -656,9 +672,10 @@ impl backend::BlockImportOperation fn update_storage( &mut self, update: StorageCollection, - _child_update: ChildStorageCollection, + child_update: ChildStorageCollection, ) -> sp_blockchain::Result<()> { self.storage_updates = update; + self.child_storage_updates = child_update; Ok(()) } @@ -1082,18 +1099,18 @@ impl sp_state_machine::Backend Result, Self::Error> { - unimplemented!("child_storage: unsupported feature for lazy loading"); + Ok(self.db.read().child_storage(child_info, key).ok().flatten()) } fn child_storage_hash( &self, - _child_info: &sp_storage::ChildInfo, - _key: &[u8], + child_info: &sp_storage::ChildInfo, + key: &[u8], ) -> Result as sp_core::Hasher>::Out>, Self::Error> { - unimplemented!("child_storage_hash: unsupported feature for lazy loading"); + Ok(self.db.read().child_storage_hash(child_info, key).ok().flatten()) } fn next_storage_key( @@ -1146,10 +1163,10 @@ impl sp_state_machine::Backend Result, Self::Error> { - unimplemented!("next_child_storage_key: unsupported feature for lazy loading"); + Ok(self.db.read().next_child_storage_key(child_info, key).ok().flatten()) } fn storage_root<'a>( @@ -1165,14 +1182,14 @@ impl sp_state_machine::Backend( &self, - _child_info: &sp_storage::ChildInfo, - _delta: impl Iterator)>, - _state_version: StateVersion, + child_info: &sp_storage::ChildInfo, + delta: impl 
Iterator)>, + state_version: StateVersion, ) -> ( as sp_core::Hasher>::Out, bool, BackendTransaction>) where as sp_core::Hasher>::Out: Ord, { - unimplemented!("child_storage_root: unsupported in lazy loading") + self.db.read().child_storage_root(child_info, delta, state_version) } fn raw_iter(&self, args: sp_state_machine::IterArgs<'_>) -> Result { @@ -1267,6 +1284,7 @@ impl backend::Backend for Backend backend::Backend for Backend, operation.storage_updates)], StateVersion::V1); + { + let mut entries = vec![(None::, operation.storage_updates.clone())]; + if !operation.child_storage_updates.is_empty() { + entries.extend(operation.child_storage_updates.into_iter().map(|(key, data)| { + (Some(ChildInfo::new_default(&key)), data) + })); + } + new_db.write().insert(entries, StateVersion::V1); + } let new_state = ForkedLazyBackend { rpc_client: self.rpc_client.clone(), block_hash: Some(hash), @@ -1461,14 +1485,6 @@ pub fn check_genesis_storage(storage: &Storage) -> sp_blockchain::Result<()> { return Err(sp_blockchain::Error::InvalidState); } - if storage - .children_default - .keys() - .any(|child_key| !well_known_keys::is_child_storage_key(child_key)) - { - return Err(sp_blockchain::Error::InvalidState); - } - Ok(()) } From e2f203fd88aae269d3303278c42c5d8ec9f7cbc1 Mon Sep 17 00:00:00 2001 From: Diego Date: Fri, 7 Nov 2025 14:56:37 -0300 Subject: [PATCH 14/44] Implement revert and remove leafs Implement revert and remove leaf --- .../substrate_node/lazy_loading/backend.rs | 206 +++++++++++++++++- 1 file changed, 197 insertions(+), 9 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs index bce55448ba96a..9445b412dea2e 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -13,7 +13,9 @@ use polkadot_sdk::{ sp_runtime::{ Justification, Justifications, StateVersion, Storage, generic::BlockId, - traits::{Block as BlockT, HashingFor, Header as HeaderT, NumberFor, Zero}, + traits::{ + Block as BlockT, HashingFor, Header as HeaderT, NumberFor, One, Saturating, Zero, + }, }, sp_state_machine::{ self, BackendTransaction, ChildStorageCollection, IndexOperation, StorageCollection, @@ -602,7 +604,11 @@ impl BlockImportOperation { .data .iter() .map(|(k, v)| { - if v.is_empty() { (k.clone(), None) } else { (k.clone(), Some(v.clone())) } + if v.is_empty() { + (k.clone(), None) + } else { + (k.clone(), Some(v.clone())) + } }) .collect(); (child_content.child_info.storage_key().to_vec(), child_storage) @@ -1323,9 +1329,12 @@ impl backend::Backend for Backend, operation.storage_updates.clone())]; if !operation.child_storage_updates.is_empty() { - entries.extend(operation.child_storage_updates.into_iter().map(|(key, data)| { - (Some(ChildInfo::new_default(&key)), data) - })); + entries.extend( + operation + .child_storage_updates + .into_iter() + .map(|(key, data)| (Some(ChildInfo::new_default(&key)), data)), + ); } new_db.write().insert(entries, StateVersion::V1); } @@ -1447,13 +1456,192 @@ impl backend::Backend for Backend, - _revert_finalized: bool, + n: NumberFor, + revert_finalized: bool, ) -> sp_blockchain::Result<(NumberFor, HashSet)> { - Ok((Zero::zero(), HashSet::new())) + let mut storage = self.blockchain.storage.write(); + + if storage.blocks.is_empty() { + return Ok((Zero::zero(), HashSet::new())); + } + + let mut states = self.states.write(); + let pinned = self.pinned_blocks.read(); + + let mut 
target = n; + let original_finalized_number = storage.finalized_number; + + if !target.is_zero() && !revert_finalized { + let revertible = storage.best_number.saturating_sub(storage.finalized_number); + if target > revertible { + target = revertible; + } + } + + let mut reverted = NumberFor::::zero(); + let mut reverted_finalized = HashSet::new(); + + let mut current_hash = storage.best_hash.clone(); + let mut current_number = storage.best_number; + + while reverted < target { + if current_number.is_zero() { + break; + } + + if let Some(count) = pinned.get(¤t_hash) { + if *count > 0 { + break; + } + } + + let Some(block) = storage.blocks.get(¤t_hash) else { + break; + }; + + let header = block.header().clone(); + let number = *header.number(); + let parent_hash = header.parent_hash().clone(); + let parent_number = number.saturating_sub(One::one()); + + let parent_becomes_leaf = if number.is_zero() { + false + } else { + !storage.blocks.iter().any(|(other_hash, stored)| { + *other_hash != current_hash && stored.header().parent_hash() == &parent_hash + }) + }; + + let hash_to_remove = current_hash.clone(); + + storage.blocks.remove(&hash_to_remove); + if let Some(entry) = storage.hashes.get(&number) { + if *entry == hash_to_remove { + storage.hashes.remove(&number); + } + } + states.remove(&hash_to_remove); + + storage.leaves.remove( + hash_to_remove.clone(), + number, + parent_becomes_leaf.then_some(parent_hash.clone()), + ); + + if number <= original_finalized_number { + reverted_finalized.insert(hash_to_remove); + } + + reverted = reverted.saturating_add(One::one()); + + current_hash = parent_hash; + current_number = parent_number; + + storage.best_hash = current_hash; + storage.best_number = current_number; + } + + let best_hash_after = storage.best_hash.clone(); + let best_number_after = storage.best_number; + let extra_leaves: Vec<_> = + storage.leaves.revert(best_hash_after.clone(), best_number_after).collect(); + + for (hash, number) in extra_leaves { + if let Some(count) = pinned.get(&hash) { + if *count > 0 { + return Err(sp_blockchain::Error::Backend(format!( + "Can't revert pinned block {hash:?}", + ))); + } + } + + storage.blocks.remove(&hash); + if let Some(entry) = storage.hashes.get(&number) { + if *entry == hash { + storage.hashes.remove(&number); + } + } + states.remove(&hash); + + if number <= original_finalized_number { + reverted_finalized.insert(hash); + } + } + + storage.hashes.insert(best_number_after, best_hash_after.clone()); + + if storage.finalized_number > best_number_after { + storage.finalized_number = best_number_after; + } + + while storage.finalized_number > Zero::zero() + && !storage.hashes.contains_key(&storage.finalized_number) + { + storage.finalized_number = storage.finalized_number.saturating_sub(One::one()); + } + + if let Some(hash) = storage.hashes.get(&storage.finalized_number).cloned() { + storage.finalized_hash = hash; + } else { + storage.finalized_hash = storage.genesis_hash; + } + + drop(pinned); + drop(states); + + Ok((reverted, reverted_finalized)) } - fn remove_leaf_block(&self, _hash: Block::Hash) -> sp_blockchain::Result<()> { + fn remove_leaf_block(&self, hash: Block::Hash) -> sp_blockchain::Result<()> { + let best_hash = self.blockchain.info().best_hash; + + if best_hash == hash { + return Err(sp_blockchain::Error::Backend( + format!("Can't remove best block {hash:?}",), + )); + } + + let mut storage = self.blockchain.storage.write(); + + let Some(block) = storage.blocks.get(&hash) else { + return 
Err(sp_blockchain::Error::UnknownBlock(format!("{hash:?}"))); + }; + + let number = *block.header().number(); + let parent_hash = *block.header().parent_hash(); + + if !storage.leaves.contains(number, hash.clone()) { + return Err(sp_blockchain::Error::Backend(format!( + "Can't remove non-leaf block {hash:?}", + ))); + } + + if self.pinned_blocks.read().get(&hash).map_or(false, |count| *count > 0) { + return Err(sp_blockchain::Error::Backend(format!( + "Can't remove pinned block {hash:?}", + ))); + } + + let parent_becomes_leaf = if number.is_zero() { + false + } else { + !storage.blocks.iter().any(|(other_hash, stored)| { + *other_hash != hash && stored.header().parent_hash() == &parent_hash + }) + }; + + let mut states = self.states.write(); + + storage.blocks.remove(&hash); + if let Some(entry) = storage.hashes.get(&number) { + if *entry == hash { + storage.hashes.remove(&number); + } + } + states.remove(&hash); + + storage.leaves.remove(hash, number, parent_becomes_leaf.then_some(parent_hash)); + Ok(()) } From 306e2895e570b56c386290ca9e5e5c8b1f624469 Mon Sep 17 00:00:00 2001 From: Diego Date: Fri, 7 Nov 2025 17:28:44 -0300 Subject: [PATCH 15/44] Fix commit operation --- .../substrate_node/lazy_loading/backend.rs | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs index 9445b412dea2e..60b816728d54c 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -1316,28 +1316,32 @@ impl backend::Backend for Backend, operation.storage_updates.clone())]; - if !operation.child_storage_updates.is_empty() { + let mut entries = vec![(None::, storage_updates.clone())]; + if !child_storage_updates.is_empty() { entries.extend( - operation - .child_storage_updates - .into_iter() - .map(|(key, data)| (Some(ChildInfo::new_default(&key)), data)), + child_storage_updates + .iter() + .map(|(key, data)| (Some(ChildInfo::new_default(key)), data.clone())), ); } - new_db.write().insert(entries, StateVersion::V1); + db_clone.insert(entries, StateVersion::V1); } + let new_db = Arc::new(parking_lot::RwLock::new(db_clone)); let new_state = ForkedLazyBackend { rpc_client: self.rpc_client.clone(), block_hash: Some(hash), From 697632d2eed92ae49fad4498e6ceaae3dab31d0a Mon Sep 17 00:00:00 2001 From: Diego Date: Fri, 7 Nov 2025 17:33:23 -0300 Subject: [PATCH 16/44] Cleanup some comments and logs --- .../substrate_node/lazy_loading/backend.rs | 63 +------------------ 1 file changed, 1 insertion(+), 62 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs index 60b816728d54c..b3bcc4cb5c4a3 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -110,11 +110,6 @@ pub struct Blockchain { impl Blockchain { /// Create new in-memory blockchain storage. 
fn new(rpc_client: Option>>) -> Self { - log::info!( - target: super::LAZY_LOADING_LOG_TARGET, - "🏗️ Creating new Blockchain storage (empty)" - ); - let storage = Arc::new(parking_lot::RwLock::new(BlockchainStorage { blocks: HashMap::new(), hashes: HashMap::new(), @@ -139,27 +134,10 @@ impl Blockchain { match id { BlockId::Hash(h) => Some(h), BlockId::Number(n) => { - log::info!( - target: super::LAZY_LOADING_LOG_TARGET, - "Looking up block hash for number={n}", - ); - let block_hash = self.storage.read().hashes.get(&n).copied(); - log::info!( - target: super::LAZY_LOADING_LOG_TARGET, - "Lookup result: number={}, found={}, total_hashes={}", - n, - block_hash.is_some(), - self.storage.read().hashes.len() - ); - match block_hash { None => { - log::info!( - target: super::LAZY_LOADING_LOG_TARGET, - "Block hash not found locally, trying RPC for number={n}", - ); let block_hash = self.rpc().and_then(|rpc| rpc.block_hash(Some(n)).ok().flatten()); if let Some(h) = block_hash { @@ -184,11 +162,6 @@ impl Blockchain { ) -> sp_blockchain::Result<()> { let number = *header.number(); - log::info!( - target: super::LAZY_LOADING_LOG_TARGET, - "Inserting block: number={number}, hash={hash:?}, new_state={new_state:?}", - ); - if new_state.is_best() { self.apply_head(&header)?; } @@ -199,15 +172,6 @@ impl Blockchain { storage.blocks.insert(hash, StoredBlock::new(header.clone(), body, justifications)); storage.hashes.insert(number, hash); - log::info!( - target: super::LAZY_LOADING_LOG_TARGET, - "Block inserted successfully: number={}, hash={:?}. Total blocks={}, Total hashes={}", - number, - hash, - storage.blocks.len(), - storage.hashes.len() - ); - // Set genesis_hash only for the first block inserted if storage.blocks.len() == 1 { storage.genesis_hash = hash; @@ -230,12 +194,6 @@ impl Blockchain { /// Get total number of blocks. pub fn blocks_count(&self) -> usize { let count = self.storage.read().blocks.len(); - - log::debug!( - target: super::LAZY_LOADING_LOG_TARGET, - "Total number of blocks: {count:?}", - ); - count } @@ -388,27 +346,12 @@ impl HeaderBackend for Blockchain blockchain::Info { let storage = self.storage.read(); - // Return None for finalized_state when blockchain is empty or only has genesis block - // This allows Client::new to properly initialize and complete genesis setup - // finalized_state should only be Some() when there are blocks beyond genesis let finalized_state = if storage.blocks.len() <= 1 { None } else { Some((storage.finalized_hash, storage.finalized_number)) }; - log::info!( - target: super::LAZY_LOADING_LOG_TARGET, - "📊 Blockchain::info() - blocks={}, best_hash={:?}, best_number={}, genesis_hash={:?}, finalized_hash={:?}, finalized_number={}, finalized_state={:?}", - storage.blocks.len(), - storage.best_hash, - storage.best_number, - storage.genesis_hash, - storage.finalized_hash, - storage.finalized_number, - finalized_state - ); - blockchain::Info { best_hash: storage.best_hash, best_number: storage.best_number, @@ -707,10 +650,7 @@ impl backend::BlockImportOperation Ok(()) } - fn set_create_gap(&mut self, _create_gap: bool) { - // This implementation can be left empty or implemented as needed - // For now, we're just implementing the trait method with no functionality - } + fn set_create_gap(&mut self, _create_gap: bool) {} } /// DB-backed patricia trie state, transaction type is an overaay of changes to commit. 
@@ -1015,7 +955,6 @@ impl sp_state_machine::Backend Date: Mon, 10 Nov 2025 13:12:34 -0300 Subject: [PATCH 17/44] Fix clippy and fmt --- .../substrate_node/lazy_loading/backend.rs | 33 +++++++++---------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs index b3bcc4cb5c4a3..0e1d313996329 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs @@ -193,8 +193,7 @@ impl Blockchain { /// Get total number of blocks. pub fn blocks_count(&self) -> usize { - let count = self.storage.read().blocks.len(); - count + self.storage.read().blocks.len() } /// Compare this blockchain with another in-mem blockchain @@ -541,8 +540,8 @@ impl BlockImportOperation { self.child_storage_updates = storage .children_default - .iter() - .map(|(_, child_content)| { + .values() + .map(|child_content| { let child_storage: StorageCollection = child_content .data .iter() @@ -1424,7 +1423,7 @@ impl backend::Backend for Backend::zero(); let mut reverted_finalized = HashSet::new(); - let mut current_hash = storage.best_hash.clone(); + let mut current_hash = storage.best_hash; let mut current_number = storage.best_number; while reverted < target { @@ -1444,18 +1443,18 @@ impl backend::Backend for Backend backend::Backend for Backend backend::Backend for Backend = - storage.leaves.revert(best_hash_after.clone(), best_number_after).collect(); + storage.leaves.revert(best_hash_after, best_number_after).collect(); for (hash, number) in extra_leaves { if let Some(count) = pinned.get(&hash) { @@ -1511,7 +1510,7 @@ impl backend::Backend for Backend best_number_after { storage.finalized_number = best_number_after; @@ -1523,7 +1522,7 @@ impl backend::Backend for Backend backend::Backend for Backend 0) { + if self.pinned_blocks.read().get(&hash).is_some_and(|count| *count > 0) { return Err(sp_blockchain::Error::Backend(format!( "Can't remove pinned block {hash:?}", ))); From 18906cb7717df04ad2bd321fd00520b1eeaee8a8 Mon Sep 17 00:00:00 2001 From: Diego Date: Mon, 10 Nov 2025 14:56:23 -0300 Subject: [PATCH 18/44] split lazy-loading backend into dedicated modules --- .../substrate_node/lazy_loading/backend.rs | 2001 ----------------- .../backend/block_import_operation.rs | 188 ++ .../lazy_loading/backend/blockchain.rs | 474 ++++ .../backend/forked_lazy_backend.rs | 530 +++++ .../lazy_loading/backend/mod.rs | 482 ++++ .../lazy_loading/backend/tests.rs | 364 +++ 6 files changed, 2038 insertions(+), 2001 deletions(-) delete mode 100644 crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs create mode 100644 crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs create mode 100644 crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs create mode 100644 crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs create mode 100644 crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs create mode 100644 crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs deleted file mode 100644 index 0e1d313996329..0000000000000 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend.rs +++ /dev/null @@ -1,2001 +0,0 @@ -use 
super::rpc_client::RPCClient; -use alloy_primitives::hex; -use polkadot_sdk::{ - sc_client_api::{ - StorageKey, TrieCacheContext, UsageInfo, - backend::{self, NewBlockState}, - blockchain::{self, BlockStatus, HeaderBackend}, - leaves::LeafSet, - }, - sc_service::Error, - sp_blockchain::{self, CachedHeaderMetadata, HeaderMetadata}, - sp_core::{self, H256, offchain::storage::InMemOffchainStorage, storage::well_known_keys}, - sp_runtime::{ - Justification, Justifications, StateVersion, Storage, - generic::BlockId, - traits::{ - Block as BlockT, HashingFor, Header as HeaderT, NumberFor, One, Saturating, Zero, - }, - }, - sp_state_machine::{ - self, BackendTransaction, ChildStorageCollection, IndexOperation, StorageCollection, - TrieBackend, - }, - sp_storage::{self, ChildInfo}, - sp_trie::{self, PrefixedMemoryDB}, -}; -use serde::de::DeserializeOwned; -use std::{ - collections::{HashMap, HashSet}, - marker::PhantomData, - ptr, - sync::Arc, -}; - -struct PendingBlock { - block: StoredBlock, - state: NewBlockState, -} - -#[derive(PartialEq, Eq, Clone)] -enum StoredBlock { - Header(B::Header, Option), - Full(B, Option), -} - -impl StoredBlock { - fn new( - header: B::Header, - body: Option>, - just: Option, - ) -> Self { - match body { - Some(body) => Self::Full(B::new(header, body), just), - None => Self::Header(header, just), - } - } - - fn header(&self) -> &B::Header { - match *self { - Self::Header(ref h, _) => h, - Self::Full(ref b, _) => b.header(), - } - } - - fn justifications(&self) -> Option<&Justifications> { - match *self { - Self::Header(_, ref j) | Self::Full(_, ref j) => j.as_ref(), - } - } - - fn extrinsics(&self) -> Option<&[B::Extrinsic]> { - match *self { - Self::Header(_, _) => None, - Self::Full(ref b, _) => Some(b.extrinsics()), - } - } - - fn into_inner(self) -> (B::Header, Option>, Option) { - match self { - Self::Header(header, just) => (header, None, just), - Self::Full(block, just) => { - let (header, body) = block.deconstruct(); - (header, Some(body), just) - } - } - } -} - -#[derive(Clone)] -struct BlockchainStorage { - blocks: HashMap>, - hashes: HashMap, Block::Hash>, - best_hash: Block::Hash, - best_number: NumberFor, - finalized_hash: Block::Hash, - finalized_number: NumberFor, - genesis_hash: Block::Hash, - header_cht_roots: HashMap, Block::Hash>, - leaves: LeafSet>, - aux: HashMap, Vec>, -} - -/// In-memory blockchain. Supports concurrent reads. -#[derive(Clone)] -pub struct Blockchain { - rpc_client: Option>>, - storage: Arc>>, -} - -impl Blockchain { - /// Create new in-memory blockchain storage. - fn new(rpc_client: Option>>) -> Self { - let storage = Arc::new(parking_lot::RwLock::new(BlockchainStorage { - blocks: HashMap::new(), - hashes: HashMap::new(), - best_hash: Default::default(), - best_number: Zero::zero(), - finalized_hash: Default::default(), - finalized_number: Zero::zero(), - genesis_hash: Default::default(), - header_cht_roots: HashMap::new(), - leaves: LeafSet::new(), - aux: HashMap::new(), - })); - Self { rpc_client, storage } - } - #[inline] - fn rpc(&self) -> Option<&dyn RPCClient> { - self.rpc_client.as_deref() - } - - /// Get header hash of given block. 
- pub fn id(&self, id: BlockId) -> Option { - match id { - BlockId::Hash(h) => Some(h), - BlockId::Number(n) => { - let block_hash = self.storage.read().hashes.get(&n).copied(); - - match block_hash { - None => { - let block_hash = - self.rpc().and_then(|rpc| rpc.block_hash(Some(n)).ok().flatten()); - if let Some(h) = block_hash { - self.storage.write().hashes.insert(n, h); - } - block_hash - } - block_hash => block_hash, - } - } - } - } - - /// Insert a block header and associated data. - pub fn insert( - &self, - hash: Block::Hash, - header: ::Header, - justifications: Option, - body: Option::Extrinsic>>, - new_state: NewBlockState, - ) -> sp_blockchain::Result<()> { - let number = *header.number(); - - if new_state.is_best() { - self.apply_head(&header)?; - } - - let mut storage = self.storage.write(); - - // Always insert the block into blocks and hashes storage - storage.blocks.insert(hash, StoredBlock::new(header.clone(), body, justifications)); - storage.hashes.insert(number, hash); - - // Set genesis_hash only for the first block inserted - if storage.blocks.len() == 1 { - storage.genesis_hash = hash; - } - - // Update leaves for non-genesis blocks - if storage.blocks.len() > 1 { - storage.leaves.import(hash, number, *header.parent_hash()); - } - - // Finalize block only if explicitly requested via new_state - if let NewBlockState::Final = new_state { - storage.finalized_hash = hash; - storage.finalized_number = number; - } - - Ok(()) - } - - /// Get total number of blocks. - pub fn blocks_count(&self) -> usize { - self.storage.read().blocks.len() - } - - /// Compare this blockchain with another in-mem blockchain - pub fn equals_to(&self, other: &Self) -> bool { - // Check ptr equality first to avoid double read locks. - if ptr::eq(self, other) { - return true; - } - self.canon_equals_to(other) && self.storage.read().blocks == other.storage.read().blocks - } - - /// Compare canonical chain to other canonical chain. - pub fn canon_equals_to(&self, other: &Self) -> bool { - // Check ptr equality first to avoid double read locks. - if ptr::eq(self, other) { - return true; - } - let this = self.storage.read(); - let other = other.storage.read(); - this.hashes == other.hashes - && this.best_hash == other.best_hash - && this.best_number == other.best_number - && this.genesis_hash == other.genesis_hash - } - - /// Insert header CHT root. - pub fn insert_cht_root(&self, block: NumberFor, cht_root: Block::Hash) { - self.storage.write().header_cht_roots.insert(block, cht_root); - } - - /// Set an existing block as head. - pub fn set_head(&self, hash: Block::Hash) -> sp_blockchain::Result<()> { - let header = self - .header(hash)? 
- .ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{hash:?}")))?; - - self.apply_head(&header) - } - - fn apply_head(&self, header: &::Header) -> sp_blockchain::Result<()> { - let mut storage = self.storage.write(); - - let hash = header.hash(); - let number = header.number(); - - storage.best_hash = hash; - storage.best_number = *number; - storage.hashes.insert(*number, hash); - - Ok(()) - } - - fn finalize_header( - &self, - block: Block::Hash, - justification: Option, - ) -> sp_blockchain::Result<()> { - let mut storage = self.storage.write(); - storage.finalized_hash = block; - - if justification.is_some() { - let block = storage - .blocks - .get_mut(&block) - .expect("hash was fetched from a block in the db; qed"); - - let block_justifications = match block { - StoredBlock::Header(_, j) | StoredBlock::Full(_, j) => j, - }; - - *block_justifications = justification.map(Justifications::from); - } - - Ok(()) - } - - fn append_justification( - &self, - hash: Block::Hash, - justification: Justification, - ) -> sp_blockchain::Result<()> { - let mut storage = self.storage.write(); - - let block = - storage.blocks.get_mut(&hash).expect("hash was fetched from a block in the db; qed"); - - let block_justifications = match block { - StoredBlock::Header(_, j) | StoredBlock::Full(_, j) => j, - }; - - if let Some(stored_justifications) = block_justifications { - if !stored_justifications.append(justification) { - return Err(sp_blockchain::Error::BadJustification( - "Duplicate consensus engine ID".into(), - )); - } - } else { - *block_justifications = Some(Justifications::from(justification)); - }; - - Ok(()) - } - - fn write_aux(&self, ops: Vec<(Vec, Option>)>) { - let mut storage = self.storage.write(); - for (k, v) in ops { - match v { - Some(v) => storage.aux.insert(k, v), - None => storage.aux.remove(&k), - }; - } - } -} - -impl HeaderBackend for Blockchain { - fn header( - &self, - hash: Block::Hash, - ) -> sp_blockchain::Result::Header>> { - // First, try to get the header from local storage - if let Some(header) = self.storage.read().blocks.get(&hash).map(|b| b.header().clone()) { - return Ok(Some(header)); - } - - // If not found in local storage, fetch from RPC client - let header = if let Some(rpc) = self.rpc() { - rpc.block(Some(hash)).ok().flatten().map(|full| { - let block = full.block.clone(); - self.storage - .write() - .blocks - .insert(hash, StoredBlock::Full(block.clone(), full.justifications)); - block.header().clone() - }) - } else { - None - }; - - if header.is_none() { - log::warn!( - target: super::LAZY_LOADING_LOG_TARGET, - "Expected block {:x?} to exist.", - &hash - ); - } - - Ok(header) - } - - fn info(&self) -> blockchain::Info { - let storage = self.storage.read(); - let finalized_state = if storage.blocks.len() <= 1 { - None - } else { - Some((storage.finalized_hash, storage.finalized_number)) - }; - - blockchain::Info { - best_hash: storage.best_hash, - best_number: storage.best_number, - genesis_hash: storage.genesis_hash, - finalized_hash: storage.finalized_hash, - finalized_number: storage.finalized_number, - finalized_state, - number_leaves: storage.leaves.count(), - block_gap: None, - } - } - - fn status(&self, hash: Block::Hash) -> sp_blockchain::Result { - match self.storage.read().blocks.contains_key(&hash) { - true => Ok(BlockStatus::InChain), - false => Ok(BlockStatus::Unknown), - } - } - - fn number(&self, hash: Block::Hash) -> sp_blockchain::Result>> { - if let Some(b) = self.storage.read().blocks.get(&hash) { - return 
Ok(Some(*b.header().number())); - } - match self.rpc() { - Some(rpc) => match rpc.block(Some(hash)) { - Ok(Some(block)) => Ok(Some(*block.block.header().number())), - err => Err(sp_blockchain::Error::UnknownBlock(format!( - "Failed to fetch block number from RPC: {err:?}" - ))), - }, - None => Err(sp_blockchain::Error::UnknownBlock( - "RPC not configured to resolve block number".into(), - )), - } - } - - fn hash( - &self, - number: <::Header as HeaderT>::Number, - ) -> sp_blockchain::Result> { - Ok(self.id(BlockId::Number(number))) - } -} - -impl HeaderMetadata for Blockchain { - type Error = sp_blockchain::Error; - - fn header_metadata( - &self, - hash: Block::Hash, - ) -> Result, Self::Error> { - self.header(hash)?.map(|header| CachedHeaderMetadata::from(&header)).ok_or_else(|| { - sp_blockchain::Error::UnknownBlock(format!("header not found: {hash:?}")) - }) - } - - fn insert_header_metadata(&self, _hash: Block::Hash, _metadata: CachedHeaderMetadata) { - // No need to implement. - unimplemented!("insert_header_metadata") - } - fn remove_header_metadata(&self, _hash: Block::Hash) { - // No need to implement. - unimplemented!("remove_header_metadata") - } -} - -impl blockchain::Backend for Blockchain { - fn body( - &self, - hash: Block::Hash, - ) -> sp_blockchain::Result::Extrinsic>>> { - if let Some(xs) = - self.storage.read().blocks.get(&hash).and_then(|b| b.extrinsics().map(|x| x.to_vec())) - { - return Ok(Some(xs)); - } - let extrinsics = self.rpc().and_then(|rpc| { - rpc.block(Some(hash)).ok().flatten().map(|b| b.block.extrinsics().to_vec()) - }); - Ok(extrinsics) - } - - fn justifications(&self, hash: Block::Hash) -> sp_blockchain::Result> { - Ok(self.storage.read().blocks.get(&hash).and_then(|b| b.justifications().cloned())) - } - - fn last_finalized(&self) -> sp_blockchain::Result { - let last_finalized = self.storage.read().finalized_hash; - - Ok(last_finalized) - } - - fn leaves(&self) -> sp_blockchain::Result> { - let leaves = self.storage.read().leaves.hashes(); - - Ok(leaves) - } - - fn children(&self, _parent_hash: Block::Hash) -> sp_blockchain::Result> { - unimplemented!("Not supported by the `lazy-loading` backend.") - } - - fn indexed_transaction(&self, _hash: Block::Hash) -> sp_blockchain::Result>> { - unimplemented!("Not supported by the `lazy-loading` backend.") - } - - fn block_indexed_body( - &self, - _hash: Block::Hash, - ) -> sp_blockchain::Result>>> { - unimplemented!("Not supported by the `lazy-loading` backend.") - } -} - -impl backend::AuxStore for Blockchain { - fn insert_aux< - 'a, - 'b: 'a, - 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >( - &self, - insert: I, - delete: D, - ) -> sp_blockchain::Result<()> { - let mut storage = self.storage.write(); - for (k, v) in insert { - storage.aux.insert(k.to_vec(), v.to_vec()); - } - for k in delete { - storage.aux.remove(*k); - } - Ok(()) - } - - fn get_aux(&self, key: &[u8]) -> sp_blockchain::Result>> { - Ok(self.storage.read().aux.get(key).cloned()) - } -} - -pub struct BlockImportOperation { - pending_block: Option>, - old_state: ForkedLazyBackend, - new_state: Option>>, - aux: Vec<(Vec, Option>)>, - storage_updates: StorageCollection, - child_storage_updates: ChildStorageCollection, - finalized_blocks: Vec<(Block::Hash, Option)>, - set_head: Option, - pub(crate) before_fork: bool, -} - -impl BlockImportOperation { - fn apply_storage( - &mut self, - storage: Storage, - commit: bool, - state_version: StateVersion, - ) -> sp_blockchain::Result { - use sp_state_machine::Backend; - 
check_genesis_storage(&storage)?; - - let child_delta = storage.children_default.values().map(|child_content| { - ( - &child_content.child_info, - child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), - ) - }); - - let (root, transaction) = self.old_state.full_storage_root( - storage.top.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), - child_delta, - state_version, - ); - - if commit { - self.new_state = Some(transaction); - self.storage_updates = - storage - .top - .iter() - .map(|(k, v)| { - if v.is_empty() { (k.clone(), None) } else { (k.clone(), Some(v.clone())) } - }) - .collect(); - - self.child_storage_updates = storage - .children_default - .values() - .map(|child_content| { - let child_storage: StorageCollection = child_content - .data - .iter() - .map(|(k, v)| { - if v.is_empty() { - (k.clone(), None) - } else { - (k.clone(), Some(v.clone())) - } - }) - .collect(); - (child_content.child_info.storage_key().to_vec(), child_storage) - }) - .collect(); - } - Ok(root) - } -} - -impl backend::BlockImportOperation - for BlockImportOperation -{ - type State = ForkedLazyBackend; - - fn state(&self) -> sp_blockchain::Result> { - Ok(Some(&self.old_state)) - } - - fn set_block_data( - &mut self, - header: ::Header, - body: Option::Extrinsic>>, - _indexed_body: Option>>, - justifications: Option, - state: NewBlockState, - ) -> sp_blockchain::Result<()> { - assert!(self.pending_block.is_none(), "Only one block per operation is allowed"); - self.pending_block = - Some(PendingBlock { block: StoredBlock::new(header, body, justifications), state }); - Ok(()) - } - - fn update_db_storage( - &mut self, - update: BackendTransaction>, - ) -> sp_blockchain::Result<()> { - self.new_state = Some(update); - Ok(()) - } - - fn set_genesis_state( - &mut self, - storage: Storage, - commit: bool, - state_version: StateVersion, - ) -> sp_blockchain::Result { - self.apply_storage(storage, commit, state_version) - } - - fn reset_storage( - &mut self, - storage: Storage, - state_version: StateVersion, - ) -> sp_blockchain::Result { - self.apply_storage(storage, true, state_version) - } - - fn insert_aux(&mut self, ops: I) -> sp_blockchain::Result<()> - where - I: IntoIterator, Option>)>, - { - self.aux.append(&mut ops.into_iter().collect()); - Ok(()) - } - - fn update_storage( - &mut self, - update: StorageCollection, - child_update: ChildStorageCollection, - ) -> sp_blockchain::Result<()> { - self.storage_updates = update; - self.child_storage_updates = child_update; - Ok(()) - } - - fn mark_finalized( - &mut self, - hash: Block::Hash, - justification: Option, - ) -> sp_blockchain::Result<()> { - self.finalized_blocks.push((hash, justification)); - Ok(()) - } - - fn mark_head(&mut self, hash: Block::Hash) -> sp_blockchain::Result<()> { - assert!(self.pending_block.is_none(), "Only one set block per operation is allowed"); - self.set_head = Some(hash); - Ok(()) - } - - fn update_transaction_index( - &mut self, - _index: Vec, - ) -> sp_blockchain::Result<()> { - Ok(()) - } - - fn set_create_gap(&mut self, _create_gap: bool) {} -} - -/// DB-backed patricia trie state, transaction type is an overaay of changes to commit. -pub type DbState = TrieBackend>>, HashingFor>; - -/// A struct containing arguments for iterating over the storage. -#[derive(Default)] -pub struct RawIterArgs { - /// The prefix of the keys over which to iterate. - pub prefix: Option>, - - /// The prefix from which to start the iteration from. 
- /// - /// This is inclusive and the iteration will include the key which is specified here. - pub start_at: Option>, -} - -/// A raw iterator over the `BenchmarkingState`. -pub struct RawIter { - pub(crate) args: RawIterArgs, - complete: bool, - _phantom: PhantomData, -} - -impl sp_state_machine::StorageIterator> - for RawIter -{ - type Backend = ForkedLazyBackend; - type Error = String; - - fn next_key( - &mut self, - backend: &Self::Backend, - ) -> Option> { - use sp_state_machine::Backend; - - let remote_fetch = - |key: Option, start_key: Option, block: Option| { - backend - .rpc() - .and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()) - .and_then(|keys| keys.first().cloned()) - }; - - let prefix = self.args.prefix.clone().map(StorageKey); - let start_key = self.args.start_at.clone().map(StorageKey); - - let maybe_next_key = if backend.before_fork { - // If RPC client is available, fetch remotely - if backend.rpc().is_some() { - remote_fetch(prefix, start_key, backend.block_hash) - } else { - // No RPC client, use local DB - let mut iter_args = sp_state_machine::backend::IterArgs::default(); - iter_args.prefix = self.args.prefix.as_deref(); - iter_args.start_at = self.args.start_at.as_deref(); - iter_args.stop_on_incomplete_database = true; - - let readable_db = backend.db.read(); - readable_db - .raw_iter(iter_args) - .map(|mut iter| iter.next_key(&readable_db)) - .map(|op| op.and_then(|result| result.ok())) - .ok() - .flatten() - } - } else { - // First, try to get next key from local DB - let next_storage_key = if let Some(ref start) = self.args.start_at { - // If we have a start_at, use next_storage_key to get the next one after it - backend.db.read().next_storage_key(start).ok().flatten() - } else { - // No start_at, use raw_iter to get the first key with the prefix - let mut iter_args = sp_state_machine::backend::IterArgs::default(); - iter_args.prefix = self.args.prefix.as_deref(); - iter_args.stop_on_incomplete_database = true; - - let readable_db = backend.db.read(); - readable_db - .raw_iter(iter_args) - .map(|mut iter| iter.next_key(&readable_db)) - .map(|op| op.and_then(|result| result.ok())) - .ok() - .flatten() - }; - - // Filter by prefix if necessary - let next_storage_key = next_storage_key - .filter(|key| prefix.as_ref().map(|p| key.starts_with(&p.0)).unwrap_or(true)); - - let removed_key = start_key - .clone() - .or(prefix.clone()) - .map(|key| backend.removed_keys.read().contains_key(&key.0)) - .unwrap_or(false); - if next_storage_key.is_none() && !removed_key { - let maybe_next_key = if backend.rpc().is_some() { - remote_fetch(prefix, start_key, Some(backend.fork_block)) - } else { - None - }; - match maybe_next_key { - Some(key) if !backend.removed_keys.read().contains_key(&key) => Some(key), - _ => None, - } - } else { - next_storage_key - } - }; - - log::trace!( - target: super::LAZY_LOADING_LOG_TARGET, - "next_key: (prefix: {:?}, start_at: {:?}, next_key: {:?})", - self.args.prefix.clone().map(hex::encode), - self.args.start_at.clone().map(hex::encode), - maybe_next_key.clone().map(hex::encode) - ); - - if let Some(next_key) = maybe_next_key { - if self - .args - .prefix - .clone() - .map(|filter_key| next_key.starts_with(&filter_key)) - .unwrap_or(false) - { - self.args.start_at = Some(next_key.clone()); - Some(Ok(next_key)) - } else { - self.complete = true; - None - } - } else { - self.complete = true; - None - } - } - - fn next_pair( - &mut self, - backend: &Self::Backend, - ) -> Option> - { - use sp_state_machine::Backend; - - let 
remote_fetch = - |key: Option, start_key: Option, block: Option| { - backend - .rpc() - .and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()) - .and_then(|keys| keys.first().cloned()) - }; - - let prefix = self.args.prefix.clone().map(StorageKey); - let start_key = self.args.start_at.clone().map(StorageKey); - - let maybe_next_key = if backend.before_fork { - // If RPC client is available, fetch remotely - if backend.rpc().is_some() { - remote_fetch(prefix, start_key, backend.block_hash) - } else { - // No RPC client, use local DB - let mut iter_args = sp_state_machine::backend::IterArgs::default(); - iter_args.prefix = self.args.prefix.as_deref(); - iter_args.start_at = self.args.start_at.as_deref(); - iter_args.stop_on_incomplete_database = true; - - let readable_db = backend.db.read(); - readable_db - .raw_iter(iter_args) - .map(|mut iter| iter.next_key(&readable_db)) - .map(|op| op.and_then(|result| result.ok())) - .ok() - .flatten() - } - } else { - // First, try to get next key from local DB - let next_storage_key = if let Some(ref start) = self.args.start_at { - // If we have a start_at, use next_storage_key to get the next one after it - backend.db.read().next_storage_key(start).ok().flatten() - } else { - // No start_at, use raw_iter to get the first key with the prefix - let mut iter_args = sp_state_machine::backend::IterArgs::default(); - iter_args.prefix = self.args.prefix.as_deref(); - iter_args.stop_on_incomplete_database = true; - - let readable_db = backend.db.read(); - readable_db - .raw_iter(iter_args) - .map(|mut iter| iter.next_key(&readable_db)) - .map(|op| op.and_then(|result| result.ok())) - .ok() - .flatten() - }; - - // Filter by prefix if necessary - let next_storage_key = next_storage_key - .filter(|key| prefix.as_ref().map(|p| key.starts_with(&p.0)).unwrap_or(true)); - - let removed_key = start_key - .clone() - .or(prefix.clone()) - .map(|key| backend.removed_keys.read().contains_key(&key.0)) - .unwrap_or(false); - if next_storage_key.is_none() && !removed_key { - let maybe_next_key = if backend.rpc().is_some() { - remote_fetch(prefix, start_key, Some(backend.fork_block)) - } else { - None - }; - match maybe_next_key { - Some(key) if !backend.removed_keys.read().contains_key(&key) => Some(key), - _ => None, - } - } else { - next_storage_key - } - }; - - log::trace!( - target: super::LAZY_LOADING_LOG_TARGET, - "next_pair: (prefix: {:?}, start_at: {:?}, next_key: {:?})", - self.args.prefix.clone().map(hex::encode), - self.args.start_at.clone().map(hex::encode), - maybe_next_key.clone().map(hex::encode) - ); - - let maybe_value = maybe_next_key - .clone() - .and_then(|key| (*backend).storage(key.as_slice()).ok()) - .flatten(); - - if let Some(next_key) = maybe_next_key { - if self - .args - .prefix - .clone() - .map(|filter_key| next_key.starts_with(&filter_key)) - .unwrap_or(false) - { - self.args.start_at = Some(next_key.clone()); - maybe_value.map(|value| Ok((next_key, value))) - } else { - self.complete = true; - None - } - } else { - self.complete = true; - None - } - } - - fn was_complete(&self) -> bool { - self.complete - } -} - -#[derive(Debug, Clone)] -pub struct ForkedLazyBackend { - rpc_client: Option>>, - block_hash: Option, - fork_block: Block::Hash, - pub(crate) db: Arc>>>, - pub(crate) removed_keys: Arc, ()>>>, - before_fork: bool, -} - -impl ForkedLazyBackend { - fn update_storage(&self, key: &[u8], value: &Option>) { - if let Some(val) = value { - let mut entries: HashMap, StorageCollection> = Default::default(); - 
entries.insert(None, vec![(key.to_vec(), Some(val.clone()))]); - - self.db.write().insert(entries, StateVersion::V1); - } - } - - #[inline] - fn rpc(&self) -> Option<&dyn RPCClient> { - self.rpc_client.as_deref() - } -} - -impl sp_state_machine::Backend> - for ForkedLazyBackend -{ - type Error = as sp_state_machine::Backend>>::Error; - type TrieBackendStorage = PrefixedMemoryDB>; - type RawIter = RawIter; - - fn storage(&self, key: &[u8]) -> Result, Self::Error> { - let remote_fetch = |block: Option| -> Option> { - self.rpc() - .and_then(|rpc| rpc.storage(StorageKey(key.to_vec()), block).ok()) - .flatten() - .map(|v| v.0) - }; - - // When before_fork, try RPC first, then fall back to local DB - if self.before_fork { - if self.rpc().is_some() { - return Ok(remote_fetch(self.block_hash)); - } else { - // No RPC client, try to read from local DB - let readable_db = self.db.read(); - return Ok(readable_db.storage(key).ok().flatten()); - } - } - - let readable_db = self.db.read(); - let maybe_storage = readable_db.storage(key); - let value = match maybe_storage { - Ok(Some(data)) => Some(data), - _ if !self.removed_keys.read().contains_key(key) => { - // Only try remote fetch if RPC client is available - let result = - if self.rpc().is_some() { remote_fetch(Some(self.fork_block)) } else { None }; - - // Cache state - drop(readable_db); - self.update_storage(key, &result); - - result - } - _ => None, - }; - - Ok(value) - } - - fn storage_hash( - &self, - key: &[u8], - ) -> Result as sp_core::Hasher>::Out>, Self::Error> { - let remote_fetch = |block: Option| -> Result< - Option< as sp_core::Hasher>::Out>, - Self::Error, - > { - match self.rpc() { - Some(rpc) => rpc - .storage_hash(StorageKey(key.to_vec()), block) - .map_err(|e| format!("Failed to fetch storage hash from RPC: {e:?}")), - None => Ok(None), - } - }; - - // When before_fork, try RPC first, then fall back to local DB - if self.before_fork { - if self.rpc().is_some() { - return remote_fetch(self.block_hash); - } else { - // No RPC client, try to read from local DB - return Ok(self.db.read().storage_hash(key).ok().flatten()); - } - } - - let storage_hash = self.db.read().storage_hash(key); - match storage_hash { - Ok(Some(hash)) => Ok(Some(hash)), - _ if !self.removed_keys.read().contains_key(key) => { - if self.rpc().is_some() { - remote_fetch(Some(self.fork_block)) - } else { - Ok(None) - } - } - _ => Ok(None), - } - } - - fn closest_merkle_value( - &self, - _key: &[u8], - ) -> Result< - Option as sp_core::Hasher>::Out>>, - Self::Error, - > { - unimplemented!("closest_merkle_value: unsupported feature for lazy loading") - } - - fn child_closest_merkle_value( - &self, - _child_info: &sp_storage::ChildInfo, - _key: &[u8], - ) -> Result< - Option as sp_core::Hasher>::Out>>, - Self::Error, - > { - unimplemented!("child_closest_merkle_value: unsupported feature for lazy loading") - } - - fn child_storage( - &self, - child_info: &sp_storage::ChildInfo, - key: &[u8], - ) -> Result, Self::Error> { - Ok(self.db.read().child_storage(child_info, key).ok().flatten()) - } - - fn child_storage_hash( - &self, - child_info: &sp_storage::ChildInfo, - key: &[u8], - ) -> Result as sp_core::Hasher>::Out>, Self::Error> { - Ok(self.db.read().child_storage_hash(child_info, key).ok().flatten()) - } - - fn next_storage_key( - &self, - key: &[u8], - ) -> Result, Self::Error> { - let remote_fetch = |block: Option| { - let start_key = Some(StorageKey(key.to_vec())); - self.rpc() - .and_then(|rpc| rpc.storage_keys_paged(start_key.clone(), 2, None, block).ok()) - 
.and_then(|keys| keys.last().cloned()) - }; - - let maybe_next_key = if self.before_fork { - // Before the fork checkpoint, try RPC first, then fall back to local DB - if self.rpc().is_some() { - remote_fetch(self.block_hash) - } else { - // No RPC client, try local DB - self.db.read().next_storage_key(key).ok().flatten() - } - } else { - // Try to get the next storage key from the local DB - let next_storage_key = self.db.read().next_storage_key(key); - match next_storage_key { - Ok(Some(next_key)) => Some(next_key), - // If not found locally and key is not marked as removed, fetch remotely - _ if !self.removed_keys.read().contains_key(key) => { - if self.rpc().is_some() { - remote_fetch(Some(self.fork_block)) - } else { - None - } - } - // Otherwise, there's no next key - _ => None, - } - } - .filter(|next_key| next_key != key); - - log::trace!( - target: super::LAZY_LOADING_LOG_TARGET, - "next_storage_key: (key: {:?}, next_key: {:?})", - hex::encode(key), - maybe_next_key.clone().map(hex::encode) - ); - - Ok(maybe_next_key) - } - - fn next_child_storage_key( - &self, - child_info: &sp_storage::ChildInfo, - key: &[u8], - ) -> Result, Self::Error> { - Ok(self.db.read().next_child_storage_key(child_info, key).ok().flatten()) - } - - fn storage_root<'a>( - &self, - delta: impl Iterator)>, - state_version: StateVersion, - ) -> ( as sp_core::Hasher>::Out, BackendTransaction>) - where - as sp_core::Hasher>::Out: Ord, - { - self.db.read().storage_root(delta, state_version) - } - - fn child_storage_root<'a>( - &self, - child_info: &sp_storage::ChildInfo, - delta: impl Iterator)>, - state_version: StateVersion, - ) -> ( as sp_core::Hasher>::Out, bool, BackendTransaction>) - where - as sp_core::Hasher>::Out: Ord, - { - self.db.read().child_storage_root(child_info, delta, state_version) - } - - fn raw_iter(&self, args: sp_state_machine::IterArgs<'_>) -> Result { - let clone = RawIterArgs { - prefix: args.prefix.map(|v| v.to_vec()), - start_at: args.start_at.map(|v| v.to_vec()), - }; - - Ok(RawIter:: { args: clone, complete: false, _phantom: Default::default() }) - } - - fn register_overlay_stats(&self, stats: &sp_state_machine::StateMachineStats) { - self.db.read().register_overlay_stats(stats) - } - - fn usage_info(&self) -> sp_state_machine::UsageInfo { - self.db.read().usage_info() - } -} - -impl sp_state_machine::backend::AsTrieBackend> - for ForkedLazyBackend -{ - type TrieBackendStorage = PrefixedMemoryDB>; - - fn as_trie_backend( - &self, - ) -> &sp_state_machine::TrieBackend> { - unimplemented!("`as_trie_backend` is not supported in lazy loading mode.") - } -} - -/// Lazy loading (In-memory) backend. Keeps all states and blocks in memory. 
-pub struct Backend { - pub(crate) rpc_client: Option>>, - pub(crate) fork_checkpoint: Block::Header, - states: parking_lot::RwLock>>, - pub(crate) blockchain: Blockchain, - import_lock: parking_lot::RwLock<()>, - pinned_blocks: parking_lot::RwLock>, -} - -impl Backend { - fn new(rpc_client: Option>>, fork_checkpoint: Block::Header) -> Self { - Self { - rpc_client: rpc_client.clone(), - states: Default::default(), - blockchain: Blockchain::new(rpc_client), - import_lock: Default::default(), - pinned_blocks: Default::default(), - fork_checkpoint, - } - } - - #[inline] - pub fn rpc(&self) -> Option<&dyn RPCClient> { - self.rpc_client.as_deref() - } -} - -impl backend::AuxStore for Backend { - fn insert_aux< - 'a, - 'b: 'a, - 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >( - &self, - _insert: I, - _delete: D, - ) -> sp_blockchain::Result<()> { - unimplemented!("`insert_aux` is not supported in lazy loading mode.") - } - - fn get_aux(&self, _key: &[u8]) -> sp_blockchain::Result>> { - unimplemented!("`get_aux` is not supported in lazy loading mode.") - } -} - -impl backend::Backend for Backend { - type BlockImportOperation = BlockImportOperation; - type Blockchain = Blockchain; - type State = ForkedLazyBackend; - type OffchainStorage = InMemOffchainStorage; - - fn begin_operation(&self) -> sp_blockchain::Result { - let old_state = self.state_at(Default::default(), TrieCacheContext::Trusted)?; - Ok(BlockImportOperation { - pending_block: None, - old_state, - new_state: None, - aux: Default::default(), - storage_updates: Default::default(), - child_storage_updates: Default::default(), - finalized_blocks: Default::default(), - set_head: None, - before_fork: false, - }) - } - - fn begin_state_operation( - &self, - operation: &mut Self::BlockImportOperation, - block: Block::Hash, - ) -> sp_blockchain::Result<()> { - operation.old_state = self.state_at(block, TrieCacheContext::Trusted)?; - Ok(()) - } - - fn commit_operation(&self, operation: Self::BlockImportOperation) -> sp_blockchain::Result<()> { - for (block, justification) in operation.finalized_blocks { - self.blockchain.finalize_header(block, justification)?; - } - - if let Some(pending_block) = operation.pending_block { - let old_state = &operation.old_state; - let (header, body, justification) = pending_block.block.into_inner(); - let hash = header.hash(); - - let storage_updates = operation.storage_updates.clone(); - let child_storage_updates = operation.child_storage_updates.clone(); - - let mut removed_keys_map = old_state.removed_keys.read().clone(); - for (key, value) in &storage_updates { - if value.is_some() { - removed_keys_map.remove(key); - } else { - removed_keys_map.insert(key.clone(), ()); - } - } - let new_removed_keys = Arc::new(parking_lot::RwLock::new(removed_keys_map)); - - let mut db_clone = old_state.db.read().clone(); - { - let mut entries = vec![(None::, storage_updates.clone())]; - if !child_storage_updates.is_empty() { - entries.extend( - child_storage_updates - .iter() - .map(|(key, data)| (Some(ChildInfo::new_default(key)), data.clone())), - ); - } - db_clone.insert(entries, StateVersion::V1); - } - let new_db = Arc::new(parking_lot::RwLock::new(db_clone)); - let new_state = ForkedLazyBackend { - rpc_client: self.rpc_client.clone(), - block_hash: Some(hash), - fork_block: self.fork_checkpoint.hash(), - db: new_db, - removed_keys: new_removed_keys, - before_fork: operation.before_fork, - }; - self.states.write().insert(hash, new_state); - - self.blockchain.insert(hash, header, justification, body, 
pending_block.state)?; - } - - if !operation.aux.is_empty() { - self.blockchain.write_aux(operation.aux); - } - - if let Some(set_head) = operation.set_head { - self.blockchain.set_head(set_head)?; - } - - Ok(()) - } - - fn finalize_block( - &self, - hash: Block::Hash, - justification: Option, - ) -> sp_blockchain::Result<()> { - self.blockchain.finalize_header(hash, justification) - } - - fn append_justification( - &self, - hash: Block::Hash, - justification: Justification, - ) -> sp_blockchain::Result<()> { - self.blockchain.append_justification(hash, justification) - } - - fn blockchain(&self) -> &Self::Blockchain { - &self.blockchain - } - - fn usage_info(&self) -> Option { - None - } - - fn offchain_storage(&self) -> Option { - None - } - - fn state_at( - &self, - hash: Block::Hash, - _trie_cache_context: TrieCacheContext, - ) -> sp_blockchain::Result { - if hash == Default::default() { - return Ok(ForkedLazyBackend:: { - rpc_client: self.rpc_client.clone(), - block_hash: Some(hash), - fork_block: self.fork_checkpoint.hash(), - db: Default::default(), - removed_keys: Default::default(), - before_fork: true, - }); - } - - let (backend, should_write) = - self.states.read().get(&hash).cloned().map(|state| Ok((state, false))).unwrap_or_else( - || { - self.rpc() - .and_then(|rpc| rpc.header(Some(hash)).ok()) - .flatten() - .ok_or(sp_blockchain::Error::UnknownBlock(format!( - "Failed to fetch block header: {hash:?}" - ))) - .map(|header| { - let checkpoint = self.fork_checkpoint.clone(); - let state = if header.number().gt(checkpoint.number()) { - let parent = self - .state_at(*header.parent_hash(), TrieCacheContext::Trusted) - .ok(); - - ForkedLazyBackend:: { - rpc_client: self.rpc_client.clone(), - block_hash: Some(hash), - fork_block: checkpoint.hash(), - db: parent.clone().map_or(Default::default(), |p| p.db), - removed_keys: parent - .map_or(Default::default(), |p| p.removed_keys), - before_fork: false, - } - } else { - ForkedLazyBackend:: { - rpc_client: self.rpc_client.clone(), - block_hash: Some(hash), - fork_block: checkpoint.hash(), - db: Default::default(), - removed_keys: Default::default(), - before_fork: true, - } - }; - - (state, true) - }) - }, - )?; - - if should_write { - self.states.write().insert(hash, backend.clone()); - } - - Ok(backend) - } - - fn revert( - &self, - n: NumberFor, - revert_finalized: bool, - ) -> sp_blockchain::Result<(NumberFor, HashSet)> { - let mut storage = self.blockchain.storage.write(); - - if storage.blocks.is_empty() { - return Ok((Zero::zero(), HashSet::new())); - } - - let mut states = self.states.write(); - let pinned = self.pinned_blocks.read(); - - let mut target = n; - let original_finalized_number = storage.finalized_number; - - if !target.is_zero() && !revert_finalized { - let revertible = storage.best_number.saturating_sub(storage.finalized_number); - if target > revertible { - target = revertible; - } - } - - let mut reverted = NumberFor::::zero(); - let mut reverted_finalized = HashSet::new(); - - let mut current_hash = storage.best_hash; - let mut current_number = storage.best_number; - - while reverted < target { - if current_number.is_zero() { - break; - } - - if let Some(count) = pinned.get(¤t_hash) { - if *count > 0 { - break; - } - } - - let Some(block) = storage.blocks.get(¤t_hash) else { - break; - }; - - let header = block.header().clone(); - let number = *header.number(); - let parent_hash = header.parent_hash(); - let parent_number = number.saturating_sub(One::one()); - - let parent_becomes_leaf = if number.is_zero() { - 
false - } else { - !storage.blocks.iter().any(|(other_hash, stored)| { - *other_hash != current_hash && stored.header().parent_hash() == parent_hash - }) - }; - - let hash_to_remove = current_hash; - - storage.blocks.remove(&hash_to_remove); - if let Some(entry) = storage.hashes.get(&number) { - if *entry == hash_to_remove { - storage.hashes.remove(&number); - } - } - states.remove(&hash_to_remove); - - storage.leaves.remove( - hash_to_remove, - number, - parent_becomes_leaf.then_some(*parent_hash), - ); - - if number <= original_finalized_number { - reverted_finalized.insert(hash_to_remove); - } - - reverted = reverted.saturating_add(One::one()); - - current_hash = *parent_hash; - current_number = parent_number; - - storage.best_hash = current_hash; - storage.best_number = current_number; - } - - let best_hash_after = storage.best_hash; - let best_number_after = storage.best_number; - let extra_leaves: Vec<_> = - storage.leaves.revert(best_hash_after, best_number_after).collect(); - - for (hash, number) in extra_leaves { - if let Some(count) = pinned.get(&hash) { - if *count > 0 { - return Err(sp_blockchain::Error::Backend(format!( - "Can't revert pinned block {hash:?}", - ))); - } - } - - storage.blocks.remove(&hash); - if let Some(entry) = storage.hashes.get(&number) { - if *entry == hash { - storage.hashes.remove(&number); - } - } - states.remove(&hash); - - if number <= original_finalized_number { - reverted_finalized.insert(hash); - } - } - - storage.hashes.insert(best_number_after, best_hash_after); - - if storage.finalized_number > best_number_after { - storage.finalized_number = best_number_after; - } - - while storage.finalized_number > Zero::zero() - && !storage.hashes.contains_key(&storage.finalized_number) - { - storage.finalized_number = storage.finalized_number.saturating_sub(One::one()); - } - - if let Some(hash) = storage.hashes.get(&storage.finalized_number).copied() { - storage.finalized_hash = hash; - } else { - storage.finalized_hash = storage.genesis_hash; - } - - drop(pinned); - drop(states); - - Ok((reverted, reverted_finalized)) - } - - fn remove_leaf_block(&self, hash: Block::Hash) -> sp_blockchain::Result<()> { - let best_hash = self.blockchain.info().best_hash; - - if best_hash == hash { - return Err(sp_blockchain::Error::Backend( - format!("Can't remove best block {hash:?}",), - )); - } - - let mut storage = self.blockchain.storage.write(); - - let Some(block) = storage.blocks.get(&hash) else { - return Err(sp_blockchain::Error::UnknownBlock(format!("{hash:?}"))); - }; - - let number = *block.header().number(); - let parent_hash = *block.header().parent_hash(); - - if !storage.leaves.contains(number, hash) { - return Err(sp_blockchain::Error::Backend(format!( - "Can't remove non-leaf block {hash:?}", - ))); - } - - if self.pinned_blocks.read().get(&hash).is_some_and(|count| *count > 0) { - return Err(sp_blockchain::Error::Backend(format!( - "Can't remove pinned block {hash:?}", - ))); - } - - let parent_becomes_leaf = if number.is_zero() { - false - } else { - !storage.blocks.iter().any(|(other_hash, stored)| { - *other_hash != hash && stored.header().parent_hash() == &parent_hash - }) - }; - - let mut states = self.states.write(); - - storage.blocks.remove(&hash); - if let Some(entry) = storage.hashes.get(&number) { - if *entry == hash { - storage.hashes.remove(&number); - } - } - states.remove(&hash); - - storage.leaves.remove(hash, number, parent_becomes_leaf.then_some(parent_hash)); - - Ok(()) - } - - fn get_import_lock(&self) -> &parking_lot::RwLock<()> { 
- &self.import_lock - } - - fn requires_full_sync(&self) -> bool { - false - } - - fn pin_block(&self, hash: ::Hash) -> blockchain::Result<()> { - let mut blocks = self.pinned_blocks.write(); - *blocks.entry(hash).or_default() += 1; - Ok(()) - } - - fn unpin_block(&self, hash: ::Hash) { - let mut blocks = self.pinned_blocks.write(); - blocks.entry(hash).and_modify(|counter| *counter -= 1).or_insert(-1); - } -} - -impl backend::LocalBackend for Backend {} - -/// Check that genesis storage is valid. -pub fn check_genesis_storage(storage: &Storage) -> sp_blockchain::Result<()> { - if storage.top.iter().any(|(k, _)| well_known_keys::is_child_storage_key(k)) { - return Err(sp_blockchain::Error::InvalidState); - } - - Ok(()) -} - -pub fn new_backend( - rpc_client: Option>>, - checkpoint: Block::Header, -) -> Result>, Error> -where - Block: BlockT + DeserializeOwned, - Block::Hash: From, -{ - let backend = Arc::new(Backend::new(rpc_client, checkpoint)); - Ok(backend) -} - -#[cfg(test)] -mod tests { - use super::*; - use mock_rpc::{Rpc, TestBlock, TestHeader}; - use polkadot_sdk::{ - sc_client_api::{Backend as BackendT, StateBackend}, - sp_runtime::{ - OpaqueExtrinsic, - traits::{BlakeTwo256, Header as HeaderT}, - }, - sp_state_machine::StorageIterator, - sp_storage::StorageData, - }; - use std::{ - collections::BTreeMap, - sync::atomic::{AtomicUsize, Ordering}, - }; - - mod mock_rpc { - use super::*; - use polkadot_sdk::sp_runtime::{ - generic::{Block as GenericBlock, Header, SignedBlock}, - traits::Header as HeaderT, - }; - - pub type TestHashing = BlakeTwo256; - pub type TestHeader = Header; - pub type TestExtrinsic = OpaqueExtrinsic; - pub type TestBlock = GenericBlock, TestExtrinsic>; - - #[derive(Default, Debug)] - pub struct Counters { - pub storage_calls: AtomicUsize, - pub storage_hash_calls: AtomicUsize, - pub storage_keys_paged_calls: AtomicUsize, - pub header_calls: AtomicUsize, - pub block_calls: AtomicUsize, - } - - /// Mockable RPC with interior mutability. 
- #[allow(clippy::type_complexity)] - #[derive(Clone, Default, Debug)] - pub struct Rpc { - pub counters: std::sync::Arc, - /// storage[(block_hash, key)] = value - pub storage: Arc>>, - /// storage_hash[(block_hash, key)] = hash - pub storage_hashes: - Arc>>, - /// storage_keys_paged[(block_hash, (prefix,start))] = Vec - pub storage_keys_pages: - Arc), Vec>>>, - /// headers[hash] = header - pub headers: Arc>>, - /// blocks[hash] = SignedBlock - pub blocks: Arc>>>, - } - - impl Rpc { - pub fn new() -> Self { - Self { - counters: std::sync::Arc::new(Counters::default()), - storage: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), - storage_hashes: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), - storage_keys_pages: std::sync::Arc::new(parking_lot::RwLock::new( - BTreeMap::new(), - )), - headers: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), - blocks: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), - } - } - - pub fn put_storage(&self, at: Block::Hash, key: StorageKey, val: StorageData) { - self.storage.write().insert((at, key), val); - } - pub fn put_storage_keys_page( - &self, - at: Block::Hash, - prefix: Vec, - keys: Vec, - ) { - self.storage_keys_pages.write().insert((at, prefix), keys); - } - pub fn put_header(&self, h: Block::Header) { - self.headers.write().insert(h.hash(), h); - } - pub fn put_block(&self, block: Block, just: Option) { - let full = SignedBlock { block, justifications: just }; - self.blocks.write().insert(full.block.header().hash(), full); - } - } - - impl RPCClient for Rpc { - fn storage( - &self, - key: StorageKey, - at: Option, - ) -> Result, jsonrpsee::core::ClientError> { - self.counters.storage_calls.fetch_add(1, Ordering::Relaxed); - let map = self.storage.read(); - Ok(map.get(&(at.unwrap_or_default(), key)).cloned()) - } - - fn storage_hash( - &self, - key: StorageKey, - at: Option, - ) -> Result, jsonrpsee::core::ClientError> { - self.counters.storage_hash_calls.fetch_add(1, Ordering::Relaxed); - let bh = at.unwrap_or_default(); - let map = self.storage_hashes.read(); - Ok(map.get(&(bh, key)).copied()) - } - - fn storage_keys_paged( - &self, - key: Option, - count: u32, - start_key: Option, - at: Option, - ) -> Result, jsonrpsee::core::ClientError> - { - self.counters.storage_keys_paged_calls.fetch_add(1, Ordering::Relaxed); - - use std::cmp::min; - - let bh = at.unwrap_or_default(); - let prefix = key.map(|k| k.0).unwrap_or_default(); - let start = start_key.map(|k| k.0); - - let map = self.storage_keys_pages.read(); - let mut all = map.get(&(bh, prefix.clone())).cloned().unwrap_or_default(); - - all.sort_by(|a, b| a.0.cmp(&b.0)); - - let mut filtered: Vec = - all.into_iter().filter(|k| k.0.starts_with(&prefix)).collect(); - - if let Some(s) = start { - if let Some(pos) = filtered.iter().position(|k| k.0 == s) { - filtered = filtered.into_iter().skip(pos + 1).collect(); - } else { - filtered.retain(|k| k.0 > s); - } - } - - let take = min(filtered.len(), count as usize); - Ok(filtered.into_iter().take(take).map(|k| k.0).collect()) - } - - fn header( - &self, - at: Option, - ) -> Result, jsonrpsee::core::ClientError> { - self.counters.header_calls.fetch_add(1, Ordering::Relaxed); - let key = at.unwrap_or_default(); - let raw = self.headers.read().get(&key).cloned(); - Ok(raw) - } - - fn block( - &self, - hash: Option, - ) -> Result>, jsonrpsee::core::ClientError> { - self.counters.block_calls.fetch_add(1, Ordering::Relaxed); - let key = hash.unwrap_or_default(); - let raw = 
self.blocks.read().get(&key).cloned(); - Ok(raw) - } - - fn block_hash( - &self, - _num: Option>, - ) -> Result, jsonrpsee::core::ClientError> { - todo!() - } - - fn system_chain(&self) -> Result { - todo!() - } - - fn system_properties( - &self, - ) -> Result - { - todo!() - } - } - } - - type N = u32; - type TestBlockT = TestBlock; - - fn make_header(number: N, parent: ::Hash) -> TestHeader { - TestHeader::new(number, Default::default(), Default::default(), parent, Default::default()) - } - - fn make_block( - number: N, - parent: ::Hash, - xts: Vec, - ) -> TestBlock { - let header = make_header(number, parent); - TestBlock::new(header, xts) - } - - fn checkpoint(n: N) -> TestHeader { - make_header(n, Default::default()) - } - - #[test] - fn before_fork_reads_remote_only() { - let rpc = std::sync::Arc::new(Rpc::new()); - // fork checkpoint at #100 - let cp = checkpoint(100); - let backend = Backend::::new(Some(rpc.clone()), cp); - - // state_at(Default::default()) => before_fork=true - let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - - let key = b":foo".to_vec(); - // prepare remote value at "block_hash = Default::default()" - let at = Default::default(); - rpc.put_storage(at, StorageKey(key.clone()), StorageData(b"bar".to_vec())); - - // read storage - let v1 = state.storage(&key).unwrap(); - assert_eq!(v1, Some(b"bar".to_vec())); - - // not cached in DB: second read still goes to RPC - let v2 = state.storage(&key).unwrap(); - assert_eq!(v2, Some(b"bar".to_vec())); - assert!(rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed) >= 2); - } - - #[test] - fn after_fork_first_fetch_caches_subsequent_hits_local() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(10); - let backend = Backend::::new(Some(rpc.clone()), cp.clone()); - - // Build a block #11 > checkpoint (#10), with parent #10 - let parent = cp.hash(); - let b11 = make_block(11, parent, vec![]); - let h11 = b11.header.hash(); - - rpc.put_header(b11.header.clone()); - rpc.put_block(b11, None); - - // remote storage at fork block (checkpoint hash) - let fork_hash = cp.hash(); - let key = b":k".to_vec(); - rpc.put_storage(fork_hash, StorageKey(key.clone()), StorageData(b"v".to_vec())); - - // Grab state_at(#11): after_fork=false; local DB empty - let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); - - // First read fetches remote and caches - let v1 = state.storage(&key).unwrap(); - assert_eq!(v1, Some(b"v".to_vec())); - - // Mutate RPC to detect second call (remove remote value) - // If second read still tries RPC, it would return None; but it should come from cache. - // So we do not change the mock; instead, assert RPC call count increases only once. 
- let calls_before = rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed); - let _ = state.storage(&key).unwrap(); - let calls_after = rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed); - assert_eq!(calls_before, calls_after, "second hit should be served from cache"); - } - - #[test] - fn removed_keys_prevents_remote_fetch() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(5); - let backend = Backend::::new(Some(rpc.clone()), cp.clone()); - - // make block #6 - let b6 = make_block(6, cp.hash(), vec![]); - rpc.put_header(b6.header.clone()); - rpc.put_block(b6.clone(), None); - let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); - - // mark key as removed - let key = b":dead".to_vec(); - state.removed_keys.write().insert(key.clone(), ()); - - // Even if remote has a value, backend must not fetch it - rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"ghost".to_vec())); - let calls_before = rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed); - let v = state.storage(&key).unwrap(); - let calls_after = rpc.counters.storage_calls.load(std::sync::atomic::Ordering::Relaxed); - - assert!(v.is_none()); - assert_eq!(calls_before, calls_after, "should not call RPC for removed keys"); - } - - #[test] - fn raw_iter_merges_local_then_remote() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(7); - let backend = Backend::::new(Some(rpc.clone()), cp.clone()); - - // block #8 - let b8 = make_block(8, cp.hash(), vec![]); - rpc.put_header(b8.header.clone()); - rpc.put_block(b8.clone(), None); - let state = backend.state_at(b8.header.hash(), TrieCacheContext::Trusted).unwrap(); - - // Preload local DB with key "a1" - state.update_storage(b"a1", &Some(b"v1".to_vec())); - - // Ensure storage_root is computed to make the key visible to raw_iter - let _ = state.db.write().storage_root( - vec![(b"a1".as_ref(), Some(b"v1".as_ref()))].into_iter(), - StateVersion::V1, - ); - - // Remote has only "a2" under same prefix at fork block (not "a1") - rpc.put_storage_keys_page(cp.hash(), b"a".to_vec(), vec![StorageKey(b"a2".to_vec())]); - rpc.put_storage(cp.hash(), StorageKey(b"a2".to_vec()), StorageData(b"v2".to_vec())); - - let mut args = sp_state_machine::IterArgs::default(); - args.prefix = Some(&b"a"[..]); - let mut it = state.raw_iter(args).unwrap(); - - // next_pair should return ("a1","v1") from local - let p1 = it.next_pair(&state).unwrap().unwrap(); - assert_eq!(p1.0, b"a1".to_vec()); - assert_eq!(p1.1, b"v1".to_vec()); - - // next_pair should now bring remote ("a2","v2") - let p2 = it.next_pair(&state).unwrap().unwrap(); - assert_eq!(p2.0, b"a2".to_vec()); - assert_eq!(p2.1, b"v2".to_vec()); - - // done - assert!(it.next_pair(&state).is_none()); - assert!(it.was_complete()); - } - - #[test] - fn blockchain_header_and_number_are_cached() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(3); - let backend = Backend::::new(Some(rpc.clone()), cp.clone()); - let chain = backend.blockchain(); - - // prepare one block w/ extrinsics - let xts: Vec = vec![]; - let b4 = make_block(4, cp.hash(), xts); - let h4 = b4.header().hash(); - rpc.put_block(b4, None); - - // first header() fetches RPC and caches as Full - let h = chain.header(h4).unwrap().unwrap(); - assert_eq!(h.hash(), h4); - - // number() should now return from cache (no extra RPC needed) - let calls_before = rpc.counters.block_calls.load(std::sync::atomic::Ordering::Relaxed); - let number = 
chain.number(h4).unwrap().unwrap(); - let calls_after = rpc.counters.block_calls.load(std::sync::atomic::Ordering::Relaxed); - - assert_eq!(number, 4); - assert_eq!( - calls_before, calls_after, - "number() should be served from cache after header()" - ); - } -} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs new file mode 100644 index 0000000000000..e7b83a6319d64 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs @@ -0,0 +1,188 @@ +use super::{blockchain::StoredBlock, forked_lazy_backend::ForkedLazyBackend}; +use polkadot_sdk::{ + sc_client_api::backend, + sp_blockchain, + sp_runtime::{ + Justification, Justifications, StateVersion, Storage, + traits::{Block as BlockT, HashingFor}, + }, + sp_state_machine::{self, BackendTransaction, ChildStorageCollection, StorageCollection}, +}; +use serde::de::DeserializeOwned; + +pub(crate) struct PendingBlock { + pub(crate) block: StoredBlock, + pub(crate) state: backend::NewBlockState, +} + +pub struct BlockImportOperation { + pub(crate) pending_block: Option>, + pub(crate) old_state: ForkedLazyBackend, + pub(crate) new_state: Option>>, + pub(crate) aux: Vec<(Vec, Option>)>, + pub(crate) storage_updates: StorageCollection, + pub(crate) child_storage_updates: ChildStorageCollection, + pub(crate) finalized_blocks: Vec<(Block::Hash, Option)>, + pub(crate) set_head: Option, + pub(crate) before_fork: bool, +} + +impl BlockImportOperation { + pub(crate) fn apply_storage( + &mut self, + storage: Storage, + commit: bool, + state_version: StateVersion, + ) -> sp_blockchain::Result { + use sp_state_machine::Backend; + check_genesis_storage(&storage)?; + + let child_delta = storage.children_default.values().map(|child_content| { + ( + &child_content.child_info, + child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), + ) + }); + + let (root, transaction) = self.old_state.full_storage_root( + storage.top.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), + child_delta, + state_version, + ); + + if commit { + self.new_state = Some(transaction); + self.storage_updates = + storage + .top + .iter() + .map(|(k, v)| { + if v.is_empty() { (k.clone(), None) } else { (k.clone(), Some(v.clone())) } + }) + .collect(); + + self.child_storage_updates = storage + .children_default + .values() + .map(|child_content| { + let child_storage: StorageCollection = child_content + .data + .iter() + .map(|(k, v)| { + if v.is_empty() { + (k.clone(), None) + } else { + (k.clone(), Some(v.clone())) + } + }) + .collect(); + (child_content.child_info.storage_key().to_vec(), child_storage) + }) + .collect(); + } + Ok(root) + } +} + +impl backend::BlockImportOperation + for BlockImportOperation +{ + type State = ForkedLazyBackend; + + fn state(&self) -> sp_blockchain::Result> { + Ok(Some(&self.old_state)) + } + + fn set_block_data( + &mut self, + header: ::Header, + body: Option::Extrinsic>>, + _indexed_body: Option>>, + justifications: Option, + state: backend::NewBlockState, + ) -> sp_blockchain::Result<()> { + assert!(self.pending_block.is_none(), "Only one block per operation is allowed"); + self.pending_block = + Some(PendingBlock { block: StoredBlock::new(header, body, justifications), state }); + Ok(()) + } + + fn update_db_storage( + &mut self, + update: BackendTransaction>, + ) -> sp_blockchain::Result<()> { + self.new_state = Some(update); + Ok(()) + } + + fn 
set_genesis_state( + &mut self, + storage: Storage, + commit: bool, + state_version: StateVersion, + ) -> sp_blockchain::Result { + self.apply_storage(storage, commit, state_version) + } + + fn reset_storage( + &mut self, + storage: Storage, + state_version: StateVersion, + ) -> sp_blockchain::Result { + self.apply_storage(storage, true, state_version) + } + + fn insert_aux(&mut self, ops: I) -> sp_blockchain::Result<()> + where + I: IntoIterator, Option>)>, + { + self.aux.append(&mut ops.into_iter().collect()); + Ok(()) + } + + fn update_storage( + &mut self, + update: StorageCollection, + child_update: ChildStorageCollection, + ) -> sp_blockchain::Result<()> { + self.storage_updates = update; + self.child_storage_updates = child_update; + Ok(()) + } + + fn mark_finalized( + &mut self, + hash: Block::Hash, + justification: Option, + ) -> sp_blockchain::Result<()> { + self.finalized_blocks.push((hash, justification)); + Ok(()) + } + + fn mark_head(&mut self, hash: Block::Hash) -> sp_blockchain::Result<()> { + assert!(self.pending_block.is_none(), "Only one set block per operation is allowed"); + self.set_head = Some(hash); + Ok(()) + } + + fn update_transaction_index( + &mut self, + _index: Vec, + ) -> sp_blockchain::Result<()> { + Ok(()) + } + + fn set_create_gap(&mut self, _create_gap: bool) {} +} + +pub(crate) fn check_genesis_storage(storage: &Storage) -> sp_blockchain::Result<()> { + if storage + .top + .iter() + .any(|(k, _)| polkadot_sdk::sp_core::storage::well_known_keys::is_child_storage_key(k)) + { + return Err(sp_blockchain::Error::InvalidState); + } + + Ok(()) +} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs new file mode 100644 index 0000000000000..57efea0e78722 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs @@ -0,0 +1,474 @@ +use crate::substrate_node::lazy_loading::{LAZY_LOADING_LOG_TARGET, rpc_client::RPCClient}; +use polkadot_sdk::{ + sc_client_api::{ + backend::{self, NewBlockState}, + blockchain::{self, BlockStatus, HeaderBackend}, + leaves::LeafSet, + }, + sp_blockchain::{self, CachedHeaderMetadata, HeaderMetadata}, + sp_runtime::{ + Justification, Justifications, + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT, NumberFor, Zero}, + }, +}; +use serde::de::DeserializeOwned; +use std::{collections::HashMap, ptr, sync::Arc}; + +#[derive(PartialEq, Eq, Clone)] +pub(crate) enum StoredBlock { + Header(B::Header, Option), + Full(B, Option), +} + +impl StoredBlock { + pub(crate) fn new( + header: B::Header, + body: Option>, + just: Option, + ) -> Self { + match body { + Some(body) => Self::Full(B::new(header, body), just), + None => Self::Header(header, just), + } + } + + pub(crate) fn header(&self) -> &B::Header { + match *self { + Self::Header(ref h, _) => h, + Self::Full(ref b, _) => b.header(), + } + } + + pub(crate) fn justifications(&self) -> Option<&Justifications> { + match *self { + Self::Header(_, ref j) | Self::Full(_, ref j) => j.as_ref(), + } + } + + pub(crate) fn extrinsics(&self) -> Option<&[B::Extrinsic]> { + match *self { + Self::Header(_, _) => None, + Self::Full(ref b, _) => Some(b.extrinsics()), + } + } + + pub(crate) fn into_inner( + self, + ) -> (B::Header, Option>, Option) { + match self { + Self::Header(header, just) => (header, None, just), + Self::Full(block, just) => { + let (header, body) = block.deconstruct(); + (header, Some(body), just) + } + } + } +} + 
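As a quick illustration of the `StoredBlock` contract above (a sketch, not part of the patched file): `new` keeps only the header when no body is supplied and builds the `Full` variant otherwise, and `into_inner` hands back exactly what was stored. The sketch assumes crate-internal visibility and mirrors the `TestHeader`/`TestBlock` aliases used by the crate's unit tests; the `Sketch*` names are placeholders of this sketch, not items of this module.

    use polkadot_sdk::sp_runtime::{
        OpaqueExtrinsic,
        generic::{Block as GenericBlock, Header},
        traits::BlakeTwo256,
    };

    type SketchHeader = Header<u32, BlakeTwo256>;
    type SketchBlock = GenericBlock<SketchHeader, OpaqueExtrinsic>;

    fn stored_block_round_trip(header: SketchHeader) {
        // No body provided: only the header (plus optional justifications) is kept.
        let header_only = StoredBlock::<SketchBlock>::new(header.clone(), None, None);
        assert!(header_only.extrinsics().is_none());

        // A body (even an empty one) promotes the entry to the `Full` variant.
        let full = StoredBlock::<SketchBlock>::new(header, Some(Vec::new()), None);
        assert_eq!(full.extrinsics().map(|xs| xs.len()), Some(0));

        // `into_inner` returns the stored pieces unchanged.
        let (_header, body, justifications) = full.into_inner();
        assert_eq!(body, Some(Vec::new()));
        assert!(justifications.is_none());
    }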
+#[derive(Clone)] +pub(crate) struct BlockchainStorage { + pub(crate) blocks: HashMap>, + pub(crate) hashes: HashMap, Block::Hash>, + pub(crate) best_hash: Block::Hash, + pub(crate) best_number: NumberFor, + pub(crate) finalized_hash: Block::Hash, + pub(crate) finalized_number: NumberFor, + pub(crate) genesis_hash: Block::Hash, + pub(crate) header_cht_roots: HashMap, Block::Hash>, + pub(crate) leaves: LeafSet>, + pub(crate) aux: HashMap, Vec>, +} + +/// In-memory blockchain. Supports concurrent reads. +#[derive(Clone)] +pub struct Blockchain { + rpc_client: Option>>, + pub(crate) storage: Arc>>, +} + +impl Blockchain { + /// Create new in-memory blockchain storage. + pub(crate) fn new(rpc_client: Option>>) -> Self { + let storage = Arc::new(parking_lot::RwLock::new(BlockchainStorage { + blocks: HashMap::new(), + hashes: HashMap::new(), + best_hash: Default::default(), + best_number: Zero::zero(), + finalized_hash: Default::default(), + finalized_number: Zero::zero(), + genesis_hash: Default::default(), + header_cht_roots: HashMap::new(), + leaves: LeafSet::new(), + aux: HashMap::new(), + })); + Self { rpc_client, storage } + } + + #[inline] + fn rpc(&self) -> Option<&dyn RPCClient> { + self.rpc_client.as_deref() + } + + /// Get header hash of given block. + pub fn id(&self, id: BlockId) -> Option { + match id { + BlockId::Hash(h) => Some(h), + BlockId::Number(n) => { + let block_hash = self.storage.read().hashes.get(&n).copied(); + + match block_hash { + None => { + let block_hash = + self.rpc().and_then(|rpc| rpc.block_hash(Some(n)).ok().flatten()); + if let Some(h) = block_hash { + self.storage.write().hashes.insert(n, h); + } + block_hash + } + block_hash => block_hash, + } + } + } + } + + /// Insert a block header and associated data. + pub fn insert( + &self, + hash: Block::Hash, + header: ::Header, + justifications: Option, + body: Option::Extrinsic>>, + new_state: NewBlockState, + ) -> sp_blockchain::Result<()> { + let number = *header.number(); + + if new_state.is_best() { + self.apply_head(&header)?; + } + + let mut storage = self.storage.write(); + + // Always insert the block into blocks and hashes storage + storage.blocks.insert(hash, StoredBlock::new(header.clone(), body, justifications)); + storage.hashes.insert(number, hash); + + // Set genesis_hash only for the first block inserted + if storage.blocks.len() == 1 { + storage.genesis_hash = hash; + } + + // Update leaves for non-genesis blocks + if storage.blocks.len() > 1 { + storage.leaves.import(hash, number, *header.parent_hash()); + } + + // Finalize block only if explicitly requested via new_state + if let NewBlockState::Final = new_state { + storage.finalized_hash = hash; + storage.finalized_number = number; + } + + Ok(()) + } + + /// Get total number of blocks. + pub fn blocks_count(&self) -> usize { + self.storage.read().blocks.len() + } + + /// Compare this blockchain with another in-mem blockchain + pub fn equals_to(&self, other: &Self) -> bool { + // Check ptr equality first to avoid double read locks. + if ptr::eq(self, other) { + return true; + } + self.canon_equals_to(other) && self.storage.read().blocks == other.storage.read().blocks + } + + /// Compare canonical chain to other canonical chain. + pub fn canon_equals_to(&self, other: &Self) -> bool { + // Check ptr equality first to avoid double read locks. 
+ if ptr::eq(self, other) { + return true; + } + let this = self.storage.read(); + let other = other.storage.read(); + this.hashes == other.hashes + && this.best_hash == other.best_hash + && this.best_number == other.best_number + && this.genesis_hash == other.genesis_hash + } + + /// Insert header CHT root. + pub fn insert_cht_root(&self, block: NumberFor, cht_root: Block::Hash) { + self.storage.write().header_cht_roots.insert(block, cht_root); + } + + /// Set an existing block as head. + pub fn set_head(&self, hash: Block::Hash) -> sp_blockchain::Result<()> { + let header = self + .header(hash)? + .ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{hash:?}")))?; + + self.apply_head(&header) + } + + fn apply_head(&self, header: &::Header) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + + let hash = header.hash(); + let number = header.number(); + + storage.best_hash = hash; + storage.best_number = *number; + storage.hashes.insert(*number, hash); + + Ok(()) + } + + pub(crate) fn finalize_header( + &self, + block: Block::Hash, + justification: Option, + ) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + storage.finalized_hash = block; + + if justification.is_some() { + let block = storage + .blocks + .get_mut(&block) + .expect("hash was fetched from a block in the db; qed"); + + let block_justifications = match block { + StoredBlock::Header(_, j) | StoredBlock::Full(_, j) => j, + }; + + *block_justifications = justification.map(Justifications::from); + } + + Ok(()) + } + + pub(crate) fn append_justification( + &self, + hash: Block::Hash, + justification: Justification, + ) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + + let block = + storage.blocks.get_mut(&hash).expect("hash was fetched from a block in the db; qed"); + + let block_justifications = match block { + StoredBlock::Header(_, j) | StoredBlock::Full(_, j) => j, + }; + + if let Some(stored_justifications) = block_justifications { + if !stored_justifications.append(justification) { + return Err(sp_blockchain::Error::BadJustification( + "Duplicate consensus engine ID".into(), + )); + } + } else { + *block_justifications = Some(Justifications::from(justification)); + }; + + Ok(()) + } + + pub(crate) fn write_aux(&self, ops: Vec<(Vec, Option>)>) { + let mut storage = self.storage.write(); + for (k, v) in ops { + match v { + Some(v) => storage.aux.insert(k, v), + None => storage.aux.remove(&k), + }; + } + } +} + +impl HeaderBackend for Blockchain { + fn header( + &self, + hash: Block::Hash, + ) -> sp_blockchain::Result::Header>> { + // First, try to get the header from local storage + if let Some(header) = self.storage.read().blocks.get(&hash).map(|b| b.header().clone()) { + return Ok(Some(header)); + } + + // If not found in local storage, fetch from RPC client + let header = if let Some(rpc) = self.rpc() { + rpc.block(Some(hash)).ok().flatten().map(|full| { + let block = full.block.clone(); + self.storage + .write() + .blocks + .insert(hash, StoredBlock::Full(block.clone(), full.justifications)); + block.header().clone() + }) + } else { + None + }; + + if header.is_none() { + log::warn!( + target: LAZY_LOADING_LOG_TARGET, + "Expected block {:x?} to exist.", + &hash + ); + } + + Ok(header) + } + + fn info(&self) -> blockchain::Info { + let storage = self.storage.read(); + let finalized_state = if storage.blocks.len() <= 1 { + None + } else { + Some((storage.finalized_hash, storage.finalized_number)) + }; + + blockchain::Info { + best_hash: 
storage.best_hash, + best_number: storage.best_number, + genesis_hash: storage.genesis_hash, + finalized_hash: storage.finalized_hash, + finalized_number: storage.finalized_number, + finalized_state, + number_leaves: storage.leaves.count(), + block_gap: None, + } + } + + fn status(&self, hash: Block::Hash) -> sp_blockchain::Result { + match self.storage.read().blocks.contains_key(&hash) { + true => Ok(BlockStatus::InChain), + false => Ok(BlockStatus::Unknown), + } + } + + fn number(&self, hash: Block::Hash) -> sp_blockchain::Result>> { + if let Some(b) = self.storage.read().blocks.get(&hash) { + return Ok(Some(*b.header().number())); + } + match self.rpc() { + Some(rpc) => match rpc.block(Some(hash)) { + Ok(Some(block)) => Ok(Some(*block.block.header().number())), + err => Err(sp_blockchain::Error::UnknownBlock(format!( + "Failed to fetch block number from RPC: {err:?}" + ))), + }, + None => Err(sp_blockchain::Error::UnknownBlock( + "RPC not configured to resolve block number".into(), + )), + } + } + + fn hash( + &self, + number: <::Header as HeaderT>::Number, + ) -> sp_blockchain::Result> { + Ok(self.id(BlockId::Number(number))) + } +} + +impl HeaderMetadata for Blockchain { + type Error = sp_blockchain::Error; + + fn header_metadata( + &self, + hash: Block::Hash, + ) -> Result, Self::Error> { + self.header(hash)?.map(|header| CachedHeaderMetadata::from(&header)).ok_or_else(|| { + sp_blockchain::Error::UnknownBlock(format!("header not found: {hash:?}")) + }) + } + + fn insert_header_metadata(&self, _hash: Block::Hash, _metadata: CachedHeaderMetadata) { + // No need to implement. + unimplemented!("insert_header_metadata") + } + fn remove_header_metadata(&self, _hash: Block::Hash) { + // No need to implement. + unimplemented!("remove_header_metadata") + } +} + +impl blockchain::Backend for Blockchain { + fn body( + &self, + hash: Block::Hash, + ) -> sp_blockchain::Result::Extrinsic>>> { + if let Some(xs) = + self.storage.read().blocks.get(&hash).and_then(|b| b.extrinsics().map(|x| x.to_vec())) + { + return Ok(Some(xs)); + } + let extrinsics = self.rpc().and_then(|rpc| { + rpc.block(Some(hash)).ok().flatten().map(|b| b.block.extrinsics().to_vec()) + }); + Ok(extrinsics) + } + + fn justifications(&self, hash: Block::Hash) -> sp_blockchain::Result> { + Ok(self.storage.read().blocks.get(&hash).and_then(|b| b.justifications().cloned())) + } + + fn last_finalized(&self) -> sp_blockchain::Result { + let last_finalized = self.storage.read().finalized_hash; + + Ok(last_finalized) + } + + fn leaves(&self) -> sp_blockchain::Result> { + let leaves = self.storage.read().leaves.hashes(); + + Ok(leaves) + } + + fn children(&self, _parent_hash: Block::Hash) -> sp_blockchain::Result> { + unimplemented!("Not supported by the `lazy-loading` backend.") + } + + fn indexed_transaction(&self, _hash: Block::Hash) -> sp_blockchain::Result>> { + unimplemented!("Not supported by the `lazy-loading` backend.") + } + + fn block_indexed_body( + &self, + _hash: Block::Hash, + ) -> sp_blockchain::Result>>> { + unimplemented!("Not supported by the `lazy-loading` backend.") + } +} + +impl backend::AuxStore for Blockchain { + fn insert_aux< + 'a, + 'b: 'a, + 'c: 'a, + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> sp_blockchain::Result<()> { + let mut storage = self.storage.write(); + for (k, v) in insert { + storage.aux.insert(k.to_vec(), v.to_vec()); + } + for k in delete { + storage.aux.remove(*k); + } + Ok(()) + } + + fn get_aux(&self, key: &[u8]) -> sp_blockchain::Result>> { + 
Ok(self.storage.read().aux.get(key).cloned()) + } +} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs new file mode 100644 index 0000000000000..e5727d1dbec8c --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -0,0 +1,530 @@ +use crate::substrate_node::lazy_loading::{LAZY_LOADING_LOG_TARGET, rpc_client::RPCClient}; +use alloy_primitives::hex; +use polkadot_sdk::{ + sc_client_api::StorageKey, + sp_core, + sp_runtime::{ + StateVersion, + traits::{Block as BlockT, HashingFor}, + }, + sp_state_machine::{ + self, BackendTransaction, InMemoryBackend, IterArgs, StorageCollection, StorageValue, + TrieBackend, backend::AsTrieBackend, + }, + sp_storage::ChildInfo, + sp_trie::{self, PrefixedMemoryDB}, +}; +use serde::de::DeserializeOwned; +use std::{collections::HashMap, marker::PhantomData, sync::Arc}; + +/// DB-backed patricia trie state, transaction type is an overlay of changes to commit. +pub type DbState = TrieBackend>>, HashingFor>; + +/// A struct containing arguments for iterating over the storage. +#[derive(Default)] +pub struct RawIterArgs { + /// The prefix of the keys over which to iterate. + pub prefix: Option>, + + /// The prefix from which to start the iteration from. + /// + /// This is inclusive and the iteration will include the key which is specified here. + pub start_at: Option>, +} + +/// A raw iterator over the `BenchmarkingState`. +pub struct RawIter { + pub(crate) args: RawIterArgs, + complete: bool, + _phantom: PhantomData, +} + +impl sp_state_machine::StorageIterator> + for RawIter +{ + type Backend = ForkedLazyBackend; + type Error = String; + + fn next_key( + &mut self, + backend: &Self::Backend, + ) -> Option> { + use sp_state_machine::Backend; + + let remote_fetch = + |key: Option, start_key: Option, block: Option| { + backend + .rpc() + .and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()) + .and_then(|keys| keys.first().cloned()) + }; + + let prefix = self.args.prefix.clone().map(StorageKey); + let start_key = self.args.start_at.clone().map(StorageKey); + + let maybe_next_key = if backend.before_fork { + // If RPC client is available, fetch remotely + if backend.rpc().is_some() { + remote_fetch(prefix, start_key, backend.block_hash) + } else { + // No RPC client, use local DB + let mut iter_args = sp_state_machine::backend::IterArgs::default(); + iter_args.prefix = self.args.prefix.as_deref(); + iter_args.start_at = self.args.start_at.as_deref(); + iter_args.stop_on_incomplete_database = true; + + let readable_db = backend.db.read(); + readable_db + .raw_iter(iter_args) + .map(|mut iter| iter.next_key(&readable_db)) + .map(|op| op.and_then(|result| result.ok())) + .ok() + .flatten() + } + } else { + // First, try to get next key from local DB + let next_storage_key = if let Some(ref start) = self.args.start_at { + // If we have a start_at, use next_storage_key to get the next one after it + backend.db.read().next_storage_key(start).ok().flatten() + } else { + // No start_at, use raw_iter to get the first key with the prefix + let mut iter_args = sp_state_machine::backend::IterArgs::default(); + iter_args.prefix = self.args.prefix.as_deref(); + iter_args.stop_on_incomplete_database = true; + + let readable_db = backend.db.read(); + readable_db + .raw_iter(iter_args) + .map(|mut iter| iter.next_key(&readable_db)) + .map(|op| op.and_then(|result| result.ok())) + 
.ok() + .flatten() + }; + + // Filter by prefix if necessary + let next_storage_key = next_storage_key + .filter(|key| prefix.as_ref().map(|p| key.starts_with(&p.0)).unwrap_or(true)); + + let removed_key = start_key + .clone() + .or(prefix.clone()) + .map(|key| backend.removed_keys.read().contains_key(&key.0)) + .unwrap_or(false); + if next_storage_key.is_none() && !removed_key { + let maybe_next_key = if backend.rpc().is_some() { + remote_fetch(prefix, start_key, Some(backend.fork_block)) + } else { + None + }; + match maybe_next_key { + Some(key) if !backend.removed_keys.read().contains_key(&key) => Some(key), + _ => None, + } + } else { + next_storage_key + } + }; + + log::trace!( + target: LAZY_LOADING_LOG_TARGET, + "next_key: (prefix: {:?}, start_at: {:?}, next_key: {:?})", + self.args.prefix.clone().map(hex::encode), + self.args.start_at.clone().map(hex::encode), + maybe_next_key.clone().map(hex::encode) + ); + + if let Some(next_key) = maybe_next_key { + if self + .args + .prefix + .clone() + .map(|filter_key| next_key.starts_with(&filter_key)) + .unwrap_or(false) + { + self.args.start_at = Some(next_key.clone()); + Some(Ok(next_key)) + } else { + self.complete = true; + None + } + } else { + self.complete = true; + None + } + } + + fn next_pair( + &mut self, + backend: &Self::Backend, + ) -> Option> + { + use sp_state_machine::Backend; + + let remote_fetch = + |key: Option, start_key: Option, block: Option| { + backend + .rpc() + .and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()) + .and_then(|keys| keys.first().cloned()) + }; + + let prefix = self.args.prefix.clone().map(StorageKey); + let start_key = self.args.start_at.clone().map(StorageKey); + + let maybe_next_key = if backend.before_fork { + // If RPC client is available, fetch remotely + if backend.rpc().is_some() { + remote_fetch(prefix, start_key, backend.block_hash) + } else { + // No RPC client, use local DB + let mut iter_args = sp_state_machine::backend::IterArgs::default(); + iter_args.prefix = self.args.prefix.as_deref(); + iter_args.start_at = self.args.start_at.as_deref(); + iter_args.stop_on_incomplete_database = true; + + let readable_db = backend.db.read(); + readable_db + .raw_iter(iter_args) + .map(|mut iter| iter.next_key(&readable_db)) + .map(|op| op.and_then(|result| result.ok())) + .ok() + .flatten() + } + } else { + // First, try to get next key from local DB + let next_storage_key = if let Some(ref start) = self.args.start_at { + // If we have a start_at, use next_storage_key to get the next one after it + backend.db.read().next_storage_key(start).ok().flatten() + } else { + // No start_at, use raw_iter to get the first key with the prefix + let mut iter_args = sp_state_machine::backend::IterArgs::default(); + iter_args.prefix = self.args.prefix.as_deref(); + iter_args.stop_on_incomplete_database = true; + + let readable_db = backend.db.read(); + readable_db + .raw_iter(iter_args) + .map(|mut iter| iter.next_key(&readable_db)) + .map(|op| op.and_then(|result| result.ok())) + .ok() + .flatten() + }; + + // Filter by prefix if necessary + let next_storage_key = next_storage_key + .filter(|key| prefix.as_ref().map(|p| key.starts_with(&p.0)).unwrap_or(true)); + + let removed_key = start_key + .clone() + .or(prefix.clone()) + .map(|key| backend.removed_keys.read().contains_key(&key.0)) + .unwrap_or(false); + if next_storage_key.is_none() && !removed_key { + let maybe_next_key = if backend.rpc().is_some() { + remote_fetch(prefix, start_key, Some(backend.fork_block)) + } else { + None + }; + 
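+                // Treat locally removed keys as tombstones: any key fetched from the remote node is discarded if it was deleted on top of the fork state.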
match maybe_next_key { + Some(key) if !backend.removed_keys.read().contains_key(&key) => Some(key), + _ => None, + } + } else { + next_storage_key + } + }; + + log::trace!( + target: LAZY_LOADING_LOG_TARGET, + "next_pair: (prefix: {:?}, start_at: {:?}, next_key: {:?})", + self.args.prefix.clone().map(hex::encode), + self.args.start_at.clone().map(hex::encode), + maybe_next_key.clone().map(hex::encode) + ); + + let maybe_value = maybe_next_key + .clone() + .and_then(|key| (*backend).storage(key.as_slice()).ok()) + .flatten(); + + if let Some(next_key) = maybe_next_key { + if self + .args + .prefix + .clone() + .map(|filter_key| next_key.starts_with(&filter_key)) + .unwrap_or(false) + { + self.args.start_at = Some(next_key.clone()); + maybe_value.map(|value| Ok((next_key, value))) + } else { + self.complete = true; + None + } + } else { + self.complete = true; + None + } + } + + fn was_complete(&self) -> bool { + self.complete + } +} + +#[derive(Debug, Clone)] +pub struct ForkedLazyBackend { + pub(crate) rpc_client: Option>>, + pub(crate) block_hash: Option, + pub(crate) fork_block: Block::Hash, + pub(crate) db: Arc>>>, + pub(crate) removed_keys: Arc, ()>>>, + pub(crate) before_fork: bool, +} + +impl ForkedLazyBackend { + pub(crate) fn update_storage(&self, key: &[u8], value: &Option>) { + if let Some(val) = value { + let mut entries: HashMap, StorageCollection> = Default::default(); + entries.insert(None, vec![(key.to_vec(), Some(val.clone()))]); + + self.db.write().insert(entries, StateVersion::V1); + } + } + + #[inline] + pub(crate) fn rpc(&self) -> Option<&dyn RPCClient> { + self.rpc_client.as_deref() + } +} + +impl sp_state_machine::Backend> + for ForkedLazyBackend +{ + type Error = as sp_state_machine::Backend>>::Error; + type TrieBackendStorage = PrefixedMemoryDB>; + type RawIter = RawIter; + + fn storage(&self, key: &[u8]) -> Result, Self::Error> { + let remote_fetch = |block: Option| -> Option> { + self.rpc() + .and_then(|rpc| rpc.storage(StorageKey(key.to_vec()), block).ok()) + .flatten() + .map(|v| v.0) + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return Ok(remote_fetch(self.block_hash)); + } else { + // No RPC client, try to read from local DB + let readable_db = self.db.read(); + return Ok(readable_db.storage(key).ok().flatten()); + } + } + + let readable_db = self.db.read(); + let maybe_storage = readable_db.storage(key); + let value = match maybe_storage { + Ok(Some(data)) => Some(data), + _ if !self.removed_keys.read().contains_key(key) => { + // Only try remote fetch if RPC client is available + let result = + if self.rpc().is_some() { remote_fetch(Some(self.fork_block)) } else { None }; + + // Cache state + drop(readable_db); + self.update_storage(key, &result); + + result + } + _ => None, + }; + + Ok(value) + } + + fn storage_hash( + &self, + key: &[u8], + ) -> Result as sp_core::Hasher>::Out>, Self::Error> { + let remote_fetch = |block: Option| -> Result< + Option< as sp_core::Hasher>::Out>, + Self::Error, + > { + match self.rpc() { + Some(rpc) => rpc + .storage_hash(StorageKey(key.to_vec()), block) + .map_err(|e| format!("Failed to fetch storage hash from RPC: {e:?}")), + None => Ok(None), + } + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return remote_fetch(self.block_hash); + } else { + // No RPC client, try to read from local DB + return Ok(self.db.read().storage_hash(key).ok().flatten()); + } + } + + let 
storage_hash = self.db.read().storage_hash(key); + match storage_hash { + Ok(Some(hash)) => Ok(Some(hash)), + _ if !self.removed_keys.read().contains_key(key) => { + if self.rpc().is_some() { + remote_fetch(Some(self.fork_block)) + } else { + Ok(None) + } + } + _ => Ok(None), + } + } + + fn closest_merkle_value( + &self, + _key: &[u8], + ) -> Result< + Option as sp_core::Hasher>::Out>>, + Self::Error, + > { + unimplemented!("closest_merkle_value: unsupported feature for lazy loading") + } + + fn child_closest_merkle_value( + &self, + _child_info: &ChildInfo, + _key: &[u8], + ) -> Result< + Option as sp_core::Hasher>::Out>>, + Self::Error, + > { + unimplemented!("child_closest_merkle_value: unsupported feature for lazy loading") + } + + fn child_storage( + &self, + child_info: &ChildInfo, + key: &[u8], + ) -> Result, Self::Error> { + Ok(self.db.read().child_storage(child_info, key).ok().flatten()) + } + + fn child_storage_hash( + &self, + child_info: &ChildInfo, + key: &[u8], + ) -> Result as sp_core::Hasher>::Out>, Self::Error> { + Ok(self.db.read().child_storage_hash(child_info, key).ok().flatten()) + } + + fn next_storage_key( + &self, + key: &[u8], + ) -> Result, Self::Error> { + let remote_fetch = |block: Option| { + let start_key = Some(StorageKey(key.to_vec())); + self.rpc() + .and_then(|rpc| rpc.storage_keys_paged(start_key.clone(), 2, None, block).ok()) + .and_then(|keys| keys.last().cloned()) + }; + + let maybe_next_key = if self.before_fork { + // Before the fork checkpoint, try RPC first, then fall back to local DB + if self.rpc().is_some() { + remote_fetch(self.block_hash) + } else { + // No RPC client, try local DB + self.db.read().next_storage_key(key).ok().flatten() + } + } else { + // Try to get the next storage key from the local DB + let next_storage_key = self.db.read().next_storage_key(key); + match next_storage_key { + Ok(Some(next_key)) => Some(next_key), + // If not found locally and key is not marked as removed, fetch remotely + _ if !self.removed_keys.read().contains_key(key) => { + if self.rpc().is_some() { + remote_fetch(Some(self.fork_block)) + } else { + None + } + } + // Otherwise, there's no next key + _ => None, + } + } + .filter(|next_key| next_key != key); + + log::trace!( + target: LAZY_LOADING_LOG_TARGET, + "next_storage_key: (key: {:?}, next_key: {:?})", + hex::encode(key), + maybe_next_key.clone().map(hex::encode) + ); + + Ok(maybe_next_key) + } + + fn next_child_storage_key( + &self, + child_info: &ChildInfo, + key: &[u8], + ) -> Result, Self::Error> { + Ok(self.db.read().next_child_storage_key(child_info, key).ok().flatten()) + } + + fn storage_root<'a>( + &self, + delta: impl Iterator)>, + state_version: StateVersion, + ) -> ( as sp_core::Hasher>::Out, BackendTransaction>) + where + as sp_core::Hasher>::Out: Ord, + { + self.db.read().storage_root(delta, state_version) + } + + fn child_storage_root<'a>( + &self, + child_info: &ChildInfo, + delta: impl Iterator)>, + state_version: StateVersion, + ) -> ( as sp_core::Hasher>::Out, bool, BackendTransaction>) + where + as sp_core::Hasher>::Out: Ord, + { + self.db.read().child_storage_root(child_info, delta, state_version) + } + + fn raw_iter(&self, args: IterArgs<'_>) -> Result { + let clone = RawIterArgs { + prefix: args.prefix.map(|v| v.to_vec()), + start_at: args.start_at.map(|v| v.to_vec()), + }; + + Ok(RawIter:: { args: clone, complete: false, _phantom: Default::default() }) + } + + fn register_overlay_stats(&self, stats: &sp_state_machine::StateMachineStats) { + 
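+        // Delegate overlay statistics to the underlying in-memory state backend.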
self.db.read().register_overlay_stats(stats) + } + + fn usage_info(&self) -> sp_state_machine::UsageInfo { + self.db.read().usage_info() + } +} + +impl AsTrieBackend> for ForkedLazyBackend { + type TrieBackendStorage = PrefixedMemoryDB>; + + fn as_trie_backend( + &self, + ) -> &sp_state_machine::TrieBackend> { + unimplemented!("`as_trie_backend` is not supported in lazy loading mode.") + } +} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs new file mode 100644 index 0000000000000..25dfbe5b2fd17 --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -0,0 +1,482 @@ +mod block_import_operation; +mod blockchain; +mod forked_lazy_backend; + +pub use block_import_operation::BlockImportOperation; +pub use blockchain::Blockchain; +pub use forked_lazy_backend::ForkedLazyBackend; + +#[cfg(test)] +mod tests; + +use polkadot_sdk::{ + sc_client_api::{ + HeaderBackend, TrieCacheContext, UsageInfo, + backend::{self, AuxStore}, + }, + sp_blockchain, + sp_core::{H256, offchain::storage::InMemOffchainStorage}, + sp_runtime::{ + Justification, StateVersion, + traits::{Block as BlockT, Header as HeaderT, NumberFor, One, Saturating, Zero}, + }, +}; +use serde::de::DeserializeOwned; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; + +use crate::substrate_node::lazy_loading::rpc_client::RPCClient; + +pub struct Backend { + pub(crate) rpc_client: Option>>, + pub(crate) fork_checkpoint: Block::Header, + states: parking_lot::RwLock>>, + pub(crate) blockchain: Blockchain, + import_lock: parking_lot::RwLock<()>, + pinned_blocks: parking_lot::RwLock>, +} + +impl Backend { + fn new(rpc_client: Option>>, fork_checkpoint: Block::Header) -> Self { + Self { + rpc_client: rpc_client.clone(), + states: Default::default(), + blockchain: Blockchain::new(rpc_client), + import_lock: Default::default(), + pinned_blocks: Default::default(), + fork_checkpoint, + } + } + + #[inline] + pub fn rpc(&self) -> Option<&dyn RPCClient> { + self.rpc_client.as_deref() + } +} + +impl AuxStore for Backend { + fn insert_aux< + 'a, + 'b: 'a, + 'c: 'a, + I: IntoIterator, + D: IntoIterator, + >( + &self, + _insert: I, + _delete: D, + ) -> sp_blockchain::Result<()> { + unimplemented!("`insert_aux` is not supported in lazy loading mode.") + } + + fn get_aux(&self, _key: &[u8]) -> sp_blockchain::Result>> { + unimplemented!("`get_aux` is not supported in lazy loading mode.") + } +} + +impl backend::Backend for Backend { + type BlockImportOperation = BlockImportOperation; + type Blockchain = Blockchain; + type State = ForkedLazyBackend; + type OffchainStorage = InMemOffchainStorage; + + fn begin_operation(&self) -> sp_blockchain::Result { + let old_state = self.state_at(Default::default(), TrieCacheContext::Trusted)?; + Ok(BlockImportOperation { + pending_block: None, + old_state, + new_state: None, + aux: Default::default(), + storage_updates: Default::default(), + child_storage_updates: Default::default(), + finalized_blocks: Default::default(), + set_head: None, + before_fork: false, + }) + } + + fn begin_state_operation( + &self, + operation: &mut Self::BlockImportOperation, + block: Block::Hash, + ) -> sp_blockchain::Result<()> { + operation.old_state = self.state_at(block, TrieCacheContext::Trusted)?; + Ok(()) + } + + fn commit_operation(&self, operation: Self::BlockImportOperation) -> sp_blockchain::Result<()> { + for (block, justification) in operation.finalized_blocks { + 
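+            // Finalize each block queued by this import operation before the pending block is committed.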
self.blockchain.finalize_header(block, justification)?; + } + + if let Some(pending_block) = operation.pending_block { + let old_state = &operation.old_state; + let (header, body, justification) = pending_block.block.into_inner(); + let hash = header.hash(); + + let storage_updates = operation.storage_updates.clone(); + let child_storage_updates = operation.child_storage_updates.clone(); + + let mut removed_keys_map = old_state.removed_keys.read().clone(); + for (key, value) in &storage_updates { + if value.is_some() { + removed_keys_map.remove(key); + } else { + removed_keys_map.insert(key.clone(), ()); + } + } + let new_removed_keys = Arc::new(parking_lot::RwLock::new(removed_keys_map)); + + let mut db_clone = old_state.db.read().clone(); + { + let mut entries = vec![(None, storage_updates.clone())]; + if !child_storage_updates.is_empty() { + entries.extend(child_storage_updates.iter().map(|(key, data)| { + (Some(polkadot_sdk::sp_storage::ChildInfo::new_default(key)), data.clone()) + })); + } + db_clone.insert(entries, StateVersion::V1); + } + let new_db = Arc::new(parking_lot::RwLock::new(db_clone)); + let new_state = ForkedLazyBackend { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: self.fork_checkpoint.hash(), + db: new_db, + removed_keys: new_removed_keys, + before_fork: operation.before_fork, + }; + self.states.write().insert(hash, new_state); + + self.blockchain.insert(hash, header, justification, body, pending_block.state)?; + } + + if !operation.aux.is_empty() { + self.blockchain.write_aux(operation.aux); + } + + if let Some(set_head) = operation.set_head { + self.blockchain.set_head(set_head)?; + } + + Ok(()) + } + + fn finalize_block( + &self, + hash: Block::Hash, + justification: Option, + ) -> sp_blockchain::Result<()> { + self.blockchain.finalize_header(hash, justification) + } + + fn append_justification( + &self, + hash: Block::Hash, + justification: Justification, + ) -> sp_blockchain::Result<()> { + self.blockchain.append_justification(hash, justification) + } + + fn blockchain(&self) -> &Self::Blockchain { + &self.blockchain + } + + fn usage_info(&self) -> Option { + None + } + + fn offchain_storage(&self) -> Option { + None + } + + fn state_at( + &self, + hash: Block::Hash, + _trie_cache_context: TrieCacheContext, + ) -> sp_blockchain::Result { + if hash == Default::default() { + return Ok(ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: self.fork_checkpoint.hash(), + db: Default::default(), + removed_keys: Default::default(), + before_fork: true, + }); + } + + let (backend, should_write) = + self.states.read().get(&hash).cloned().map(|state| Ok((state, false))).unwrap_or_else( + || { + self.rpc() + .and_then(|rpc| rpc.header(Some(hash)).ok()) + .flatten() + .ok_or(sp_blockchain::Error::UnknownBlock(format!( + "Failed to fetch block header: {hash:?}" + ))) + .map(|header| { + let checkpoint = self.fork_checkpoint.clone(); + let state = if header.number().gt(checkpoint.number()) { + let parent = self + .state_at(*header.parent_hash(), TrieCacheContext::Trusted) + .ok(); + + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: checkpoint.hash(), + db: parent.clone().map_or(Default::default(), |p| p.db), + removed_keys: parent + .map_or(Default::default(), |p| p.removed_keys), + before_fork: false, + } + } else { + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: checkpoint.hash(), + db: 
Default::default(), + removed_keys: Default::default(), + before_fork: true, + } + }; + + (state, true) + }) + }, + )?; + + if should_write { + self.states.write().insert(hash, backend.clone()); + } + + Ok(backend) + } + + fn revert( + &self, + n: NumberFor, + revert_finalized: bool, + ) -> sp_blockchain::Result<(NumberFor, HashSet)> { + let mut storage = self.blockchain.storage.write(); + + if storage.blocks.is_empty() { + return Ok((Zero::zero(), HashSet::new())); + } + + let mut states = self.states.write(); + let pinned = self.pinned_blocks.read(); + + let mut target = n; + let original_finalized_number = storage.finalized_number; + + if !target.is_zero() && !revert_finalized { + let revertible = storage.best_number.saturating_sub(storage.finalized_number); + if target > revertible { + target = revertible; + } + } + + let mut reverted = NumberFor::::zero(); + let mut reverted_finalized = HashSet::new(); + + let mut current_hash = storage.best_hash; + let mut current_number = storage.best_number; + + while reverted < target { + if current_number.is_zero() { + break; + } + + if let Some(count) = pinned.get(¤t_hash) { + if *count > 0 { + break; + } + } + + let Some(block) = storage.blocks.get(¤t_hash) else { + break; + }; + + let header = block.header().clone(); + let number = *header.number(); + let parent_hash = header.parent_hash(); + let parent_number = number.saturating_sub(One::one()); + + let parent_becomes_leaf = if number.is_zero() { + false + } else { + !storage.blocks.iter().any(|(other_hash, stored)| { + *other_hash != current_hash && stored.header().parent_hash() == parent_hash + }) + }; + + let hash_to_remove = current_hash; + + storage.blocks.remove(&hash_to_remove); + if let Some(entry) = storage.hashes.get(&number) { + if *entry == hash_to_remove { + storage.hashes.remove(&number); + } + } + states.remove(&hash_to_remove); + + storage.leaves.remove( + hash_to_remove, + number, + parent_becomes_leaf.then_some(*parent_hash), + ); + + if number <= original_finalized_number { + reverted_finalized.insert(hash_to_remove); + } + + reverted = reverted.saturating_add(One::one()); + + current_hash = *parent_hash; + current_number = parent_number; + + storage.best_hash = current_hash; + storage.best_number = current_number; + } + + let best_hash_after = storage.best_hash; + let best_number_after = storage.best_number; + let extra_leaves: Vec<_> = + storage.leaves.revert(best_hash_after, best_number_after).collect(); + + for (hash, number) in extra_leaves { + if let Some(count) = pinned.get(&hash) { + if *count > 0 { + return Err(sp_blockchain::Error::Backend(format!( + "Can't revert pinned block {hash:?}", + ))); + } + } + + storage.blocks.remove(&hash); + if let Some(entry) = storage.hashes.get(&number) { + if *entry == hash { + storage.hashes.remove(&number); + } + } + states.remove(&hash); + + if number <= original_finalized_number { + reverted_finalized.insert(hash); + } + } + + storage.hashes.insert(best_number_after, best_hash_after); + + if storage.finalized_number > best_number_after { + storage.finalized_number = best_number_after; + } + + while storage.finalized_number > Zero::zero() + && !storage.hashes.contains_key(&storage.finalized_number) + { + storage.finalized_number = storage.finalized_number.saturating_sub(One::one()); + } + + if let Some(hash) = storage.hashes.get(&storage.finalized_number).copied() { + storage.finalized_hash = hash; + } else { + storage.finalized_hash = storage.genesis_hash; + } + + drop(pinned); + drop(states); + + Ok((reverted, 
reverted_finalized)) + } + + fn remove_leaf_block(&self, hash: Block::Hash) -> sp_blockchain::Result<()> { + let best_hash = self.blockchain.info().best_hash; + + if best_hash == hash { + return Err(sp_blockchain::Error::Backend( + format!("Can't remove best block {hash:?}",), + )); + } + + let mut storage = self.blockchain.storage.write(); + + let Some(block) = storage.blocks.get(&hash) else { + return Err(sp_blockchain::Error::UnknownBlock(format!("{hash:?}"))); + }; + + let number = *block.header().number(); + let parent_hash = *block.header().parent_hash(); + + if !storage.leaves.contains(number, hash) { + return Err(sp_blockchain::Error::Backend(format!( + "Can't remove non-leaf block {hash:?}", + ))); + } + + if self.pinned_blocks.read().get(&hash).is_some_and(|count| *count > 0) { + return Err(sp_blockchain::Error::Backend(format!( + "Can't remove pinned block {hash:?}", + ))); + } + + let parent_becomes_leaf = if number.is_zero() { + false + } else { + !storage.blocks.iter().any(|(other_hash, stored)| { + *other_hash != hash && stored.header().parent_hash() == &parent_hash + }) + }; + + let mut states = self.states.write(); + + storage.blocks.remove(&hash); + if let Some(entry) = storage.hashes.get(&number) { + if *entry == hash { + storage.hashes.remove(&number); + } + } + states.remove(&hash); + + storage.leaves.remove(hash, number, parent_becomes_leaf.then_some(parent_hash)); + + Ok(()) + } + + fn get_import_lock(&self) -> &parking_lot::RwLock<()> { + &self.import_lock + } + + fn requires_full_sync(&self) -> bool { + false + } + + fn pin_block(&self, hash: ::Hash) -> sp_blockchain::Result<()> { + let mut blocks = self.pinned_blocks.write(); + *blocks.entry(hash).or_default() += 1; + Ok(()) + } + + fn unpin_block(&self, hash: ::Hash) { + let mut blocks = self.pinned_blocks.write(); + blocks.entry(hash).and_modify(|counter| *counter -= 1).or_insert(-1); + } +} + +impl backend::LocalBackend for Backend {} + +pub fn new_backend( + rpc_client: Option>>, + checkpoint: Block::Header, +) -> Result>, polkadot_sdk::sc_service::Error> +where + Block: BlockT + DeserializeOwned, + Block::Hash: From, +{ + let backend = Arc::new(Backend::new(rpc_client, checkpoint)); + Ok(backend) +} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs new file mode 100644 index 0000000000000..95c9cc830171e --- /dev/null +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -0,0 +1,364 @@ +use super::*; +use mock_rpc::{Rpc, TestBlock, TestHeader}; +use polkadot_sdk::{ + sc_client_api::{Backend as BackendT, StateBackend}, + sp_runtime::{ + OpaqueExtrinsic, + traits::{BlakeTwo256, Header as HeaderT}, + }, + sp_state_machine::{self, StorageIterator}, + sp_storage::{StorageData, StorageKey}, +}; +use std::{ + collections::BTreeMap, + sync::atomic::{AtomicUsize, Ordering}, +}; + +#[cfg(test)] +mod mock_rpc { + use super::*; + use polkadot_sdk::sp_runtime::{ + Justifications, + generic::{Block as GenericBlock, Header, SignedBlock}, + traits::Header as HeaderT, + }; + + pub type TestHashing = BlakeTwo256; + pub type TestHeader = Header; + pub type TestExtrinsic = OpaqueExtrinsic; + pub type TestBlock = GenericBlock, TestExtrinsic>; + + #[derive(Default, Debug)] + pub struct Counters { + pub storage_calls: AtomicUsize, + pub storage_hash_calls: AtomicUsize, + pub storage_keys_paged_calls: AtomicUsize, + pub header_calls: AtomicUsize, + pub block_calls: AtomicUsize, + } + + /// Mockable RPC 
with interior mutability. + #[allow(clippy::type_complexity)] + #[derive(Clone, Default, Debug)] + pub struct Rpc { + pub counters: std::sync::Arc, + /// storage[(block_hash, key)] = value + pub storage: Arc>>, + /// storage_hash[(block_hash, key)] = hash + pub storage_hashes: + Arc>>, + /// storage_keys_paged[(block_hash, (prefix,start))] = Vec + pub storage_keys_pages: + Arc), Vec>>>, + /// headers[hash] = header + pub headers: Arc>>, + /// blocks[hash] = SignedBlock + pub blocks: Arc>>>, + } + + impl Rpc { + pub fn new() -> Self { + Self { + counters: std::sync::Arc::new(Counters::default()), + storage: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + storage_hashes: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + storage_keys_pages: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + headers: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + blocks: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + } + } + + pub fn put_storage(&self, at: Block::Hash, key: StorageKey, val: StorageData) { + self.storage.write().insert((at, key), val); + } + pub fn put_storage_keys_page( + &self, + at: Block::Hash, + prefix: Vec, + keys: Vec, + ) { + self.storage_keys_pages.write().insert((at, prefix), keys); + } + pub fn put_header(&self, h: Block::Header) { + self.headers.write().insert(h.hash(), h); + } + pub fn put_block(&self, block: Block, just: Option) { + let full = SignedBlock { block, justifications: just }; + self.blocks.write().insert(full.block.header().hash(), full); + } + } + + impl RPCClient for Rpc { + fn storage( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.storage_calls.fetch_add(1, Ordering::Relaxed); + let map = self.storage.read(); + Ok(map.get(&(at.unwrap_or_default(), key)).cloned()) + } + + fn storage_hash( + &self, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.storage_hash_calls.fetch_add(1, Ordering::Relaxed); + let bh = at.unwrap_or_default(); + let map = self.storage_hashes.read(); + Ok(map.get(&(bh, key)).copied()) + } + + fn storage_keys_paged( + &self, + key: Option, + count: u32, + start_key: Option, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.storage_keys_paged_calls.fetch_add(1, Ordering::Relaxed); + + use std::cmp::min; + + let bh = at.unwrap_or_default(); + let prefix = key.map(|k| k.0).unwrap_or_default(); + let start = start_key.map(|k| k.0); + + let map = self.storage_keys_pages.read(); + let mut all = map.get(&(bh, prefix.clone())).cloned().unwrap_or_default(); + + all.sort_by(|a, b| a.0.cmp(&b.0)); + + let mut filtered: Vec = + all.into_iter().filter(|k| k.0.starts_with(&prefix)).collect(); + + if let Some(s) = start { + if let Some(pos) = filtered.iter().position(|k| k.0 == s) { + filtered = filtered.into_iter().skip(pos + 1).collect(); + } else { + filtered.retain(|k| k.0 > s); + } + } + + let take = min(filtered.len(), count as usize); + Ok(filtered.into_iter().take(take).map(|k| k.0).collect()) + } + + fn header( + &self, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + self.counters.header_calls.fetch_add(1, Ordering::Relaxed); + let key = at.unwrap_or_default(); + let raw = self.headers.read().get(&key).cloned(); + Ok(raw) + } + + fn block( + &self, + hash: Option, + ) -> Result< + Option>, + jsonrpsee::core::ClientError, + > { + self.counters.block_calls.fetch_add(1, Ordering::Relaxed); + let key = hash.unwrap_or_default(); 
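+            // The mock resolves a missing hash to the default hash rather than the best block.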
+ let raw = self.blocks.read().get(&key).cloned(); + Ok(raw) + } + + fn block_hash( + &self, + _num: Option>, + ) -> Result, jsonrpsee::core::ClientError> { + todo!() + } + + fn system_chain(&self) -> Result { + todo!() + } + + fn system_properties( + &self, + ) -> Result { + todo!() + } + } +} + +type N = u32; +type TestBlockT = TestBlock; + +fn make_header(number: N, parent: ::Hash) -> TestHeader { + TestHeader::new(number, Default::default(), Default::default(), parent, Default::default()) +} + +fn make_block( + number: N, + parent: ::Hash, + xts: Vec, +) -> TestBlock { + let header = make_header(number, parent); + TestBlock::new(header, xts) +} + +fn checkpoint(n: N) -> TestHeader { + make_header(n, Default::default()) +} + +#[test] +fn before_fork_reads_remote_only() { + let rpc = std::sync::Arc::new(Rpc::new()); + // fork checkpoint at #100 + let cp = checkpoint(100); + let backend = Backend::::new(Some(rpc.clone()), cp); + + // state_at(Default::default()) => before_fork=true + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + let key = b":foo".to_vec(); + // prepare remote value at "block_hash = Default::default()" + let at = Default::default(); + rpc.put_storage(at, StorageKey(key.clone()), StorageData(b"bar".to_vec())); + + // read storage + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"bar".to_vec())); + + // not cached in DB: second read still goes to RPC + let v2 = state.storage(&key).unwrap(); + assert_eq!(v2, Some(b"bar".to_vec())); + assert!(rpc.counters.storage_calls.load(Ordering::Relaxed) >= 2); +} + +#[test] +fn after_fork_first_fetch_caches_subsequent_hits_local() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(10); + let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + + // Build a block #11 > checkpoint (#10), with parent #10 + let parent = cp.hash(); + let b11 = make_block(11, parent, vec![]); + let h11 = b11.header.hash(); + + rpc.put_header(b11.header.clone()); + rpc.put_block(b11, None); + + // remote storage at fork block (checkpoint hash) + let fork_hash = cp.hash(); + let key = b":k".to_vec(); + rpc.put_storage(fork_hash, StorageKey(key.clone()), StorageData(b"v".to_vec())); + + // Grab state_at(#11): after_fork=false; local DB empty + let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); + + // First read fetches remote and caches + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"v".to_vec())); + + // Mutate RPC to detect second call (remove remote value) + // If second read still tries RPC, it would return None; but it should come from cache. + // So we do not change the mock; instead, assert RPC call count increases only once. 
+ let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); + let _ = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); + assert_eq!(calls_before, calls_after, "second hit should be served from cache"); +} + +#[test] +fn removed_keys_prevents_remote_fetch() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(5); + let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + + // make block #6 + let b6 = make_block(6, cp.hash(), vec![]); + rpc.put_header(b6.header.clone()); + rpc.put_block(b6.clone(), None); + let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); + + // mark key as removed + let key = b":dead".to_vec(); + state.removed_keys.write().insert(key.clone(), ()); + + // Even if remote has a value, backend must not fetch it + rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"ghost".to_vec())); + let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); + let v = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); + + assert!(v.is_none()); + assert_eq!(calls_before, calls_after, "should not call RPC for removed keys"); +} + +#[test] +fn raw_iter_merges_local_then_remote() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(7); + let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + + // block #8 + let b8 = make_block(8, cp.hash(), vec![]); + rpc.put_header(b8.header.clone()); + rpc.put_block(b8.clone(), None); + let state = backend.state_at(b8.header.hash(), TrieCacheContext::Trusted).unwrap(); + + // Preload local DB with key "a1" + state.update_storage(b"a1", &Some(b"v1".to_vec())); + + // Ensure storage_root is computed to make the key visible to raw_iter + let _ = state + .db + .write() + .storage_root(vec![(b"a1".as_ref(), Some(b"v1".as_ref()))].into_iter(), StateVersion::V1); + + // Remote has only "a2" under same prefix at fork block (not "a1") + rpc.put_storage_keys_page(cp.hash(), b"a".to_vec(), vec![StorageKey(b"a2".to_vec())]); + rpc.put_storage(cp.hash(), StorageKey(b"a2".to_vec()), StorageData(b"v2".to_vec())); + + let mut args = polkadot_sdk::sp_state_machine::IterArgs::default(); + args.prefix = Some(&b"a"[..]); + let mut it = state.raw_iter(args).unwrap(); + + // next_pair should return ("a1","v1") from local + let p1 = it.next_pair(&state).unwrap().unwrap(); + assert_eq!(p1.0, b"a1".to_vec()); + assert_eq!(p1.1, b"v1".to_vec()); + + // next_pair should now bring remote ("a2","v2") + let p2 = it.next_pair(&state).unwrap().unwrap(); + assert_eq!(p2.0, b"a2".to_vec()); + assert_eq!(p2.1, b"v2".to_vec()); + + // done + assert!(it.next_pair(&state).is_none()); + assert!(it.was_complete()); +} + +#[test] +fn blockchain_header_and_number_are_cached() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(3); + let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + let chain = backend.blockchain(); + + // prepare one block w/ extrinsics + let xts: Vec = vec![]; + let b4 = make_block(4, cp.hash(), xts); + let h4 = b4.header().hash(); + rpc.put_block(b4, None); + + // first header() fetches RPC and caches as Full + let h = chain.header(h4).unwrap().unwrap(); + assert_eq!(h.hash(), h4); + + // number() should now return from cache (no extra RPC needed) + let calls_before = rpc.counters.block_calls.load(Ordering::Relaxed); + let number = chain.number(h4).unwrap().unwrap(); + let calls_after = rpc.counters.block_calls.load(Ordering::Relaxed); 
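+    // header() cached the block as StoredBlock::Full, so number() is served from local storage without another block() RPC.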
+ + assert_eq!(number, 4); + assert_eq!(calls_before, calls_after, "number() should be served from cache after header()"); +} From e2c0f6790ee51d5bb0c732251bdfec6f41019b3e Mon Sep 17 00:00:00 2001 From: JimboJ <40345116+jimjbrettj@users.noreply.github.com> Date: Tue, 11 Nov 2025 12:35:37 -0300 Subject: [PATCH 19/44] anvil-polkadot: forking cli flags (#393) --- crates/anvil-polkadot/src/cmd.rs | 112 +++++++++++++++++++++++++++- crates/anvil-polkadot/src/config.rs | 106 +++++++++++++++++++++++++- 2 files changed, 212 insertions(+), 6 deletions(-) diff --git a/crates/anvil-polkadot/src/cmd.rs b/crates/anvil-polkadot/src/cmd.rs index bab435dc64aaa..b76ba0d54b5d1 100644 --- a/crates/anvil-polkadot/src/cmd.rs +++ b/crates/anvil-polkadot/src/cmd.rs @@ -1,15 +1,16 @@ use crate::config::{ - AccountGenerator, AnvilNodeConfig, CHAIN_ID, DEFAULT_MNEMONIC, SubstrateNodeConfig, + AccountGenerator, AnvilNodeConfig, CHAIN_ID, DEFAULT_MNEMONIC, ForkChoice, SubstrateNodeConfig, }; use alloy_genesis::Genesis; -use alloy_primitives::{U256, utils::Unit}; +use alloy_primitives::{B256, U256, utils::Unit}; use alloy_signer_local::coins_bip39::{English, Mnemonic}; use anvil_server::ServerConfig; use clap::Parser; +use core::fmt; use foundry_common::shell; use foundry_config::Chain; use rand_08::{SeedableRng, rngs::StdRng}; -use std::{net::IpAddr, path::PathBuf, time::Duration}; +use std::{net::IpAddr, path::PathBuf, str::FromStr, time::Duration}; #[derive(Clone, Debug, Parser)] pub struct NodeArgs { @@ -134,7 +135,20 @@ impl NodeArgs { .with_code_size_limit(self.evm.code_size_limit) .disable_code_size_limit(self.evm.disable_code_size_limit) .with_disable_default_create2_deployer(self.evm.disable_default_create2_deployer) - .with_memory_limit(self.evm.memory_limit); + .with_memory_limit(self.evm.memory_limit) + .with_fork_choice(match (self.evm.fork_block_number, self.evm.fork_transaction_hash) { + (Some(block), None) => Some(ForkChoice::Block(block)), + (None, Some(hash)) => Some(ForkChoice::Transaction(hash)), + _ => self + .evm + .fork_url + .as_ref() + .and_then(|f| f.block) + .map(|num| ForkChoice::Block(num as i128)), + }) + .with_eth_rpc_url(self.evm.fork_url.map(|fork| fork.url)) + .fork_request_timeout(self.evm.fork_request_timeout.map(Duration::from_millis)) + .fork_request_retries(self.evm.fork_request_retries); let substrate_node_config = SubstrateNodeConfig::new(&anvil_config); @@ -170,6 +184,56 @@ impl NodeArgs { #[derive(Clone, Debug, Parser)] #[command(next_help_heading = "EVM options")] pub struct AnvilEvmArgs { + /// Fetch state over a remote endpoint instead of starting from an empty state. + /// + /// If you want to fetch state from a specific block number, add a block number like `http://localhost:8545@1400000` or use the `--fork-block-number` argument. + #[arg( + long, + short, + visible_alias = "rpc-url", + value_name = "URL", + help_heading = "Fork config" + )] + pub fork_url: Option, + + /// Fetch state from a specific block number over a remote endpoint. + /// + /// If negative, the given value is subtracted from the `latest` block number. + /// + /// See --fork-url. + #[arg( + long, + requires = "fork_url", + value_name = "BLOCK", + help_heading = "Fork config", + allow_hyphen_values = true + )] + pub fork_block_number: Option, + + /// Fetch state from a specific transaction hash over a remote endpoint. + /// + /// See --fork-url. 
+ #[arg( + long, + requires = "fork_url", + value_name = "TRANSACTION", + help_heading = "Fork config", + conflicts_with = "fork_block_number" + )] + pub fork_transaction_hash: Option, + + /// Timeout in ms for requests sent to remote JSON-RPC server in forking mode. + /// + /// Default value 45000 + #[arg(id = "timeout", long = "timeout", help_heading = "Fork config", requires = "fork_url")] + pub fork_request_timeout: Option, + + /// Number of retry requests for spurious networks (timed out requests) + /// + /// Default value 5 + #[arg(id = "retries", long = "retries", help_heading = "Fork config", requires = "fork_url")] + pub fork_request_retries: Option, + /// The block gas limit. #[arg(long, alias = "block-gas-limit", help_heading = "Environment config")] pub gas_limit: Option, @@ -245,6 +309,46 @@ pub struct AnvilEvmArgs { pub memory_limit: Option, } +/// Represents the input URL for a fork with an optional trailing block number: +/// `http://localhost:8545@1000000` +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ForkUrl { + /// The endpoint url + pub url: String, + /// Optional trailing block + pub block: Option, +} + +impl fmt::Display for ForkUrl { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.url.fmt(f)?; + if let Some(block) = self.block { + write!(f, "@{block}")?; + } + Ok(()) + } +} + +impl FromStr for ForkUrl { + type Err = String; + + fn from_str(s: &str) -> Result { + if let Some((url, block)) = s.rsplit_once('@') { + if block == "latest" { + return Ok(Self { url: url.to_string(), block: None }); + } + // this will prevent false positives for auths `user:password@example.com` + if !block.is_empty() && !block.contains(':') && !block.contains('.') { + let block: u64 = block + .parse() + .map_err(|_| format!("Failed to parse block number: `{block}`"))?; + return Ok(Self { url: url.to_string(), block: Some(block) }); + } + } + Ok(Self { url: s.to_string(), block: None }) + } +} + /// Clap's value parser for genesis. Loads a genesis.json file. fn read_genesis_file(path: &str) -> Result { foundry_common::fs::read_json_file(path.as_ref()).map_err(|err| err.to_string()) diff --git a/crates/anvil-polkadot/src/config.rs b/crates/anvil-polkadot/src/config.rs index bc994c22978a0..b0fc99058cd7f 100644 --- a/crates/anvil-polkadot/src/config.rs +++ b/crates/anvil-polkadot/src/config.rs @@ -3,7 +3,7 @@ use crate::{ substrate_node::chain_spec::keypairs_from_private_keys, }; use alloy_genesis::Genesis; -use alloy_primitives::{Address, U256, hex, map::HashMap, utils::Unit}; +use alloy_primitives::{Address, TxHash, U256, hex, map::HashMap, utils::Unit}; use alloy_signer::Signer; use alloy_signer_local::{ MnemonicBuilder, PrivateKeySigner, @@ -11,7 +11,7 @@ use alloy_signer_local::{ }; use anvil_server::ServerConfig; use eyre::{Context, Result}; -use foundry_common::{duration_since_unix_epoch, sh_println}; +use foundry_common::{REQUEST_TIMEOUT, duration_since_unix_epoch, sh_println}; use polkadot_sdk::{ pallet_revive::evm::Account, sc_cli::{ @@ -331,6 +331,14 @@ pub struct AnvilNodeConfig { pub memory_limit: Option, /// Do not print log messages. 
pub silent: bool, + /// url of the rpc server that should be used for any rpc calls + pub eth_rpc_url: Option, + /// pins the block number or transaction hash for the state fork + pub fork_choice: Option, + /// Timeout in for requests sent to remote JSON-RPC server in forking mode + pub fork_request_timeout: Duration, + /// Number of request retries for spurious networks + pub fork_request_retries: u32, } impl AnvilNodeConfig { @@ -554,6 +562,10 @@ impl Default for AnvilNodeConfig { disable_default_create2_deployer: false, memory_limit: None, silent: false, + eth_rpc_url: None, + fork_choice: None, + fork_request_timeout: REQUEST_TIMEOUT, + fork_request_retries: 5, } } } @@ -857,6 +869,96 @@ impl AnvilNodeConfig { self.silent = silent; self } + + /// Sets the `eth_rpc_url` to use when forking + #[must_use] + pub fn with_eth_rpc_url>(mut self, eth_rpc_url: Option) -> Self { + self.eth_rpc_url = eth_rpc_url.map(Into::into); + self + } + + /// Sets the `fork_choice` to use to fork off from based on a block number + #[must_use] + pub fn with_fork_block_number>(self, fork_block_number: Option) -> Self { + self.with_fork_choice(fork_block_number.map(Into::into)) + } + + /// Sets the `fork_choice` to use to fork off from based on a transaction hash + #[must_use] + pub fn with_fork_transaction_hash>( + self, + fork_transaction_hash: Option, + ) -> Self { + self.with_fork_choice(fork_transaction_hash.map(Into::into)) + } + + /// Sets the `fork_choice` to use to fork off from + #[must_use] + pub fn with_fork_choice>(mut self, fork_choice: Option) -> Self { + self.fork_choice = fork_choice.map(Into::into); + self + } + + /// Sets the `fork_request_timeout` to use for requests + #[must_use] + pub fn fork_request_timeout(mut self, fork_request_timeout: Option) -> Self { + if let Some(fork_request_timeout) = fork_request_timeout { + self.fork_request_timeout = fork_request_timeout; + } + self + } + + /// Sets the `fork_request_retries` to use for spurious networks + #[must_use] + pub fn fork_request_retries(mut self, fork_request_retries: Option) -> Self { + if let Some(fork_request_retries) = fork_request_retries { + self.fork_request_retries = fork_request_retries; + } + self + } +} + +/// Fork delimiter used to specify which block or transaction to fork from. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum ForkChoice { + /// Block number to fork from. + /// + /// If negative, the given value is subtracted from the `latest` block number. + Block(i128), + /// Transaction hash to fork from. 
+ Transaction(TxHash), +} + +impl ForkChoice { + /// Returns the block number to fork from + pub fn block_number(&self) -> Option { + match self { + Self::Block(block_number) => Some(*block_number), + Self::Transaction(_) => None, + } + } + + /// Returns the transaction hash to fork from + pub fn transaction_hash(&self) -> Option { + match self { + Self::Block(_) => None, + Self::Transaction(transaction_hash) => Some(*transaction_hash), + } + } +} + +/// Convert a transaction hash into a ForkChoice +impl From for ForkChoice { + fn from(tx_hash: TxHash) -> Self { + Self::Transaction(tx_hash) + } +} + +/// Convert a decimal block number into a ForkChoice +impl From for ForkChoice { + fn from(block: u64) -> Self { + Self::Block(block as i128) + } } /// Can create dev accounts From f732f8cf87486b409a54415b7580bd6940dea9e2 Mon Sep 17 00:00:00 2001 From: Diego Date: Wed, 12 Nov 2025 19:25:52 -0300 Subject: [PATCH 20/44] Remove some unused deps, methods and fields --- Cargo.lock | 2 - crates/anvil-polkadot/Cargo.toml | 2 - .../backend/block_import_operation.rs | 1 - .../lazy_loading/backend/blockchain.rs | 39 +------------------ .../backend/forked_lazy_backend.rs | 6 +-- .../lazy_loading/backend/mod.rs | 6 +-- 6 files changed, 6 insertions(+), 50 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1af19930ba371..3d3115e4c7c61 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1211,13 +1211,11 @@ dependencies = [ "foundry-test-utils", "futures", "futures-timer", - "hex-literal", "hyper 1.6.0", "indexmap 2.10.0", "itertools 0.14.0", "jsonrpsee", "libsecp256k1", - "log", "lru 0.16.0", "op-alloy-consensus 0.17.2", "op-alloy-rpc-types", diff --git a/crates/anvil-polkadot/Cargo.toml b/crates/anvil-polkadot/Cargo.toml index f509c2a015c91..3ff78737f19dc 100644 --- a/crates/anvil-polkadot/Cargo.toml +++ b/crates/anvil-polkadot/Cargo.toml @@ -129,8 +129,6 @@ rand_08.workspace = true eyre.workspace = true lru = "0.16.0" indexmap = "2.0" -log = { version = "0.4.21", default-features = false } -hex-literal = { version = "0.4.1", default-features = false } polkadot-core-primitives = "18.0.0" # cli diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs index e7b83a6319d64..bf1d70ac7d837 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs @@ -24,7 +24,6 @@ pub struct BlockImportOperation { pub(crate) child_storage_updates: ChildStorageCollection, pub(crate) finalized_blocks: Vec<(Block::Hash, Option)>, pub(crate) set_head: Option, - pub(crate) before_fork: bool, } impl BlockImportOperation { diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs index 57efea0e78722..07042a76dff21 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs @@ -13,7 +13,7 @@ use polkadot_sdk::{ }, }; use serde::de::DeserializeOwned; -use std::{collections::HashMap, ptr, sync::Arc}; +use std::{collections::HashMap, sync::Arc}; #[derive(PartialEq, Eq, Clone)] pub(crate) enum StoredBlock { @@ -75,7 +75,6 @@ pub(crate) struct BlockchainStorage { pub(crate) finalized_hash: Block::Hash, pub(crate) finalized_number: NumberFor, pub(crate) 
genesis_hash: Block::Hash, - pub(crate) header_cht_roots: HashMap, Block::Hash>, pub(crate) leaves: LeafSet>, pub(crate) aux: HashMap, Vec>, } @@ -98,7 +97,6 @@ impl Blockchain { finalized_hash: Default::default(), finalized_number: Zero::zero(), genesis_hash: Default::default(), - header_cht_roots: HashMap::new(), leaves: LeafSet::new(), aux: HashMap::new(), })); @@ -172,39 +170,6 @@ impl Blockchain { Ok(()) } - /// Get total number of blocks. - pub fn blocks_count(&self) -> usize { - self.storage.read().blocks.len() - } - - /// Compare this blockchain with another in-mem blockchain - pub fn equals_to(&self, other: &Self) -> bool { - // Check ptr equality first to avoid double read locks. - if ptr::eq(self, other) { - return true; - } - self.canon_equals_to(other) && self.storage.read().blocks == other.storage.read().blocks - } - - /// Compare canonical chain to other canonical chain. - pub fn canon_equals_to(&self, other: &Self) -> bool { - // Check ptr equality first to avoid double read locks. - if ptr::eq(self, other) { - return true; - } - let this = self.storage.read(); - let other = other.storage.read(); - this.hashes == other.hashes - && this.best_hash == other.best_hash - && this.best_number == other.best_number - && this.genesis_hash == other.genesis_hash - } - - /// Insert header CHT root. - pub fn insert_cht_root(&self, block: NumberFor, cht_root: Block::Hash) { - self.storage.write().header_cht_roots.insert(block, cht_root); - } - /// Set an existing block as head. pub fn set_head(&self, hash: Block::Hash) -> sp_blockchain::Result<()> { let header = self @@ -314,7 +279,7 @@ impl HeaderBackend for Blockchain sp_state_machine::StorageIterator sp_state_machine::StorageIterator sp_state_machine::Backend backend::Backend for Backend backend::Backend for Backend backend::Backend for Backend Date: Wed, 12 Nov 2025 19:56:33 -0300 Subject: [PATCH 21/44] Remove unused polkadot-core-primitives dep --- Cargo.lock | 1845 ++++++++++++------------------ crates/anvil-polkadot/Cargo.toml | 1 - 2 files changed, 707 insertions(+), 1139 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3d3115e4c7c61..579281bd0c2a4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1222,7 +1222,6 @@ dependencies = [ "pallet-revive-eth-rpc", "parity-scale-codec", "parking_lot 0.12.4", - "polkadot-core-primitives 18.0.0", "polkadot-sdk", "rand 0.8.5", "revm", @@ -1230,7 +1229,7 @@ dependencies = [ "secp256k1 0.28.2", "serde", "serde_json", - "sp-runtime-interface 24.0.0", + "sp-runtime-interface", "sqlx", "substrate-runtime", "subxt", @@ -1988,8 +1987,8 @@ dependencies = [ "parachains-common", "parachains-runtimes-test-utils", "parity-scale-codec", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", "staging-parachain-info", "staging-xcm", "staging-xcm-builder", @@ -2016,8 +2015,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -2829,17 +2828,6 @@ dependencies = [ "parity-scale-codec", ] -[[package]] -name = "binary-merkle-tree" -version = "16.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95c9f6900c9fd344d53fbdfb36e1343429079d73f4168c8ef48884bf15616dbd" -dependencies = [ - "hash-db", - "log", - "parity-scale-codec", -] - [[package]] name = "bindgen" version = "0.69.5" @@ -3132,18 +3120,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bounded-collections" -version = "0.2.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ad8a0bed7827f0b07a5d23cec2e58cc02038a99e4ca81616cb2bb2025f804d" -dependencies = [ - "log", - "parity-scale-codec", - "scale-info", - "serde", -] - [[package]] name = "bounded-collections" version = "0.3.2" @@ -3175,9 +3151,9 @@ dependencies = [ "scale-info", "serde", "sp-consensus-grandpa", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-runtime", + "sp-std", ] [[package]] @@ -3191,9 +3167,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-io", + "sp-std", ] [[package]] @@ -3208,9 +3184,9 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-runtime", + "sp-std", ] [[package]] @@ -3225,9 +3201,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-runtime", + "sp-std", ] [[package]] @@ -3244,8 +3220,8 @@ dependencies = [ "pallet-utility", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-runtime", + "sp-std", ] [[package]] @@ -3261,12 +3237,12 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-trie 29.0.0", + "sp-core", + "sp-io", + "sp-runtime", + "sp-state-machine", + "sp-std", + "sp-trie", "tracing", "trie-db", ] @@ -3283,12 +3259,12 @@ dependencies = [ "ed25519-dalek", "finality-grandpa", "parity-scale-codec", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-consensus-grandpa", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-trie 29.0.0", + "sp-core", + "sp-runtime", + "sp-std", + "sp-trie", ] [[package]] @@ -3302,9 +3278,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-io", + "sp-std", "staging-xcm", ] @@ -3315,8 +3291,8 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", "staging-xcm", ] @@ -3331,9 +3307,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "snowbridge-core", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-runtime", + "sp-std", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -3369,12 +3345,12 @@ dependencies = [ "parachains-common", "parachains-runtimes-test-utils", "parity-scale-codec", - "sp-core 28.0.0", - "sp-io 30.0.0", + "sp-core", + "sp-io", "sp-keyring", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-tracing 16.0.0", 
+ "sp-runtime", + "sp-std", + "sp-tracing", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -3402,11 +3378,11 @@ dependencies = [ "pallet-utility", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-trie 29.0.0", - "sp-weights 27.0.0", + "sp-io", + "sp-runtime", + "sp-std", + "sp-trie", + "sp-weights", "staging-xcm", "tracing", "tuplex", @@ -4704,9 +4680,9 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-consensus-aura", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -4721,8 +4697,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", "staging-xcm", ] @@ -4750,14 +4726,14 @@ dependencies = [ "polkadot-runtime-parachains", "scale-info", "sp-consensus-babe", - "sp-core 28.0.0", - "sp-externalities 0.25.0", + "sp-core", + "sp-externalities", "sp-inherents", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-trie 29.0.0", + "sp-io", + "sp-runtime", + "sp-state-machine", + "sp-std", + "sp-trie", "sp-version", "staging-xcm", "staging-xcm-builder", @@ -4785,7 +4761,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -4800,7 +4776,7 @@ dependencies = [ "parity-scale-codec", "polkadot-primitives", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -4817,9 +4793,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-trie 29.0.0", + "sp-io", + "sp-runtime", + "sp-trie", ] [[package]] @@ -4832,8 +4808,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", "staging-xcm", ] @@ -4854,9 +4830,9 @@ dependencies = [ "polkadot-runtime-common", "polkadot-runtime-parachains", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -4874,7 +4850,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", "staging-xcm", ] @@ -4893,13 +4869,13 @@ version = "0.7.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "parity-scale-codec", - "polkadot-core-primitives 7.0.0", + "polkadot-core-primitives", "polkadot-parachain-primitives", "polkadot-primitives", "scale-info", "sp-api", - "sp-runtime 31.0.1", - "sp-trie 29.0.0", + "sp-runtime", + "sp-trie", "staging-xcm", "tracing", ] @@ -4913,9 +4889,9 @@ dependencies = [ "cumulus-primitives-core", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-trie 29.0.0", + "sp-trie", ] [[package]] @@ -4923,9 +4899,9 @@ name = "cumulus-primitives-proof-size-hostfunction" version = "0.2.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ - "sp-externalities 0.25.0", - "sp-runtime-interface 24.0.0", - "sp-trie 29.0.0", + "sp-externalities", + "sp-runtime-interface", + "sp-trie", ] [[package]] @@ -4942,7 +4918,7 @@ dependencies = [ "log", 
"parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -4966,7 +4942,7 @@ dependencies = [ "pallet-asset-conversion", "parity-scale-codec", "polkadot-runtime-common", - "sp-runtime 31.0.1", + "sp-runtime", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -4980,9 +4956,9 @@ dependencies = [ "cumulus-primitives-core", "parity-scale-codec", "polkadot-primitives", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", - "sp-trie 29.0.0", + "sp-runtime", + "sp-state-machine", + "sp-trie", ] [[package]] @@ -7249,12 +7225,12 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto 30.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-runtime-interface 24.0.0", - "sp-storage 19.0.0", + "sp-application-crypto", + "sp-core", + "sp-io", + "sp-runtime", + "sp-runtime-interface", + "sp-storage", "static_assertions", ] @@ -7268,8 +7244,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -7307,11 +7283,11 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", + "sp-arithmetic", + "sp-core", "sp-npos-elections", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-runtime", + "sp-std", ] [[package]] @@ -7326,10 +7302,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-tracing 16.0.0", + "sp-core", + "sp-io", + "sp-runtime", + "sp-tracing", ] [[package]] @@ -7357,7 +7333,7 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -7367,7 +7343,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "aquamarine", "array-bytes", - "binary-merkle-tree 13.0.0", + "binary-merkle-tree", "bitflags 1.3.2", "docify", "environmental", @@ -7383,21 +7359,21 @@ dependencies = [ "serde", "serde_json", "sp-api", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", + "sp-arithmetic", + "sp-core", "sp-crypto-hashing-proc-macro", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-debug-derive", "sp-genesis-builder", "sp-inherents", - "sp-io 30.0.0", + "sp-io", "sp-metadata-ir", - "sp-runtime 31.0.1", + "sp-runtime", "sp-staking", - "sp-state-machine 0.35.0", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-tracing 16.0.0", - "sp-trie 29.0.0", - "sp-weights 27.0.0", + "sp-state-machine", + "sp-std", + "sp-tracing", + "sp-trie", + "sp-weights", "tt-call", ] @@ -7455,11 +7431,11 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "sp-version", - "sp-weights 27.0.0", + "sp-weights", ] [[package]] @@ -7472,8 +7448,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -7494,7 +7470,7 @@ dependencies = [ "frame-support", "parity-scale-codec", "sp-api", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -11483,10 +11459,10 @@ dependencies = [ "pallet-identity", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", + "sp-core", "sp-crypto-hashing 0.1.0 
(git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -11501,10 +11477,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -11519,10 +11495,10 @@ dependencies = [ "pallet-asset-conversion", "parity-scale-codec", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -11537,7 +11513,7 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -11550,8 +11526,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -11565,11 +11541,11 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", + "sp-std", ] [[package]] @@ -11584,8 +11560,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -11600,8 +11576,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -11628,7 +11604,7 @@ dependencies = [ "pallet-assets", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -11663,9 +11639,9 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-consensus-aura", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -11678,9 +11654,9 @@ dependencies = [ "pallet-session", "parity-scale-codec", "scale-info", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-authority-discovery", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -11693,7 +11669,7 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -11710,11 +11686,11 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-consensus-babe", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "sp-session", "sp-staking", ] @@ -11734,10 +11710,10 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-tracing 16.0.0", + "sp-core", + "sp-io", + "sp-runtime", + "sp-tracing", ] [[package]] @@ -11752,8 +11728,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -11770,7 +11746,7 @@ dependencies = [ "scale-info", "serde", "sp-consensus-beefy", - "sp-runtime 31.0.1", + "sp-runtime", "sp-session", "sp-staking", ] @@ -11781,7 +11757,7 @@ version = "28.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "array-bytes", - 
"binary-merkle-tree 13.0.0", + "binary-merkle-tree", "frame-benchmarking", "frame-support", "frame-system", @@ -11794,10 +11770,10 @@ dependencies = [ "serde", "sp-api", "sp-consensus-beefy", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", + "sp-core", + "sp-io", + "sp-runtime", + "sp-state-machine", ] [[package]] @@ -11812,9 +11788,9 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -11831,8 +11807,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-consensus-grandpa", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-runtime", + "sp-std", "tracing", ] @@ -11849,9 +11825,9 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-trie 29.0.0", + "sp-runtime", + "sp-std", + "sp-trie", "tracing", ] @@ -11870,8 +11846,8 @@ dependencies = [ "pallet-bridge-grandpa", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-runtime", + "sp-std", "tracing", ] @@ -11893,8 +11869,8 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "scale-info", - "sp-arithmetic 23.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-runtime", "tracing", ] @@ -11911,9 +11887,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-core", + "sp-runtime", ] [[package]] @@ -11929,9 +11905,9 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -11949,7 +11925,7 @@ dependencies = [ "parity-scale-codec", "rand 0.8.5", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", "sp-staking", ] @@ -11965,9 +11941,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -11980,8 +11956,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -12005,9 +11981,9 @@ dependencies = [ "serde", "smallvec", "sp-api", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "staging-xcm", "staging-xcm-builder", "wasm-instrument", @@ -12034,11 +12010,11 @@ dependencies = [ "polkadot-runtime-parachains", "scale-info", "sp-api", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-keystore 0.34.0", - "sp-runtime 31.0.1", - "sp-tracing 16.0.0", + "sp-core", + "sp-io", + "sp-keystore", + "sp-runtime", + "sp-tracing", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -12078,8 +12054,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -12094,10 +12070,10 @@ dependencies = [ "pallet-ranked-collective", "parity-scale-codec", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12110,8 +12086,8 @@ dependencies = [ 
"log", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", "sp-staking", ] @@ -12127,9 +12103,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12143,10 +12119,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-io", + "sp-runtime", + "sp-std", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -12163,8 +12139,8 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -12179,10 +12155,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12198,12 +12174,12 @@ dependencies = [ "parity-scale-codec", "rand 0.8.5", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", + "sp-arithmetic", + "sp-core", + "sp-io", "sp-npos-elections", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-runtime", + "sp-std", ] [[package]] @@ -12219,11 +12195,11 @@ dependencies = [ "parity-scale-codec", "rand 0.8.5", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", + "sp-arithmetic", + "sp-core", + "sp-io", "sp-npos-elections", - "sp-runtime 31.0.1", + "sp-runtime", "strum 0.26.3", ] @@ -12237,7 +12213,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "sp-npos-elections", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -12251,10 +12227,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", + "sp-core", + "sp-io", "sp-npos-elections", - "sp-runtime 31.0.1", + "sp-runtime", "sp-staking", ] @@ -12271,8 +12247,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", "sp-staking", ] @@ -12288,10 +12264,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -12307,11 +12283,11 @@ dependencies = [ "pallet-session", "parity-scale-codec", "scale-info", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-consensus-grandpa", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "sp-session", "sp-staking", ] @@ -12328,8 +12304,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -12344,10 +12320,10 @@ dependencies = [ "pallet-authorship", "parity-scale-codec", "scale-info", - "sp-application-crypto 30.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-application-crypto", + "sp-core", + "sp-io", + "sp-runtime", "sp-staking", ] @@ -12361,9 +12337,9 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12387,7 +12363,7 @@ dependencies = [ "frame-system", 
"parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -12401,9 +12377,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12418,11 +12394,11 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", + "sp-weights", ] [[package]] @@ -12437,10 +12413,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-io", + "sp-runtime", + "sp-std", ] [[package]] @@ -12457,9 +12433,9 @@ dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12472,7 +12448,7 @@ dependencies = [ "polkadot-sdk-frame", "scale-info", "serde", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-mixnet", ] @@ -12500,9 +12476,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12541,9 +12517,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12587,11 +12563,11 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "sp-staking", - "sp-tracing 16.0.0", + "sp-tracing", ] [[package]] @@ -12609,8 +12585,8 @@ dependencies = [ "pallet-staking", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", - "sp-runtime-interface 24.0.0", + "sp-runtime", + "sp-runtime-interface", "sp-staking", ] @@ -12635,7 +12611,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-runtime 31.0.1", + "sp-runtime", "sp-staking", ] @@ -12658,7 +12634,7 @@ dependencies = [ "pallet-staking", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", "sp-staking", ] @@ -12674,10 +12650,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-application-crypto 30.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-application-crypto", + "sp-io", + "sp-runtime", + "sp-std", ] [[package]] @@ -12688,7 +12664,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-std", ] [[package]] @@ -12703,10 +12679,10 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12734,8 +12710,8 @@ dependencies = [ "paste", "scale-info", "serde", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -12749,10 +12725,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + 
"sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", "verifiable", ] @@ -12767,9 +12743,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12794,10 +12770,10 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12822,9 +12798,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-arithmetic 23.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-io", + "sp-runtime", ] [[package]] @@ -12838,9 +12814,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -12882,13 +12858,13 @@ dependencies = [ "serde", "serde_json", "sp-api", - "sp-arithmetic 23.0.0", + "sp-arithmetic", "sp-consensus-aura", "sp-consensus-babe", "sp-consensus-slots", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "sp-version", "substrate-bn", "subxt-signer", @@ -12916,15 +12892,15 @@ dependencies = [ "sc-service", "serde", "serde_json", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", + "sp-arithmetic", + "sp-core", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-io 30.0.0", + "sp-io", "sp-rpc", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-runtime", + "sp-weights", "sqlx", - "substrate-prometheus-endpoint 0.17.0", + "substrate-prometheus-endpoint", "subxt", "subxt-signer", "thiserror 1.0.69", @@ -12943,8 +12919,8 @@ dependencies = [ "pallet-revive-uapi", "polkavm-linker 0.29.0", "serde_json", - "sp-core 28.0.0", - "sp-io 30.0.0", + "sp-core", + "sp-io", "toml 0.8.23", ] @@ -12984,8 +12960,8 @@ dependencies = [ "pallet-staking", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", "sp-staking", ] @@ -12998,8 +12974,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -13040,9 +13016,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-io", + "sp-runtime", + "sp-weights", ] [[package]] @@ -13054,8 +13030,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -13071,13 +13047,13 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "sp-session", "sp-staking", - "sp-state-machine 0.35.0", - "sp-trie 29.0.0", + "sp-state-machine", + "sp-trie", ] [[package]] @@ -13092,7 +13068,7 @@ dependencies = [ "pallet-staking", "parity-scale-codec", "rand 0.8.5", - "sp-runtime 31.0.1", + "sp-runtime", "sp-session", ] @@ -13105,7 +13081,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -13120,9 +13096,9 @@ dependencies = [ "parity-scale-codec", "rand_chacha 0.3.1", "scale-info", - "sp-arithmetic 23.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-io", + "sp-runtime", ] [[package]] @@ -13140,9 +13116,9 @@ 
dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-application-crypto 30.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-application-crypto", + "sp-io", + "sp-runtime", "sp-staking", ] @@ -13162,11 +13138,11 @@ dependencies = [ "rand_chacha 0.3.1", "scale-info", "serde", - "sp-application-crypto 30.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", + "sp-application-crypto", + "sp-core", + "sp-io", "sp-npos-elections", - "sp-runtime 31.0.1", + "sp-runtime", "sp-staking", ] @@ -13185,8 +13161,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", "sp-staking", ] @@ -13201,8 +13177,8 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", "sp-staking", "staging-xcm", ] @@ -13213,7 +13189,7 @@ version = "19.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", - "sp-arithmetic 23.0.0", + "sp-arithmetic", ] [[package]] @@ -13232,7 +13208,7 @@ version = "19.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", - "sp-arithmetic 23.0.0", + "sp-arithmetic", ] [[package]] @@ -13256,9 +13232,9 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -13272,9 +13248,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "sp-statement-store", ] @@ -13289,8 +13265,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -13306,8 +13282,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-inherents", - "sp-runtime 31.0.1", - "sp-storage 19.0.0", + "sp-runtime", + "sp-storage", "sp-timestamp", ] @@ -13324,9 +13300,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -13341,8 +13317,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", ] [[package]] @@ -13353,8 +13329,8 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "sp-api", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-runtime", + "sp-weights", ] [[package]] @@ -13371,8 +13347,8 @@ dependencies = [ "scale-info", "serde", "sp-inherents", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", "sp-transaction-storage-proof", ] @@ -13391,8 +13367,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -13417,7 +13393,7 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -13430,9 +13406,9 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", ] [[package]] @@ -13445,9 +13421,9 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-io", + 
"sp-runtime", + "sp-weights", ] [[package]] @@ -13461,7 +13437,7 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -13488,9 +13464,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -13508,8 +13484,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -13528,9 +13504,9 @@ dependencies = [ "pallet-bridge-messages", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-runtime", + "sp-std", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -13549,9 +13525,9 @@ dependencies = [ "parity-scale-codec", "polkadot-runtime-parachains", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-core", + "sp-runtime", + "sp-std", "staging-xcm", "staging-xcm-builder", "tracing", @@ -13593,9 +13569,9 @@ dependencies = [ "polkadot-runtime-common", "scale-info", "sp-consensus-aura", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-io", + "sp-runtime", "staging-parachain-info", "staging-xcm", "staging-xcm-executor", @@ -13623,29 +13599,16 @@ dependencies = [ "parity-scale-codec", "polkadot-parachain-primitives", "sp-consensus-aura", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-tracing 16.0.0", + "sp-core", + "sp-io", + "sp-runtime", + "sp-tracing", "staging-parachain-info", "staging-xcm", "staging-xcm-executor", "xcm-runtime-apis", ] -[[package]] -name = "parity-bip39" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" -dependencies = [ - "bitcoin_hashes 0.13.0", - "rand 0.8.5", - "rand_core 0.6.4", - "serde", - "unicode-normalization", -] - [[package]] name = "parity-db" version = "0.4.13" @@ -14077,20 +14040,8 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "parity-scale-codec", "scale-info", - "sp-core 28.0.0", - "sp-runtime 31.0.1", -] - -[[package]] -name = "polkadot-core-primitives" -version = "18.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85331e6e8c215034748a5afa4d985c4bc74e17a6704123749570591ddc2ac6c" -dependencies = [ - "parity-scale-codec", - "scale-info", - "sp-core 37.0.0", - "sp-runtime 42.0.0", + "sp-core", + "sp-runtime", ] [[package]] @@ -14102,12 +14053,12 @@ dependencies = [ "bounded-collections 0.3.2", "derive_more 0.99.20", "parity-scale-codec", - "polkadot-core-primitives 7.0.0", + "polkadot-core-primitives", "scale-info", "serde", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-core", + "sp-runtime", + "sp-weights", ] [[package]] @@ -14120,22 +14071,22 @@ dependencies = [ "hex-literal", "log", "parity-scale-codec", - "polkadot-core-primitives 7.0.0", + "polkadot-core-primitives", "polkadot-parachain-primitives", "scale-info", "serde", "sp-api", - "sp-application-crypto 30.0.0", - "sp-arithmetic 23.0.0", + "sp-application-crypto", + "sp-arithmetic", "sp-authority-discovery", 
"sp-consensus-slots", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-io 30.0.0", - "sp-keystore 0.34.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-keystore", + "sp-runtime", "sp-staking", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-std", "thiserror 1.0.69", ] @@ -14174,12 +14125,12 @@ dependencies = [ "serde", "slot-range-helper", "sp-api", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-io 30.0.0", + "sp-io", "sp-keyring", "sp-npos-elections", - "sp-runtime 31.0.1", + "sp-runtime", "sp-session", "sp-staking", "staging-xcm", @@ -14197,7 +14148,7 @@ dependencies = [ "frame-benchmarking", "parity-scale-codec", "polkadot-primitives", - "sp-tracing 16.0.0", + "sp-tracing", ] [[package]] @@ -14224,7 +14175,7 @@ dependencies = [ "pallet-staking", "pallet-timestamp", "parity-scale-codec", - "polkadot-core-primitives 7.0.0", + "polkadot-core-primitives", "polkadot-parachain-primitives", "polkadot-primitives", "polkadot-runtime-metrics", @@ -14233,16 +14184,16 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto 30.0.0", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", + "sp-application-crypto", + "sp-arithmetic", + "sp-core", "sp-inherents", - "sp-io 30.0.0", - "sp-keystore 0.34.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-keystore", + "sp-runtime", "sp-session", "sp-staking", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-std", "staging-xcm", "staging-xcm-executor", ] @@ -14254,7 +14205,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "asset-test-utils", "assets-common", - "binary-merkle-tree 13.0.0", + "binary-merkle-tree", "bp-header-chain", "bp-messages", "bp-parachains", @@ -14420,7 +14371,7 @@ dependencies = [ "pallet-xcm-precompiles", "parachains-common", "parachains-runtimes-test-utils", - "polkadot-core-primitives 7.0.0", + "polkadot-core-primitives", "polkadot-parachain-primitives", "polkadot-primitives", "polkadot-runtime-common", @@ -14458,8 +14409,8 @@ dependencies = [ "slot-range-helper", "sp-api", "sp-api-proc-macro", - "sp-application-crypto 30.0.0", - "sp-arithmetic 23.0.0", + "sp-application-crypto", + "sp-arithmetic", "sp-authority-discovery", "sp-block-builder", "sp-blockchain", @@ -14470,50 +14421,50 @@ dependencies = [ "sp-consensus-grandpa", "sp-consensus-pow", "sp-consensus-slots", - "sp-core 28.0.0", + "sp-core", "sp-core-hashing", "sp-core-hashing-proc-macro", "sp-crypto-ec-utils", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "sp-crypto-hashing-proc-macro", "sp-database", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-externalities 0.25.0", + "sp-debug-derive", + "sp-externalities", "sp-genesis-builder", "sp-inherents", - "sp-io 30.0.0", + "sp-io", "sp-keyring", - "sp-keystore 0.34.0", + "sp-keystore", "sp-metadata-ir", "sp-mixnet", "sp-mmr-primitives", "sp-npos-elections", "sp-offchain", - "sp-panic-handler 13.0.0", + "sp-panic-handler", "sp-rpc", - "sp-runtime 31.0.1", - "sp-runtime-interface 24.0.0", - "sp-runtime-interface-proc-macro 17.0.0", + "sp-runtime", + "sp-runtime-interface", + "sp-runtime-interface-proc-macro", "sp-session", "sp-staking", - "sp-state-machine 0.35.0", + "sp-state-machine", "sp-statement-store", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-storage 19.0.0", + "sp-std", + "sp-storage", "sp-timestamp", - "sp-tracing 
16.0.0", + "sp-tracing", "sp-transaction-pool", "sp-transaction-storage-proof", - "sp-trie 29.0.0", + "sp-trie", "sp-version", "sp-version-proc-macro", - "sp-wasm-interface 20.0.0", - "sp-weights 27.0.0", + "sp-wasm-interface", + "sp-weights", "staging-parachain-info", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", - "substrate-bip39 0.4.7", + "substrate-bip39", "substrate-frame-rpc-support", "substrate-frame-rpc-system", "substrate-rpc-client", @@ -14540,19 +14491,19 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-arithmetic 23.0.0", + "sp-arithmetic", "sp-block-builder", "sp-consensus-aura", "sp-consensus-grandpa", - "sp-core 28.0.0", + "sp-core", "sp-genesis-builder", "sp-inherents", - "sp-io 30.0.0", + "sp-io", "sp-keyring", "sp-offchain", - "sp-runtime 31.0.1", + "sp-runtime", "sp-session", - "sp-storage 19.0.0", + "sp-storage", "sp-transaction-pool", "sp-version", ] @@ -14601,12 +14552,6 @@ dependencies = [ "log", ] -[[package]] -name = "polkavm-common" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed9e5af472f729fcf3b3c1cf17508ddbb3505259dd6e2ee0fb5a29e105d22" - [[package]] name = "polkavm-common" version = "0.26.0" @@ -14628,15 +14573,6 @@ dependencies = [ "polkavm-assembler 0.29.0", ] -[[package]] -name = "polkavm-derive" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "176144f8661117ea95fa7cf868c9a62d6b143e8a2ebcb7582464c3faade8669a" -dependencies = [ - "polkavm-derive-impl-macro 0.24.0", -] - [[package]] name = "polkavm-derive" version = "0.26.0" @@ -14655,18 +14591,6 @@ dependencies = [ "polkavm-derive-impl-macro 0.29.0", ] -[[package]] -name = "polkavm-derive-impl" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5a21844afdfcc10c92b9ef288ccb926211af27478d1730fcd55e4aec710179d" -dependencies = [ - "polkavm-common 0.24.0", - "proc-macro2", - "quote", - "syn 2.0.104", -] - [[package]] name = "polkavm-derive-impl" version = "0.26.0" @@ -14691,16 +14615,6 @@ dependencies = [ "syn 2.0.104", ] -[[package]] -name = "polkavm-derive-impl-macro" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba0ef0f17ad81413ea1ca5b1b67553aedf5650c88269b673d3ba015c83bc2651" -dependencies = [ - "polkavm-derive-impl 0.24.0", - "syn 2.0.104", -] - [[package]] name = "polkavm-derive-impl-macro" version = "0.26.0" @@ -16144,9 +16058,9 @@ dependencies = [ "polkadot-primitives", "polkadot-runtime-common", "smallvec", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-core", + "sp-runtime", + "sp-weights", "staging-xcm", "staging-xcm-builder", ] @@ -16654,8 +16568,8 @@ version = "23.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", - "sp-core 28.0.0", - "sp-wasm-interface 20.0.0", + "sp-core", + "sp-wasm-interface", "thiserror 1.0.69", ] @@ -16674,11 +16588,11 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-consensus", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-runtime 31.0.1", - "sp-trie 29.0.0", - "substrate-prometheus-endpoint 0.17.0", + "sp-runtime", + "sp-trie", + "substrate-prometheus-endpoint", ] [[package]] @@ -16690,10 +16604,10 @@ dependencies = [ "sp-api", "sp-block-builder", "sp-blockchain", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-runtime 31.0.1", - "sp-trie 29.0.0", + "sp-runtime", + "sp-trie", ] [[package]] @@ -16713,13 
+16627,13 @@ dependencies = [ "serde", "serde_json", "sp-blockchain", - "sp-core 28.0.0", + "sp-core", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "sp-genesis-builder", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", - "sp-tracing 16.0.0", + "sp-io", + "sp-runtime", + "sp-state-machine", + "sp-tracing", ] [[package]] @@ -16765,11 +16679,11 @@ dependencies = [ "serde", "serde_json", "sp-blockchain", - "sp-core 28.0.0", + "sp-core", "sp-keyring", - "sp-keystore 0.34.0", - "sp-panic-handler 13.0.0", - "sp-runtime 31.0.1", + "sp-keystore", + "sp-panic-handler", + "sp-runtime", "sp-version", "thiserror 1.0.69", "tokio", @@ -16791,14 +16705,14 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-consensus", - "sp-core 28.0.0", + "sp-core", "sp-database", - "sp-externalities 0.25.0", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", - "sp-storage 19.0.0", - "sp-trie 29.0.0", - "substrate-prometheus-endpoint 0.17.0", + "sp-externalities", + "sp-runtime", + "sp-state-machine", + "sp-storage", + "sp-trie", + "substrate-prometheus-endpoint", ] [[package]] @@ -16818,14 +16732,14 @@ dependencies = [ "sc-client-api", "sc-state-db", "schnellru", - "sp-arithmetic 23.0.0", + "sp-arithmetic", "sp-blockchain", - "sp-core 28.0.0", + "sp-core", "sp-database", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", - "sp-trie 29.0.0", - "substrate-prometheus-endpoint 0.17.0", + "sp-runtime", + "sp-state-machine", + "sp-trie", + "substrate-prometheus-endpoint", "sysinfo", ] @@ -16845,10 +16759,10 @@ dependencies = [ "serde", "sp-blockchain", "sp-consensus", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", - "substrate-prometheus-endpoint 0.17.0", + "sp-core", + "sp-runtime", + "sp-state-machine", + "substrate-prometheus-endpoint", "thiserror 1.0.69", ] @@ -16869,17 +16783,17 @@ dependencies = [ "sc-consensus-slots", "sc-telemetry", "sp-api", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-block-builder", "sp-blockchain", "sp-consensus", "sp-consensus-aura", "sp-consensus-slots", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-keystore 0.34.0", - "sp-runtime 31.0.1", - "substrate-prometheus-endpoint 0.17.0", + "sp-keystore", + "sp-runtime", + "substrate-prometheus-endpoint", "thiserror 1.0.69", ] @@ -16904,19 +16818,19 @@ dependencies = [ "sc-telemetry", "sc-transaction-pool-api", "sp-api", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-block-builder", "sp-blockchain", "sp-consensus", "sp-consensus-babe", "sp-consensus-slots", - "sp-core 28.0.0", + "sp-core", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", "sp-inherents", - "sp-keystore 0.34.0", - "sp-runtime 31.0.1", + "sp-keystore", + "sp-runtime", "sp-timestamp", - "substrate-prometheus-endpoint 0.17.0", + "substrate-prometheus-endpoint", "thiserror 1.0.69", ] @@ -16930,7 +16844,7 @@ dependencies = [ "sc-client-api", "sc-consensus", "sp-blockchain", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -16959,12 +16873,12 @@ dependencies = [ "sp-consensus-aura", "sp-consensus-babe", "sp-consensus-slots", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-keystore 0.34.0", - "sp-runtime 31.0.1", + "sp-keystore", + "sp-runtime", "sp-timestamp", - "substrate-prometheus-endpoint 0.17.0", + "substrate-prometheus-endpoint", "thiserror 1.0.69", ] @@ -16981,14 +16895,14 @@ dependencies = [ "sc-client-api", "sc-consensus", "sc-telemetry", - "sp-arithmetic 23.0.0", + "sp-arithmetic", 
"sp-blockchain", "sp-consensus", "sp-consensus-slots", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", + "sp-runtime", + "sp-state-machine", ] [[package]] @@ -17003,14 +16917,14 @@ dependencies = [ "sc-executor-wasmtime", "schnellru", "sp-api", - "sp-core 28.0.0", - "sp-externalities 0.25.0", - "sp-io 30.0.0", - "sp-panic-handler 13.0.0", - "sp-runtime-interface 24.0.0", - "sp-trie 29.0.0", + "sp-core", + "sp-externalities", + "sp-io", + "sp-panic-handler", + "sp-runtime-interface", + "sp-trie", "sp-version", - "sp-wasm-interface 20.0.0", + "sp-wasm-interface", "tracing", ] @@ -17022,7 +16936,7 @@ dependencies = [ "polkavm 0.26.0", "sc-allocator", "sp-maybe-compressed-blob", - "sp-wasm-interface 20.0.0", + "sp-wasm-interface", "thiserror 1.0.69", "wasm-instrument", ] @@ -17035,7 +16949,7 @@ dependencies = [ "log", "polkavm 0.26.0", "sc-executor-common", - "sp-wasm-interface 20.0.0", + "sp-wasm-interface", ] [[package]] @@ -17049,8 +16963,8 @@ dependencies = [ "rustix 1.0.8", "sc-allocator", "sc-executor-common", - "sp-runtime-interface 24.0.0", - "sp-wasm-interface 20.0.0", + "sp-runtime-interface", + "sp-wasm-interface", "wasmtime", ] @@ -17067,7 +16981,7 @@ dependencies = [ "sc-network", "sc-network-sync", "sp-blockchain", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -17078,9 +16992,9 @@ dependencies = [ "array-bytes", "parking_lot 0.12.4", "serde_json", - "sp-application-crypto 30.0.0", - "sp-core 28.0.0", - "sp-keystore 0.34.0", + "sp-application-crypto", + "sp-core", + "sp-keystore", "thiserror 1.0.69", ] @@ -17105,10 +17019,10 @@ dependencies = [ "sc-transaction-pool-api", "sp-api", "sp-consensus", - "sp-core 28.0.0", - "sp-keystore 0.34.0", + "sp-core", + "sp-keystore", "sp-mixnet", - "sp-runtime 31.0.1", + "sp-runtime", "thiserror 1.0.69", ] @@ -17148,11 +17062,11 @@ dependencies = [ "serde", "serde_json", "smallvec", - "sp-arithmetic 23.0.0", + "sp-arithmetic", "sp-blockchain", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "substrate-prometheus-endpoint 0.17.0", + "sp-core", + "sp-runtime", + "substrate-prometheus-endpoint", "thiserror 1.0.69", "tokio", "tokio-stream", @@ -17169,7 +17083,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "bitflags 1.3.2", "parity-scale-codec", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -17188,8 +17102,8 @@ dependencies = [ "sc-network", "sc-network-types", "sp-blockchain", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", "thiserror 1.0.69", ] @@ -17216,13 +17130,13 @@ dependencies = [ "sc-utils", "schnellru", "smallvec", - "sp-arithmetic 23.0.0", + "sp-arithmetic", "sp-blockchain", "sp-consensus", "sp-consensus-grandpa", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "substrate-prometheus-endpoint 0.17.0", + "sp-core", + "sp-runtime", + "substrate-prometheus-endpoint", "thiserror 1.0.69", "tokio", "tokio-stream", @@ -17243,8 +17157,8 @@ dependencies = [ "sc-network-types", "sc-utils", "sp-consensus", - "sp-runtime 31.0.1", - "substrate-prometheus-endpoint 0.17.0", + "sp-runtime", + "substrate-prometheus-endpoint", ] [[package]] @@ -17274,7 +17188,7 @@ version = "0.17.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "log", - "substrate-prometheus-endpoint 0.17.0", + "substrate-prometheus-endpoint", ] [[package]] @@ -17298,11 +17212,11 @@ dependencies = [ "serde_json", "sp-api", "sp-blockchain", - "sp-core 28.0.0", - 
"sp-keystore 0.34.0", + "sp-core", + "sp-keystore", "sp-offchain", "sp-rpc", - "sp-runtime 31.0.1", + "sp-runtime", "sp-session", "sp-statement-store", "sp-version", @@ -17322,9 +17236,9 @@ dependencies = [ "scale-info", "serde", "serde_json", - "sp-core 28.0.0", + "sp-core", "sp-rpc", - "sp-runtime 31.0.1", + "sp-runtime", "sp-version", "thiserror 1.0.69", ] @@ -17347,7 +17261,7 @@ dependencies = [ "sc-rpc-api", "serde", "serde_json", - "substrate-prometheus-endpoint 0.17.0", + "substrate-prometheus-endpoint", "tokio", "tower 0.4.13", "tower-http 0.5.2", @@ -17376,11 +17290,11 @@ dependencies = [ "serde", "sp-api", "sp-blockchain", - "sp-core 28.0.0", + "sp-core", "sp-rpc", - "sp-runtime 31.0.1", + "sp-runtime", "sp-version", - "substrate-prometheus-endpoint 0.17.0", + "substrate-prometheus-endpoint", "thiserror 1.0.69", "tokio", "tokio-stream", @@ -17394,10 +17308,10 @@ dependencies = [ "parity-scale-codec", "sc-executor", "sc-executor-common", - "sp-core 28.0.0", + "sp-core", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-state-machine 0.35.0", - "sp-wasm-interface 20.0.0", + "sp-state-machine", + "sp-wasm-interface", "thiserror 1.0.69", ] @@ -17445,19 +17359,19 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-consensus", - "sp-core 28.0.0", - "sp-externalities 0.25.0", - "sp-keystore 0.34.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-externalities", + "sp-keystore", + "sp-runtime", "sp-session", - "sp-state-machine 0.35.0", - "sp-storage 19.0.0", + "sp-state-machine", + "sp-storage", "sp-transaction-pool", "sp-transaction-storage-proof", - "sp-trie 29.0.0", + "sp-trie", "sp-version", "static_init", - "substrate-prometheus-endpoint 0.17.0", + "substrate-prometheus-endpoint", "tempfile", "thiserror 1.0.69", "tokio", @@ -17473,7 +17387,7 @@ dependencies = [ "log", "parity-scale-codec", "parking_lot 0.12.4", - "sp-core 28.0.0", + "sp-core", ] [[package]] @@ -17491,9 +17405,9 @@ dependencies = [ "sc-telemetry", "serde", "serde_json", - "sp-core 28.0.0", + "sp-core", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-io 30.0.0", + "sp-io", ] [[package]] @@ -17533,10 +17447,10 @@ dependencies = [ "serde", "sp-api", "sp-blockchain", - "sp-core 28.0.0", + "sp-core", "sp-rpc", - "sp-runtime 31.0.1", - "sp-tracing 16.0.0", + "sp-runtime", + "sp-tracing", "thiserror 1.0.69", "tracing", "tracing-log", @@ -17573,12 +17487,12 @@ dependencies = [ "serde", "sp-api", "sp-blockchain", - "sp-core 28.0.0", + "sp-core", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-runtime 31.0.1", - "sp-tracing 16.0.0", + "sp-runtime", + "sp-tracing", "sp-transaction-pool", - "substrate-prometheus-endpoint 0.17.0", + "substrate-prometheus-endpoint", "thiserror 1.0.69", "tokio", "tokio-stream", @@ -17597,8 +17511,8 @@ dependencies = [ "parity-scale-codec", "serde", "sp-blockchain", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", "thiserror 1.0.69", ] @@ -17613,7 +17527,7 @@ dependencies = [ "log", "parking_lot 0.12.4", "prometheus", - "sp-arithmetic 23.0.0", + "sp-arithmetic", ] [[package]] @@ -18515,7 +18429,7 @@ dependencies = [ "enumn", "parity-scale-codec", "paste", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -18703,11 +18617,11 @@ dependencies = [ "polkadot-parachain-primitives", "scale-info", "serde", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-std 14.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", + "sp-std", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", @@ -18935,13 +18849,13 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api-proc-macro", - "sp-core 28.0.0", - "sp-externalities 0.25.0", + "sp-core", + "sp-externalities", "sp-metadata-ir", - "sp-runtime 31.0.1", - "sp-runtime-interface 24.0.0", - "sp-state-machine 0.35.0", - "sp-trie 29.0.0", + "sp-runtime", + "sp-runtime-interface", + "sp-state-machine", + "sp-trie", "sp-version", "thiserror 1.0.69", ] @@ -18968,21 +18882,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-io 30.0.0", -] - -[[package]] -name = "sp-application-crypto" -version = "41.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28c668f1ce424bc131f40ade33fa4c0bd4dcd2428479e1e291aad66d4b00c74f" -dependencies = [ - "parity-scale-codec", - "scale-info", - "serde", - "sp-core 37.0.0", - "sp-io 41.0.1", + "sp-core", + "sp-io", ] [[package]] @@ -18999,21 +18900,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "sp-arithmetic" -version = "27.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2929fd12ac6ca3cfac7f62885866810ba4e9464814dbaa87592b5b5681b29aee" -dependencies = [ - "docify", - "integer-sqrt", - "num-traits", - "parity-scale-codec", - "scale-info", - "serde", - "static_assertions", -] - [[package]] name = "sp-authority-discovery" version = "26.0.0" @@ -19022,8 +18908,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-application-crypto 30.0.0", - "sp-runtime 31.0.1", + "sp-application-crypto", + "sp-runtime", ] [[package]] @@ -19033,7 +18919,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "sp-api", "sp-inherents", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -19047,10 +18933,10 @@ dependencies = [ "schnellru", "sp-api", "sp-consensus", - "sp-core 28.0.0", + "sp-core", "sp-database", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", + "sp-runtime", + "sp-state-machine", "thiserror 1.0.69", "tracing", ] @@ -19064,8 +18950,8 @@ dependencies = [ "futures", "log", "sp-inherents", - "sp-runtime 31.0.1", - "sp-state-machine 0.35.0", + "sp-runtime", + "sp-state-machine", "thiserror 1.0.69", ] @@ -19078,10 +18964,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-consensus-slots", "sp-inherents", - "sp-runtime 31.0.1", + "sp-runtime", "sp-timestamp", ] @@ -19095,11 +18981,11 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto 30.0.0", + "sp-application-crypto", "sp-consensus-slots", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-runtime 31.0.1", + "sp-runtime", "sp-timestamp", ] @@ -19112,14 +18998,14 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto 30.0.0", - "sp-core 28.0.0", + "sp-application-crypto", + "sp-core", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-io 30.0.0", - "sp-keystore 0.34.0", + "sp-io", + "sp-keystore", "sp-mmr-primitives", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-runtime", + "sp-weights", "strum 0.26.3", ] @@ -19134,10 +19020,10 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto 30.0.0", - "sp-core 28.0.0", - "sp-keystore 0.34.0", - "sp-runtime 
31.0.1", + "sp-application-crypto", + "sp-core", + "sp-keystore", + "sp-runtime", ] [[package]] @@ -19147,8 +19033,8 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "parity-scale-codec", "sp-api", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -19197,61 +19083,12 @@ dependencies = [ "serde", "sha2 0.10.9", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-externalities 0.25.0", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-storage 19.0.0", + "sp-debug-derive", + "sp-externalities", + "sp-std", + "sp-storage", "ss58-registry", - "substrate-bip39 0.4.7", - "thiserror 1.0.69", - "tracing", - "w3f-bls", - "zeroize", -] - -[[package]] -name = "sp-core" -version = "37.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e1a46a6b2323401e4489184846a7fb7d89091b42602a2391cd3ef652ede2850" -dependencies = [ - "ark-vrf", - "array-bytes", - "bitflags 1.3.2", - "blake2 0.10.6", - "bounded-collections 0.2.4", - "bs58", - "dyn-clone", - "ed25519-zebra", - "futures", - "hash-db", - "hash256-std-hasher", - "impl-serde", - "itertools 0.11.0", - "k256", - "libsecp256k1", - "log", - "merlin", - "parity-bip39", - "parity-scale-codec", - "parking_lot 0.12.4", - "paste", - "primitive-types 0.13.1", - "rand 0.8.5", - "scale-info", - "schnorrkel 0.11.5", - "secp256k1 0.28.2", - "secrecy 0.8.0", - "serde", - "sha2 0.10.9", - "sp-crypto-hashing 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sp-debug-derive 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sp-externalities 0.30.0", - "sp-runtime-interface 30.0.0", - "sp-std 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sp-storage 22.0.0", - "ss58-registry", - "substrate-bip39 0.6.0", + "substrate-bip39", "thiserror 1.0.69", "tracing", "w3f-bls", @@ -19291,7 +19128,7 @@ dependencies = [ "ark-ed-on-bls12-381-bandersnatch", "ark-ed-on-bls12-381-bandersnatch-ext", "ark-scale", - "sp-runtime-interface 24.0.0", + "sp-runtime-interface", ] [[package]] @@ -19340,17 +19177,6 @@ dependencies = [ "parking_lot 0.12.4", ] -[[package]] -name = "sp-debug-derive" -version = "14.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d09fa0a5f7299fb81ee25ae3853d26200f7a348148aed6de76be905c007dbe" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.104", -] - [[package]] name = "sp-debug-derive" version = "14.0.0" @@ -19368,18 +19194,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "environmental", "parity-scale-codec", - "sp-storage 19.0.0", -] - -[[package]] -name = "sp-externalities" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30cbf059dce180a8bf8b6c8b08b6290fa3d1c7f069a60f1df038ab5dd5fc0ba6" -dependencies = [ - "environmental", - "parity-scale-codec", - "sp-storage 22.0.0", + "sp-storage", ] [[package]] @@ -19391,7 +19206,7 @@ dependencies = [ "scale-info", "serde_json", "sp-api", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -19403,7 +19218,7 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", "thiserror 1.0.69", ] @@ -19421,41 +19236,14 @@ dependencies = [ "polkavm-derive 0.26.0", 
"rustversion", "secp256k1 0.28.2", - "sp-core 28.0.0", + "sp-core", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-externalities 0.25.0", - "sp-keystore 0.34.0", - "sp-runtime-interface 24.0.0", - "sp-state-machine 0.35.0", - "sp-tracing 16.0.0", - "sp-trie 29.0.0", - "tracing", - "tracing-core", -] - -[[package]] -name = "sp-io" -version = "41.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3f244e9a2818d21220ceb0915ac73a462814a92d0c354a124a818abdb7f4f66" -dependencies = [ - "bytes", - "docify", - "ed25519-dalek", - "libsecp256k1", - "log", - "parity-scale-codec", - "polkavm-derive 0.24.0", - "rustversion", - "secp256k1 0.28.2", - "sp-core 37.0.0", - "sp-crypto-hashing 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sp-externalities 0.30.0", - "sp-keystore 0.43.0", - "sp-runtime-interface 30.0.0", - "sp-state-machine 0.46.0", - "sp-tracing 17.1.0", - "sp-trie 40.0.0", + "sp-externalities", + "sp-keystore", + "sp-runtime-interface", + "sp-state-machine", + "sp-tracing", + "sp-trie", "tracing", "tracing-core", ] @@ -19465,8 +19253,8 @@ name = "sp-keyring" version = "31.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", "strum 0.26.3", ] @@ -19477,20 +19265,8 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "parity-scale-codec", "parking_lot 0.12.4", - "sp-core 28.0.0", - "sp-externalities 0.25.0", -] - -[[package]] -name = "sp-keystore" -version = "0.43.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "269d0ee360f6d072f9203485afea35583ac151521a525cc48b2a107fc576c2d9" -dependencies = [ - "parity-scale-codec", - "parking_lot 0.12.4", - "sp-core 37.0.0", - "sp-externalities 0.30.0", + "sp-core", + "sp-externalities", ] [[package]] @@ -19520,7 +19296,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-application-crypto 30.0.0", + "sp-application-crypto", ] [[package]] @@ -19534,9 +19310,9 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-core 28.0.0", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-runtime 31.0.1", + "sp-core", + "sp-debug-derive", + "sp-runtime", "thiserror 1.0.69", ] @@ -19548,9 +19324,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-arithmetic", + "sp-core", + "sp-runtime", ] [[package]] @@ -19559,8 +19335,8 @@ version = "26.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-api", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -19572,16 +19348,6 @@ dependencies = [ "regex", ] -[[package]] -name = "sp-panic-handler" -version = "13.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8b52e69a577cbfdea62bfaf16f59eb884422ce98f78b5cd8d9bf668776bced1" -dependencies = [ - "backtrace", - "regex", -] - [[package]] name = "sp-rpc" version = "26.0.0" @@ -19589,7 +19355,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "rustc-hash 1.1.0", "serde", - "sp-core 28.0.0", + "sp-core", ] [[package]] @@ -19597,7 +19363,7 @@ name = "sp-runtime" version = 
"31.0.1" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ - "binary-merkle-tree 13.0.0", + "binary-merkle-tree", "bytes", "docify", "either", @@ -19611,43 +19377,13 @@ dependencies = [ "scale-info", "serde", "simple-mermaid", - "sp-application-crypto 30.0.0", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-trie 29.0.0", - "sp-weights 27.0.0", - "tracing", - "tuplex", -] - -[[package]] -name = "sp-runtime" -version = "42.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b25d4d3811410317175ff121b3ff8c8b723504dadf37cd418b5192a5098d11bf" -dependencies = [ - "binary-merkle-tree 16.1.0", - "docify", - "either", - "hash256-std-hasher", - "impl-trait-for-tuples", - "log", - "num-traits", - "parity-scale-codec", - "paste", - "rand 0.8.5", - "scale-info", - "serde", - "simple-mermaid", - "sp-application-crypto 41.0.0", - "sp-arithmetic 27.0.0", - "sp-core 37.0.0", - "sp-io 41.0.1", - "sp-std 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sp-trie 40.0.0", - "sp-weights 32.0.0", + "sp-application-crypto", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-std", + "sp-trie", + "sp-weights", "tracing", "tuplex", ] @@ -19661,32 +19397,12 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "polkavm-derive 0.26.0", - "sp-externalities 0.25.0", - "sp-runtime-interface-proc-macro 17.0.0", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-storage 19.0.0", - "sp-tracing 16.0.0", - "sp-wasm-interface 20.0.0", - "static_assertions", -] - -[[package]] -name = "sp-runtime-interface" -version = "30.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fcd9c219da8c85d45d5ae1ce80e73863a872ac27424880322903c6ac893c06e" -dependencies = [ - "bytes", - "impl-trait-for-tuples", - "parity-scale-codec", - "polkavm-derive 0.24.0", - "primitive-types 0.13.1", - "sp-externalities 0.30.0", - "sp-runtime-interface-proc-macro 19.0.0", - "sp-std 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sp-storage 22.0.0", - "sp-tracing 17.1.0", - "sp-wasm-interface 22.0.0", + "sp-externalities", + "sp-runtime-interface-proc-macro", + "sp-std", + "sp-storage", + "sp-tracing", + "sp-wasm-interface", "static_assertions", ] @@ -19703,20 +19419,6 @@ dependencies = [ "syn 2.0.104", ] -[[package]] -name = "sp-runtime-interface-proc-macro" -version = "19.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca35431af10a450787ebfdcb6d7a91c23fa91eafe73a3f9d37db05c9ab36154b" -dependencies = [ - "Inflector", - "expander", - "proc-macro-crate 3.3.0", - "proc-macro2", - "quote", - "syn 2.0.104", -] - [[package]] name = "sp-session" version = "27.0.0" @@ -19725,9 +19427,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-core 28.0.0", - "sp-keystore 0.34.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-keystore", + "sp-runtime", "sp-staking", ] @@ -19740,8 +19442,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -19755,31 +19457,10 @@ dependencies = [ "parking_lot 0.12.4", "rand 0.8.5", "smallvec", - "sp-core 28.0.0", - "sp-externalities 0.25.0", - "sp-panic-handler 13.0.0", - "sp-trie 29.0.0", - "thiserror 1.0.69", - "tracing", - "trie-db", -] - 
-[[package]] -name = "sp-state-machine" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "483422b016ee9ddba949db6d3092961ed58526520f0586df74dc07defd922a58" -dependencies = [ - "hash-db", - "log", - "parity-scale-codec", - "parking_lot 0.12.4", - "rand 0.8.5", - "smallvec", - "sp-core 37.0.0", - "sp-externalities 0.30.0", - "sp-panic-handler 13.0.2", - "sp-trie 40.0.0", + "sp-core", + "sp-externalities", + "sp-panic-handler", + "sp-trie", "thiserror 1.0.69", "tracing", "trie-db", @@ -19799,22 +19480,16 @@ dependencies = [ "scale-info", "sha2 0.10.9", "sp-api", - "sp-application-crypto 30.0.0", - "sp-core 28.0.0", + "sp-application-crypto", + "sp-core", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", - "sp-externalities 0.25.0", - "sp-runtime 31.0.1", - "sp-runtime-interface 24.0.0", + "sp-externalities", + "sp-runtime", + "sp-runtime-interface", "thiserror 1.0.69", "x25519-dalek", ] -[[package]] -name = "sp-std" -version = "14.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f8ee986414b0a9ad741776762f4083cd3a5128449b982a3919c4df36874834" - [[package]] name = "sp-std" version = "14.0.0" @@ -19829,20 +19504,7 @@ dependencies = [ "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", -] - -[[package]] -name = "sp-storage" -version = "22.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee3b70ca340e41cde9d2e069d354508a6e37a6573d66f7cc38f11549002f64ec" -dependencies = [ - "impl-serde", - "parity-scale-codec", - "ref-cast", - "serde", - "sp-debug-derive 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-debug-derive", ] [[package]] @@ -19853,7 +19515,7 @@ dependencies = [ "async-trait", "parity-scale-codec", "sp-inherents", - "sp-runtime 31.0.1", + "sp-runtime", "thiserror 1.0.69", ] @@ -19869,25 +19531,13 @@ dependencies = [ "tracing-subscriber 0.3.20", ] -[[package]] -name = "sp-tracing" -version = "17.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6147a5b8c98b9ed4bf99dc033fab97a468b4645515460974c8784daeb7c35433" -dependencies = [ - "parity-scale-codec", - "tracing", - "tracing-core", - "tracing-subscriber 0.3.20", -] - [[package]] name = "sp-transaction-pool" version = "26.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" dependencies = [ "sp-api", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -19898,10 +19548,10 @@ dependencies = [ "async-trait", "parity-scale-codec", "scale-info", - "sp-core 28.0.0", + "sp-core", "sp-inherents", - "sp-runtime 31.0.1", - "sp-trie 29.0.0", + "sp-runtime", + "sp-trie", ] [[package]] @@ -19920,35 +19570,9 @@ dependencies = [ "rand 0.8.5", "scale-info", "schnellru", - "sp-core 28.0.0", - "sp-externalities 0.25.0", - "substrate-prometheus-endpoint 0.17.0", - "thiserror 1.0.69", - "tracing", - "trie-db", - "trie-root", -] - -[[package]] -name = "sp-trie" -version = "40.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b2e157c9cf44a1a9d20f3c69322e302db70399bf3f218211387fe009dd4041c" -dependencies = [ - "ahash", - "foldhash 0.1.5", - "hash-db", - "hashbrown 0.15.4", - "memory-db", - "nohash-hasher", - "parity-scale-codec", - "parking_lot 0.12.4", - "rand 0.8.5", - "scale-info", - "schnellru", - "sp-core 37.0.0", - "sp-externalities 0.30.0", - 
"substrate-prometheus-endpoint 0.17.7", + "sp-core", + "sp-externalities", + "substrate-prometheus-endpoint", "thiserror 1.0.69", "tracing", "trie-db", @@ -19966,8 +19590,8 @@ dependencies = [ "scale-info", "serde", "sp-crypto-hashing-proc-macro", - "sp-runtime 31.0.1", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", + "sp-runtime", + "sp-std", "sp-version-proc-macro", "thiserror 1.0.69", ] @@ -19996,18 +19620,6 @@ dependencies = [ "wasmtime", ] -[[package]] -name = "sp-wasm-interface" -version = "22.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdbc579c72fc03263894a0077383f543a093020d75741092511bb05a440ada6" -dependencies = [ - "anyhow", - "impl-trait-for-tuples", - "log", - "parity-scale-codec", -] - [[package]] name = "sp-weights" version = "27.0.0" @@ -20018,23 +19630,8 @@ dependencies = [ "scale-info", "serde", "smallvec", - "sp-arithmetic 23.0.0", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", -] - -[[package]] -name = "sp-weights" -version = "32.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8a1d448faceb064bb114df31fc45ff86ea2ee8fd17810c4357a578d081f7732" -dependencies = [ - "bounded-collections 0.2.4", - "parity-scale-codec", - "scale-info", - "serde", - "smallvec", - "sp-arithmetic 27.0.0", - "sp-debug-derive 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-arithmetic", + "sp-debug-derive", ] [[package]] @@ -20313,7 +19910,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -20331,8 +19928,8 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-runtime", + "sp-weights", "tracing", "xcm-procedural", ] @@ -20351,11 +19948,11 @@ dependencies = [ "parity-scale-codec", "polkadot-parachain-primitives", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", + "sp-weights", "staging-xcm", "staging-xcm-executor", "tracing", @@ -20372,11 +19969,11 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-arithmetic 23.0.0", - "sp-core 28.0.0", - "sp-io 30.0.0", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", + "sp-weights", "staging-xcm", "tracing", ] @@ -20557,19 +20154,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "substrate-bip39" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca58ffd742f693dc13d69bdbb2e642ae239e0053f6aab3b104252892f856700a" -dependencies = [ - "hmac 0.12.1", - "pbkdf2 0.12.2", - "schnorrkel 0.11.5", - "sha2 0.10.9", - "zeroize", -] - [[package]] name = "substrate-bn" version = "0.6.0" @@ -20594,7 +20178,7 @@ dependencies = [ "sc-rpc-api", "scale-info", "serde", - "sp-storage 19.0.0", + "sp-storage", ] [[package]] @@ -20613,8 +20197,8 @@ dependencies = [ "sp-api", "sp-block-builder", "sp-blockchain", - "sp-core 28.0.0", - "sp-runtime 31.0.1", + "sp-core", + "sp-runtime", ] [[package]] @@ -20631,21 +20215,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "substrate-prometheus-endpoint" -version = "0.17.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d23e4bc8e910a312820d589047ab683928b761242dbe31dee081fbdb37cbe0be" -dependencies = [ - 
"http-body-util", - "hyper 1.6.0", - "hyper-util", - "log", - "prometheus", - "thiserror 1.0.69", - "tokio", -] - [[package]] name = "substrate-rpc-client" version = "0.33.0" @@ -20656,7 +20225,7 @@ dependencies = [ "log", "sc-rpc-api", "serde", - "sp-runtime 31.0.1", + "sp-runtime", ] [[package]] @@ -21216,10 +20785,10 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f dependencies = [ "cumulus-primitives-core", "frame-support", - "polkadot-core-primitives 7.0.0", + "polkadot-core-primitives", "rococo-runtime-constants", "smallvec", - "sp-runtime 31.0.1", + "sp-runtime", "staging-xcm", "westend-runtime-constants", ] @@ -23091,9 +22660,9 @@ dependencies = [ "polkadot-primitives", "polkadot-runtime-common", "smallvec", - "sp-core 28.0.0", - "sp-runtime 31.0.1", - "sp-weights 27.0.0", + "sp-core", + "sp-runtime", + "sp-weights", "staging-xcm", "staging-xcm-builder", ] @@ -23783,7 +23352,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-weights 27.0.0", + "sp-weights", "staging-xcm", "staging-xcm-executor", ] @@ -23797,13 +23366,13 @@ dependencies = [ "frame-system", "parity-scale-codec", "paste", - "polkadot-core-primitives 7.0.0", + "polkadot-core-primitives", "polkadot-parachain-primitives", "polkadot-primitives", "polkadot-runtime-parachains", "scale-info", - "sp-io 30.0.0", - "sp-runtime 31.0.1", + "sp-io", + "sp-runtime", "staging-xcm", "staging-xcm-builder", "staging-xcm-executor", diff --git a/crates/anvil-polkadot/Cargo.toml b/crates/anvil-polkadot/Cargo.toml index 3ff78737f19dc..c1abe8416719b 100644 --- a/crates/anvil-polkadot/Cargo.toml +++ b/crates/anvil-polkadot/Cargo.toml @@ -129,7 +129,6 @@ rand_08.workspace = true eyre.workspace = true lru = "0.16.0" indexmap = "2.0" -polkadot-core-primitives = "18.0.0" # cli clap = { version = "4", features = [ From c3dc885901a093eba2107e88463183818f2fbc3b Mon Sep 17 00:00:00 2001 From: Diego Date: Wed, 12 Nov 2025 19:57:37 -0300 Subject: [PATCH 22/44] fix comment --- .../substrate_node/lazy_loading/backend/forked_lazy_backend.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs index 57b85ac1be9b8..79c031636458f 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -32,7 +32,7 @@ pub struct RawIterArgs { pub start_at: Option>, } -/// A raw iterator over the `BenchmarkingState`. +/// A raw iterator over the storage keys. 
pub struct RawIter { pub(crate) args: RawIterArgs, complete: bool, From 3093410a43676a2aabe7fddd0bb95dc3ebc840d7 Mon Sep 17 00:00:00 2001 From: Diego Date: Wed, 12 Nov 2025 20:29:08 -0300 Subject: [PATCH 23/44] Refactor parking_lot::RWLock --- .../src/substrate_node/lazy_loading/backend/mod.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 9af49cef4e06a..498947689dab0 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -26,16 +26,17 @@ use std::{ collections::{HashMap, HashSet}, sync::Arc, }; +use parking_lot::RwLock; use crate::substrate_node::lazy_loading::rpc_client::RPCClient; pub struct Backend { pub(crate) rpc_client: Option>>, pub(crate) fork_checkpoint: Block::Header, - states: parking_lot::RwLock>>, + states: RwLock>>, pub(crate) blockchain: Blockchain, - import_lock: parking_lot::RwLock<()>, - pinned_blocks: parking_lot::RwLock>, + import_lock: RwLock<()>, + pinned_blocks: RwLock>, } impl Backend { @@ -126,7 +127,7 @@ impl backend::Backend for Backend backend::Backend for Backend backend::Backend for Backend &parking_lot::RwLock<()> { + fn get_import_lock(&self) -> &RwLock<()> { &self.import_lock } From 7c7caee64f0384abdccdb633f25d8639ba82e038 Mon Sep 17 00:00:00 2001 From: Diego Date: Wed, 12 Nov 2025 20:55:59 -0300 Subject: [PATCH 24/44] finish refactoring parking_lot::RWLock --- .../lazy_loading/backend/blockchain.rs | 5 +++-- .../lazy_loading/backend/tests.rs | 21 ++++++++++--------- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs index 07042a76dff21..53a3d9adf8268 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs @@ -14,6 +14,7 @@ use polkadot_sdk::{ }; use serde::de::DeserializeOwned; use std::{collections::HashMap, sync::Arc}; +use parking_lot::RwLock; #[derive(PartialEq, Eq, Clone)] pub(crate) enum StoredBlock { @@ -83,13 +84,13 @@ pub(crate) struct BlockchainStorage { #[derive(Clone)] pub struct Blockchain { rpc_client: Option>>, - pub(crate) storage: Arc>>, + pub(crate) storage: Arc>>, } impl Blockchain { /// Create new in-memory blockchain storage. 
pub(crate) fn new(rpc_client: Option>>) -> Self { - let storage = Arc::new(parking_lot::RwLock::new(BlockchainStorage { + let storage = Arc::new(RwLock::new(BlockchainStorage { blocks: HashMap::new(), hashes: HashMap::new(), best_hash: Default::default(), diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs index 95c9cc830171e..6e788ab305c0f 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -1,5 +1,6 @@ use super::*; use mock_rpc::{Rpc, TestBlock, TestHeader}; +use parking_lot::RwLock; use polkadot_sdk::{ sc_client_api::{Backend as BackendT, StateBackend}, sp_runtime::{ @@ -43,28 +44,28 @@ mod mock_rpc { pub struct Rpc { pub counters: std::sync::Arc, /// storage[(block_hash, key)] = value - pub storage: Arc>>, + pub storage: Arc>>, /// storage_hash[(block_hash, key)] = hash pub storage_hashes: - Arc>>, + Arc>>, /// storage_keys_paged[(block_hash, (prefix,start))] = Vec pub storage_keys_pages: - Arc), Vec>>>, + Arc), Vec>>>, /// headers[hash] = header - pub headers: Arc>>, + pub headers: Arc>>, /// blocks[hash] = SignedBlock - pub blocks: Arc>>>, + pub blocks: Arc>>>, } impl Rpc { pub fn new() -> Self { Self { counters: std::sync::Arc::new(Counters::default()), - storage: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), - storage_hashes: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), - storage_keys_pages: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), - headers: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), - blocks: std::sync::Arc::new(parking_lot::RwLock::new(BTreeMap::new())), + storage: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + storage_hashes: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + storage_keys_pages: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + headers: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + blocks: std::sync::Arc::new(RwLock::new(BTreeMap::new())), } } From d29abb0f725337c9149905556f87693c9f88e624 Mon Sep 17 00:00:00 2001 From: JimboJ <40345116+jimjbrettj@users.noreply.github.com> Date: Thu, 13 Nov 2025 05:35:09 -0300 Subject: [PATCH 25/44] anvil-polkadot: update forking feature branch with master (#400) --- Cargo.lock | 606 +++++++-------- Cargo.toml | 11 + .../anvil-polkadot/src/api_server/server.rs | 144 +++- crates/anvil-polkadot/src/cmd.rs | 5 - crates/anvil-polkadot/src/config.rs | 55 +- .../src/substrate_node/chain_spec.rs | 168 ++++- .../src/substrate_node/genesis.rs | 24 +- .../src/substrate_node/mining_engine.rs | 24 +- .../src/substrate_node/service/backend.rs | 31 + .../src/substrate_node/service/storage.rs | 6 + .../substrate-runtime/Cargo.toml | 1 + .../substrate-runtime/src/lib.rs | 28 +- crates/anvil-polkadot/test-data/genesis.json | 21 + crates/anvil-polkadot/tests/it/gas.rs | 171 +++++ crates/anvil-polkadot/tests/it/genesis.rs | 159 +++- crates/anvil-polkadot/tests/it/main.rs | 1 + crates/anvil-polkadot/tests/it/sign.rs | 2 - crates/anvil-polkadot/tests/it/snapshot.rs | 158 ++-- .../anvil-polkadot/tests/it/standard_rpc.rs | 24 +- .../anvil-polkadot/tests/it/state_injector.rs | 20 - crates/anvil-polkadot/tests/it/utils.rs | 22 + crates/cheatcodes/src/evm.rs | 2 +- crates/cheatcodes/src/inspector.rs | 78 +- crates/cheatcodes/src/lib.rs | 1 + crates/cheatcodes/src/strategy.rs | 4 + crates/evm/evm/src/inspectors/stack.rs | 13 +- 
crates/forge/src/runner.rs | 10 + crates/forge/tests/cli/revive_vm.rs | 202 ++--- crates/forge/tests/it/revive/cheat_etch.rs | 16 + .../tests/it/revive/cheat_gas_metering.rs | 49 ++ .../forge/tests/it/revive/cheat_mock_call.rs | 16 + .../forge/tests/it/revive/cheat_mock_calls.rs | 16 + .../tests/it/revive/cheat_mock_functions.rs | 43 ++ crates/forge/tests/it/revive/cheat_prank.rs | 16 + crates/forge/tests/it/revive/migration.rs | 22 +- crates/forge/tests/it/revive/mod.rs | 7 + crates/forge/tests/it/revive/tx_gas_price.rs | 60 ++ crates/revive-env/Cargo.toml | 9 +- crates/revive-env/src/runtime.rs | 1 + crates/revive-strategy/Cargo.toml | 10 +- .../src/cheatcodes/mock_handler.rs | 211 ++++++ crates/revive-strategy/src/cheatcodes/mod.rs | 268 +++++-- crates/revive-strategy/src/lib.rs | 2 +- crates/revive-strategy/tests/gas_metering.rs | 78 ++ crates/revive-utils/Cargo.toml | 9 +- testdata/default/revive/EtchTest.t.sol | 132 ++++ .../default/revive/EvmToReviveMigration.t.sol | 106 +++ testdata/default/revive/GasMetering.t.sol | 91 +++ testdata/default/revive/MockCall.t.sol | 422 +++++++++++ testdata/default/revive/MockCalls.t.sol | 65 ++ testdata/default/revive/MockFunction.t.sol | 75 ++ testdata/default/revive/Prank.t.sol | 701 ++++++++++++++++++ testdata/default/revive/TxGasPrice.t.sol | 71 ++ 53 files changed, 3711 insertions(+), 776 deletions(-) create mode 100644 crates/anvil-polkadot/test-data/genesis.json create mode 100644 crates/anvil-polkadot/tests/it/gas.rs create mode 100644 crates/forge/tests/it/revive/cheat_etch.rs create mode 100644 crates/forge/tests/it/revive/cheat_gas_metering.rs create mode 100644 crates/forge/tests/it/revive/cheat_mock_call.rs create mode 100644 crates/forge/tests/it/revive/cheat_mock_calls.rs create mode 100644 crates/forge/tests/it/revive/cheat_mock_functions.rs create mode 100644 crates/forge/tests/it/revive/cheat_prank.rs create mode 100644 crates/forge/tests/it/revive/tx_gas_price.rs create mode 100644 crates/revive-strategy/src/cheatcodes/mock_handler.rs create mode 100644 crates/revive-strategy/tests/gas_metering.rs create mode 100644 testdata/default/revive/EtchTest.t.sol create mode 100644 testdata/default/revive/GasMetering.t.sol create mode 100644 testdata/default/revive/MockCall.t.sol create mode 100644 testdata/default/revive/MockCalls.t.sol create mode 100644 testdata/default/revive/MockFunction.t.sol create mode 100644 testdata/default/revive/Prank.t.sol create mode 100644 testdata/default/revive/TxGasPrice.t.sol diff --git a/Cargo.lock b/Cargo.lock index 50177923b8e3b..7a599f8fb2586 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1969,7 +1969,7 @@ checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" [[package]] name = "asset-test-utils" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-parachain-system", "cumulus-pallet-xcmp-queue", @@ -1999,7 +1999,7 @@ dependencies = [ [[package]] name = "assets-common" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "ethereum-standards", @@ -2821,7 +2821,7 @@ checksum = 
"230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" [[package]] name = "binary-merkle-tree" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "hash-db", "log", @@ -3142,7 +3142,7 @@ checksum = "26c4925bc979b677330a8c7fe7a8c94af2dbb4a2d37b4a20a80d884400f46baa" [[package]] name = "bp-header-chain" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-runtime", "finality-grandpa", @@ -3159,7 +3159,7 @@ dependencies = [ [[package]] name = "bp-messages" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-runtime", @@ -3175,7 +3175,7 @@ dependencies = [ [[package]] name = "bp-parachains" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -3192,7 +3192,7 @@ dependencies = [ [[package]] name = "bp-polkadot-core" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-messages", "bp-runtime", @@ -3209,7 +3209,7 @@ dependencies = [ [[package]] name = "bp-relayers" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-messages", @@ -3227,7 +3227,7 @@ dependencies = [ [[package]] name = "bp-runtime" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -3250,7 +3250,7 @@ dependencies = [ [[package]] name = "bp-test-utils" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-parachains", @@ -3270,7 +3270,7 @@ dependencies = [ [[package]] name = "bp-xcm-bridge-hub" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-messages", "bp-runtime", @@ -3287,7 +3287,7 @@ dependencies = [ [[package]] name = 
"bp-xcm-bridge-hub-router" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -3299,7 +3299,7 @@ dependencies = [ [[package]] name = "bridge-hub-common" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -3318,7 +3318,7 @@ dependencies = [ [[package]] name = "bridge-hub-test-utils" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "asset-test-utils", "bp-header-chain", @@ -3360,7 +3360,7 @@ dependencies = [ [[package]] name = "bridge-runtime-common" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-messages", @@ -4671,7 +4671,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-aura-ext" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-parachain-system", "frame-support", @@ -4688,7 +4688,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-dmp-queue" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-benchmarking", @@ -4705,7 +4705,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-parachain-system" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "bytes", @@ -4743,7 +4743,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-parachain-system-proc-macro" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -4754,7 +4754,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-session-benchmarking" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -4767,7 +4767,7 @@ 
dependencies = [ [[package]] name = "cumulus-pallet-solo-to-para" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-parachain-system", "frame-support", @@ -4782,7 +4782,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-weight-reclaim" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-storage-weight-reclaim", "derive-where", @@ -4801,7 +4801,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-xcm" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -4816,7 +4816,7 @@ dependencies = [ [[package]] name = "cumulus-pallet-xcmp-queue" version = "0.7.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "approx", "bounded-collections 0.3.2", @@ -4842,7 +4842,7 @@ dependencies = [ [[package]] name = "cumulus-ping" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-xcm", "cumulus-primitives-core", @@ -4857,7 +4857,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-aura" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-api", "sp-consensus-aura", @@ -4866,7 +4866,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-core" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-core-primitives", @@ -4883,7 +4883,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-parachain-inherent" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "cumulus-primitives-core", @@ -4897,7 +4897,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-proof-size-hostfunction" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" 
dependencies = [ "sp-externalities", "sp-runtime-interface", @@ -4907,7 +4907,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-storage-weight-reclaim" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "cumulus-primitives-proof-size-hostfunction", @@ -4924,7 +4924,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-timestamp" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "sp-inherents", @@ -4934,7 +4934,7 @@ dependencies = [ [[package]] name = "cumulus-primitives-utility" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -4951,7 +4951,7 @@ dependencies = [ [[package]] name = "cumulus-test-relay-sproof-builder" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "parity-scale-codec", @@ -5938,7 +5938,7 @@ dependencies = [ [[package]] name = "ethereum-standards" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "alloy-core", ] @@ -6504,7 +6504,7 @@ dependencies = [ [[package]] name = "fork-tree" version = "12.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", ] @@ -7213,7 +7213,7 @@ checksum = "28dd6caf6059519a65843af8fe2a3ae298b14b80179855aeb4adc2c1934ee619" [[package]] name = "frame-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-support-procedural", @@ -7237,7 +7237,7 @@ dependencies = [ [[package]] name = "frame-benchmarking-pallet-pov" version = "18.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -7265,7 +7265,7 @@ dependencies = [ [[package]] name = "frame-election-provider-solution-type" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" 
+source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -7276,7 +7276,7 @@ dependencies = [ [[package]] name = "frame-election-provider-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-election-provider-solution-type", "frame-support", @@ -7293,7 +7293,7 @@ dependencies = [ [[package]] name = "frame-executive" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "aquamarine", "frame-support", @@ -7323,7 +7323,7 @@ dependencies = [ [[package]] name = "frame-metadata-hash-extension" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "const-hex", @@ -7339,7 +7339,7 @@ dependencies = [ [[package]] name = "frame-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "aquamarine", "array-bytes", @@ -7380,7 +7380,7 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "Inflector", "cfg-expr", @@ -7400,7 +7400,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.3.0", @@ -7412,7 +7412,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro2", "quote", @@ -7422,7 +7422,7 @@ dependencies = [ [[package]] name = "frame-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cfg-if", "docify", @@ -7441,7 +7441,7 @@ dependencies = [ [[package]] name = "frame-system-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -7455,7 +7455,7 @@ dependencies = [ [[package]] name = "frame-system-rpc-runtime-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "parity-scale-codec", @@ -7465,7 +7465,7 @@ dependencies = [ [[package]] name = "frame-try-runtime" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "parity-scale-codec", @@ -9397,9 +9397,9 @@ dependencies = [ [[package]] name = "kvdb-rocksdb" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8beb5ce840610e5a945f0306f6e7a2d5b3e68ea3e64e9a4f081fa4ee5aa6525" +checksum = "3b089b6062662d720a836f055931434439fcd3a90f0059db0b831a99da6db460" dependencies = [ "kvdb", "num_cpus", @@ -11449,7 +11449,7 @@ dependencies = [ [[package]] name = "pallet-alliance" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11468,7 +11468,7 @@ dependencies = [ [[package]] name = "pallet-asset-conversion" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11486,7 +11486,7 @@ dependencies = [ [[package]] name = "pallet-asset-conversion-ops" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11504,7 +11504,7 @@ dependencies = [ [[package]] name = "pallet-asset-conversion-tx-payment" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11519,7 +11519,7 @@ dependencies = [ [[package]] name = "pallet-asset-rate" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11533,7 +11533,7 @@ dependencies = [ [[package]] name = "pallet-asset-rewards" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11551,7 +11551,7 @@ dependencies = [ [[package]] name = "pallet-asset-tx-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11567,7 +11567,7 @@ dependencies = [ [[package]] name = "pallet-assets" version = "29.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11583,7 +11583,7 @@ dependencies = [ [[package]] name = "pallet-assets-freezer" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "pallet-assets", @@ -11595,7 +11595,7 @@ dependencies = [ [[package]] name = "pallet-assets-holder" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11610,7 +11610,7 @@ dependencies = [ [[package]] name = "pallet-assets-precompiles" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "ethereum-standards", "frame-support", @@ -11621,7 +11621,7 @@ dependencies = [ [[package]] name = "pallet-atomic-swap" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -11631,7 +11631,7 @@ dependencies = [ [[package]] name = "pallet-aura" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -11647,7 +11647,7 @@ dependencies = [ [[package]] name = "pallet-authority-discovery" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -11662,7 +11662,7 @@ dependencies = [ [[package]] name = "pallet-authorship" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -11675,7 +11675,7 @@ dependencies = [ [[package]] name = "pallet-babe" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11698,7 +11698,7 @@ dependencies = [ [[package]] name = "pallet-bags-list" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "aquamarine", "docify", @@ -11719,7 +11719,7 @@ dependencies = [ [[package]] name = "pallet-balances" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -11735,7 +11735,7 @@ dependencies = [ [[package]] name = "pallet-beefy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -11754,7 +11754,7 @@ dependencies = [ [[package]] name = "pallet-beefy-mmr" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "binary-merkle-tree", @@ -11779,7 +11779,7 @@ dependencies = [ [[package]] name = "pallet-bounties" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11796,7 +11796,7 @@ dependencies = [ [[package]] name = "pallet-bridge-grandpa" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-runtime", @@ -11815,7 +11815,7 @@ dependencies = [ [[package]] name = "pallet-bridge-messages" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-messages", @@ -11834,7 +11834,7 @@ dependencies = [ [[package]] name = "pallet-bridge-parachains" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-parachains", @@ -11854,7 +11854,7 @@ dependencies = [ [[package]] name = "pallet-bridge-relayers" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-header-chain", "bp-messages", @@ -11877,7 +11877,7 @@ dependencies = [ [[package]] name = "pallet-broker" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitvec", "frame-benchmarking", @@ -11895,7 +11895,7 @@ dependencies = [ [[package]] name = "pallet-child-bounties" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11913,7 +11913,7 @@ dependencies = [ [[package]] name = "pallet-collator-selection" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11932,7 +11932,7 @@ dependencies = [ [[package]] name = "pallet-collective" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -11949,7 +11949,7 @@ dependencies = [ [[package]] name = "pallet-collective-content" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -11963,7 +11963,7 @@ dependencies = [ [[package]] name = "pallet-contracts" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "frame-benchmarking", @@ -11993,7 +11993,7 @@ dependencies = [ [[package]] name = "pallet-contracts-mock-network" version = "3.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12024,7 +12024,7 @@ dependencies = [ [[package]] name = "pallet-contracts-proc-macro" version = "18.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro2", "quote", @@ -12034,7 +12034,7 @@ dependencies = [ [[package]] name = "pallet-contracts-uapi" version = "5.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitflags 1.3.2", "parity-scale-codec", @@ -12045,7 +12045,7 @@ dependencies = [ [[package]] name = "pallet-conviction-voting" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "assert_matches", "frame-benchmarking", @@ -12061,7 +12061,7 @@ dependencies = [ [[package]] name = "pallet-core-fellowship" version = "12.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12079,7 +12079,7 @@ dependencies = [ [[package]] name = "pallet-delegated-staking" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12094,7 +12094,7 @@ dependencies = [ [[package]] name = "pallet-democracy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12111,7 +12111,7 @@ dependencies = [ [[package]] name = "pallet-derivatives" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12131,7 +12131,7 @@ dependencies = [ [[package]] name = "pallet-dev-mode" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12146,7 +12146,7 @@ dependencies = [ [[package]] name = "pallet-dummy-dim" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12164,7 +12164,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-multi-block" version = "0.9.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12185,7 +12185,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-multi-phase" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12206,7 +12206,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-support-benchmarking" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12219,7 +12219,7 @@ dependencies = [ [[package]] name = "pallet-elections-phragmen" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12237,7 +12237,7 @@ dependencies = [ [[package]] name = "pallet-fast-unstake" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12255,7 +12255,7 @@ dependencies = [ [[package]] name = "pallet-glutton" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "blake2 0.10.6", "frame-benchmarking", @@ -12273,7 +12273,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12295,7 +12295,7 @@ dependencies = [ [[package]] name = "pallet-identity" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "enumflags2", "frame-benchmarking", @@ -12311,7 +12311,7 @@ dependencies = [ [[package]] name = "pallet-im-online" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12330,7 +12330,7 @@ dependencies = [ [[package]] name = "pallet-indices" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12345,7 +12345,7 @@ dependencies = [ [[package]] name = "pallet-insecure-randomness-collective-flip" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12356,7 +12356,7 @@ dependencies = [ [[package]] name = "pallet-lottery" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12369,7 +12369,7 @@ dependencies = [ [[package]] name = "pallet-membership" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12385,7 +12385,7 @@ dependencies = [ [[package]] name = "pallet-message-queue" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "frame-benchmarking", @@ -12404,7 +12404,7 @@ dependencies = [ [[package]] name = "pallet-meta-tx" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12422,7 +12422,7 @@ dependencies = [ [[package]] name = "pallet-migrations" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12441,7 +12441,7 @@ dependencies = [ [[package]] name = "pallet-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -12455,7 +12455,7 @@ dependencies = [ [[package]] name = "pallet-mmr" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -12467,7 +12467,7 @@ dependencies = [ [[package]] name = "pallet-multi-asset-bounties" version = "1.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12484,7 +12484,7 @@ dependencies = [ [[package]] name = "pallet-multisig" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -12495,7 +12495,7 @@ dependencies = [ [[package]] name = "pallet-nft-fractionalization" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "pallet-assets", @@ -12508,7 +12508,7 @@ dependencies = [ [[package]] name = "pallet-nfts" version = "22.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "enumflags2", "frame-benchmarking", @@ -12525,7 +12525,7 @@ dependencies = [ [[package]] name = "pallet-nfts-runtime-api" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -12534,7 +12534,7 @@ dependencies = [ [[package]] name = "pallet-nis" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12544,7 +12544,7 @@ dependencies = [ [[package]] name = "pallet-node-authorization" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -12555,7 +12555,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12573,7 +12573,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools-benchmarking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12593,7 +12593,7 @@ dependencies = [ [[package]] name = "pallet-nomination-pools-runtime-api" version = "23.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "pallet-nomination-pools", "parity-scale-codec", @@ -12603,7 +12603,7 @@ dependencies = [ [[package]] name = "pallet-offences" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12618,7 +12618,7 @@ dependencies = [ [[package]] name = "pallet-offences-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -12641,7 +12641,7 @@ dependencies = [ [[package]] name = "pallet-oracle" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12659,7 +12659,7 @@ dependencies = [ [[package]] name = "pallet-oracle-runtime-api" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -12670,7 +12670,7 @@ dependencies = [ [[package]] name = "pallet-origin-restriction" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12688,7 +12688,7 @@ dependencies = [ [[package]] name = "pallet-paged-list" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "parity-scale-codec", @@ -12700,7 +12700,7 @@ dependencies = [ [[package]] name = "pallet-parameters" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -12717,7 +12717,7 @@ dependencies = [ [[package]] name = "pallet-people" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12735,7 +12735,7 @@ dependencies = [ [[package]] name = "pallet-preimage" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12751,7 +12751,7 @@ dependencies = [ [[package]] name = "pallet-proxy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12761,7 +12761,7 @@ dependencies = [ [[package]] name = "pallet-ranked-collective" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12779,7 +12779,7 @@ dependencies = [ [[package]] name = "pallet-recovery" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -12789,7 +12789,7 @@ dependencies = [ [[package]] name = "pallet-referenda" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12806,7 +12806,7 @@ dependencies = [ [[package]] name = "pallet-remark" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -12822,7 +12822,7 @@ dependencies = [ [[package]] name = "pallet-revive" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "alloy-consensus", "alloy-core", @@ -12873,7 +12873,7 @@ dependencies = [ [[package]] name = "pallet-revive-eth-rpc" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "anyhow", "clap", @@ -12898,6 +12898,7 @@ dependencies = [ "sp-io", "sp-rpc", "sp-runtime", + "sp-timestamp", "sp-weights", "sqlx", "substrate-prometheus-endpoint", @@ -12910,7 +12911,7 @@ dependencies = [ [[package]] name = "pallet-revive-fixtures" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "alloy-core", "anyhow", @@ -12927,7 +12928,7 @@ 
dependencies = [ [[package]] name = "pallet-revive-proc-macro" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro2", "quote", @@ -12937,7 +12938,7 @@ dependencies = [ [[package]] name = "pallet-revive-uapi" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "alloy-core", "bitflags 1.3.2", @@ -12952,7 +12953,7 @@ dependencies = [ [[package]] name = "pallet-root-offences" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12968,7 +12969,7 @@ dependencies = [ [[package]] name = "pallet-root-testing" version = "4.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -12981,7 +12982,7 @@ dependencies = [ [[package]] name = "pallet-safe-mode" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "pallet-balances", @@ -12995,7 +12996,7 @@ dependencies = [ [[package]] name = "pallet-salary" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "pallet-ranked-collective", @@ -13007,7 +13008,7 @@ dependencies = [ [[package]] name = "pallet-scheduler" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -13024,7 +13025,7 @@ dependencies = [ [[package]] name = "pallet-scored-pool" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13037,7 +13038,7 @@ dependencies = [ [[package]] name = "pallet-session" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13059,7 +13060,7 @@ dependencies = [ [[package]] name = "pallet-session-benchmarking" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13075,7 +13076,7 @@ dependencies = [ [[package]] name = "pallet-skip-feeless-payment" version = "3.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13087,7 +13088,7 @@ dependencies = [ [[package]] name = "pallet-society" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13104,7 +13105,7 @@ dependencies = [ [[package]] name = "pallet-staking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -13125,7 +13126,7 @@ dependencies = [ [[package]] name = "pallet-staking-async" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -13149,7 +13150,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-ah-client" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13169,7 +13170,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-rc-client" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13186,7 +13187,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-reward-fn" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "sp-arithmetic", @@ -13195,7 +13196,7 @@ dependencies = [ [[package]] name = "pallet-staking-async-runtime-api" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -13205,7 +13206,7 @@ dependencies = [ [[package]] name = 
"pallet-staking-reward-fn" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "sp-arithmetic", @@ -13214,7 +13215,7 @@ dependencies = [ [[package]] name = "pallet-staking-runtime-api" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -13224,7 +13225,7 @@ dependencies = [ [[package]] name = "pallet-state-trie-migration" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13240,7 +13241,7 @@ dependencies = [ [[package]] name = "pallet-statement" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", @@ -13257,7 +13258,7 @@ dependencies = [ [[package]] name = "pallet-sudo" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -13272,7 +13273,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -13290,7 +13291,7 @@ dependencies = [ [[package]] name = "pallet-tips" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13308,7 +13309,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13324,7 +13325,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -13336,7 +13337,7 @@ dependencies = [ [[package]] name = 
"pallet-transaction-storage" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13355,7 +13356,7 @@ dependencies = [ [[package]] name = "pallet-treasury" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -13374,7 +13375,7 @@ dependencies = [ [[package]] name = "pallet-tx-pause" version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "parity-scale-codec", @@ -13385,7 +13386,7 @@ dependencies = [ [[package]] name = "pallet-uniques" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13399,7 +13400,7 @@ dependencies = [ [[package]] name = "pallet-utility" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13414,7 +13415,7 @@ dependencies = [ [[package]] name = "pallet-verify-signature" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13429,7 +13430,7 @@ dependencies = [ [[package]] name = "pallet-vesting" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13443,7 +13444,7 @@ dependencies = [ [[package]] name = "pallet-whitelist" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "polkadot-sdk-frame", @@ -13453,7 +13454,7 @@ dependencies = [ [[package]] name = "pallet-xcm" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bounded-collections 0.3.2", "frame-benchmarking", @@ -13477,7 +13478,7 @@ dependencies = [ [[package]] name = "pallet-xcm-benchmarks" version = "7.0.0" 
-source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-benchmarking", "frame-support", @@ -13494,7 +13495,7 @@ dependencies = [ [[package]] name = "pallet-xcm-bridge-hub" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-messages", "bp-runtime", @@ -13516,7 +13517,7 @@ dependencies = [ [[package]] name = "pallet-xcm-bridge-hub-router" version = "0.5.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-xcm-bridge-hub-router", "frame-benchmarking", @@ -13536,7 +13537,7 @@ dependencies = [ [[package]] name = "pallet-xcm-precompiles" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "pallet-revive", @@ -13550,7 +13551,7 @@ dependencies = [ [[package]] name = "parachains-common" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "cumulus-primitives-utility", @@ -13581,7 +13582,7 @@ dependencies = [ [[package]] name = "parachains-runtimes-test-utils" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-pallet-parachain-system", "cumulus-pallet-xcmp-queue", @@ -14036,7 +14037,7 @@ dependencies = [ [[package]] name = "polkadot-core-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -14047,7 +14048,7 @@ dependencies = [ [[package]] name = "polkadot-parachain-primitives" version = "6.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "bounded-collections 0.3.2", @@ -14064,7 +14065,7 @@ dependencies = [ [[package]] name = "polkadot-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitvec", "bounded-collections 0.3.2", @@ -14093,7 +14094,7 @@ dependencies = [ 
[[package]] name = "polkadot-runtime-common" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitvec", "frame-benchmarking", @@ -14142,7 +14143,7 @@ dependencies = [ [[package]] name = "polkadot-runtime-metrics" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bs58", "frame-benchmarking", @@ -14154,7 +14155,7 @@ dependencies = [ [[package]] name = "polkadot-runtime-parachains" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitflags 1.3.2", "bitvec", @@ -14201,7 +14202,7 @@ dependencies = [ [[package]] name = "polkadot-sdk" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "asset-test-utils", "assets-common", @@ -14331,6 +14332,7 @@ dependencies = [ "pallet-referenda", "pallet-remark", "pallet-revive", + "pallet-revive-uapi", "pallet-root-offences", "pallet-root-testing", "pallet-safe-mode", @@ -14476,7 +14478,7 @@ dependencies = [ [[package]] name = "polkadot-sdk-frame" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-benchmarking", @@ -15694,7 +15696,7 @@ dependencies = [ [[package]] name = "revive-dev-runtime" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "parity-scale-codec", @@ -16052,7 +16054,7 @@ dependencies = [ [[package]] name = "rococo-runtime-constants" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "polkadot-primitives", @@ -16565,7 +16567,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "sp-core", @@ -16576,7 +16578,7 @@ dependencies = [ [[package]] name = "sc-basic-authorship" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "futures", "log", @@ -16598,7 +16600,7 @@ dependencies = [ [[package]] name = "sc-block-builder" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -16613,7 +16615,7 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "docify", @@ -16639,7 +16641,7 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", @@ -16650,7 +16652,7 @@ dependencies = [ [[package]] name = "sc-cli" version = "0.36.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "bip39", @@ -16692,7 +16694,7 @@ dependencies = [ [[package]] name = "sc-client-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "fnv", "futures", @@ -16718,7 +16720,7 @@ dependencies = [ [[package]] name = "sc-client-db" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "hash-db", "kvdb", @@ -16746,7 +16748,7 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -16769,7 +16771,7 @@ dependencies = [ [[package]] name = "sc-consensus-aura" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "fork-tree", @@ -16800,7 +16802,7 @@ dependencies = [ [[package]] name = "sc-consensus-babe" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "fork-tree", @@ -16837,7 +16839,7 @@ 
dependencies = [ [[package]] name = "sc-consensus-epochs" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "fork-tree", "parity-scale-codec", @@ -16850,7 +16852,7 @@ dependencies = [ [[package]] name = "sc-consensus-manual-seal" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "assert_matches", "async-trait", @@ -16885,7 +16887,7 @@ dependencies = [ [[package]] name = "sc-consensus-slots" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -16908,7 +16910,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "parking_lot 0.12.4", @@ -16931,7 +16933,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "polkavm 0.26.0", "sc-allocator", @@ -16944,7 +16946,7 @@ dependencies = [ [[package]] name = "sc-executor-polkavm" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "polkavm 0.26.0", @@ -16955,7 +16957,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "anyhow", "log", @@ -16971,7 +16973,7 @@ dependencies = [ [[package]] name = "sc-informant" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "console", "futures", @@ -16987,7 +16989,7 @@ dependencies = [ [[package]] name = "sc-keystore" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "parking_lot 0.12.4", @@ -17001,7 +17003,7 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.4.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "arrayvec 0.7.6", @@ -17029,7 +17031,7 @@ dependencies = [ [[package]] name = "sc-network" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -17079,7 +17081,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bitflags 1.3.2", "parity-scale-codec", @@ -17089,7 +17091,7 @@ dependencies = [ [[package]] name = "sc-network-light" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -17110,7 +17112,7 @@ dependencies = [ [[package]] name = "sc-network-sync" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -17145,7 +17147,7 @@ dependencies = [ [[package]] name = "sc-network-transactions" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "futures", @@ -17164,7 +17166,7 @@ dependencies = [ [[package]] name = "sc-network-types" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bs58", "bytes", @@ -17185,7 +17187,7 @@ dependencies = [ [[package]] name = "sc-proposer-metrics" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "substrate-prometheus-endpoint", @@ -17194,7 +17196,7 @@ dependencies = [ [[package]] name = "sc-rpc" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "futures", "jsonrpsee", @@ -17226,7 +17228,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" 
+source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "jsonrpsee", "parity-scale-codec", @@ -17246,7 +17248,7 @@ dependencies = [ [[package]] name = "sc-rpc-server" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "dyn-clone", "forwarded-header-value", @@ -17270,7 +17272,7 @@ dependencies = [ [[package]] name = "sc-rpc-spec-v2" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "futures", @@ -17303,7 +17305,7 @@ dependencies = [ [[package]] name = "sc-runtime-utilities" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sc-executor", @@ -17318,7 +17320,7 @@ dependencies = [ [[package]] name = "sc-service" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "directories", @@ -17382,7 +17384,7 @@ dependencies = [ [[package]] name = "sc-state-db" version = "0.30.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -17393,7 +17395,7 @@ dependencies = [ [[package]] name = "sc-sysinfo" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "derive_more 0.99.20", "futures", @@ -17413,7 +17415,7 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "chrono", "futures", @@ -17432,7 +17434,7 @@ dependencies = [ [[package]] name = "sc-tracing" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "chrono", "console", @@ -17460,7 +17462,7 @@ dependencies = [ [[package]] name = "sc-tracing-proc-macro" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ 
"proc-macro-crate 3.3.0", "proc-macro2", @@ -17471,7 +17473,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -17502,7 +17504,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -17519,7 +17521,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-channel 1.9.0", "futures", @@ -18424,7 +18426,7 @@ checksum = "826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7" [[package]] name = "slot-range-helper" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "enumn", "parity-scale-codec", @@ -18606,7 +18608,7 @@ dependencies = [ [[package]] name = "snowbridge-core" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bp-relayers", "frame-support", @@ -18841,7 +18843,7 @@ dependencies = [ [[package]] name = "sp-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "hash-db", @@ -18863,7 +18865,7 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "Inflector", "blake2 0.10.6", @@ -18877,7 +18879,7 @@ dependencies = [ [[package]] name = "sp-application-crypto" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -18889,7 +18891,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "integer-sqrt", @@ -18903,7 +18905,7 @@ dependencies = 
[ [[package]] name = "sp-authority-discovery" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -18915,7 +18917,7 @@ dependencies = [ [[package]] name = "sp-block-builder" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-api", "sp-inherents", @@ -18925,7 +18927,7 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "futures", "parity-scale-codec", @@ -18944,7 +18946,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "futures", @@ -18958,7 +18960,7 @@ dependencies = [ [[package]] name = "sp-consensus-aura" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "parity-scale-codec", @@ -18974,7 +18976,7 @@ dependencies = [ [[package]] name = "sp-consensus-babe" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "parity-scale-codec", @@ -18992,7 +18994,7 @@ dependencies = [ [[package]] name = "sp-consensus-beefy" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19012,7 +19014,7 @@ dependencies = [ [[package]] name = "sp-consensus-grandpa" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "finality-grandpa", "log", @@ -19029,7 +19031,7 @@ dependencies = [ [[package]] name = "sp-consensus-pow" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "sp-api", @@ -19040,7 +19042,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.32.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19051,7 +19053,7 @@ dependencies = [ [[package]] name = "sp-core" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "ark-vrf", "array-bytes", @@ -19098,7 +19100,7 @@ dependencies = [ [[package]] name = "sp-core-hashing" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", ] @@ -19106,7 +19108,7 @@ dependencies = [ [[package]] name = "sp-core-hashing-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-crypto-hashing-proc-macro", ] @@ -19114,7 +19116,7 @@ dependencies = [ [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "ark-bls12-377 0.5.0", "ark-bls12-377-ext", @@ -19148,7 +19150,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "blake2b_simd", "byteorder", @@ -19161,7 +19163,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "quote", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk.git?branch=master)", @@ -19171,7 +19173,7 @@ dependencies = [ [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "kvdb", "parking_lot 0.12.4", @@ -19180,7 +19182,7 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "proc-macro2", "quote", @@ -19190,7 +19192,7 @@ dependencies = [ [[package]] name = 
"sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "parity-scale-codec", @@ -19200,7 +19202,7 @@ dependencies = [ [[package]] name = "sp-genesis-builder" version = "0.8.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19212,7 +19214,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -19225,7 +19227,7 @@ dependencies = [ [[package]] name = "sp-io" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bytes", "docify", @@ -19251,7 +19253,7 @@ dependencies = [ [[package]] name = "sp-keyring" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-core", "sp-runtime", @@ -19261,7 +19263,7 @@ dependencies = [ [[package]] name = "sp-keystore" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "parking_lot 0.12.4", @@ -19272,7 +19274,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "thiserror 1.0.69", "zstd 0.12.4", @@ -19281,7 +19283,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-metadata", "parity-scale-codec", @@ -19291,7 +19293,7 @@ dependencies = [ [[package]] name = "sp-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19302,7 +19304,7 @@ dependencies = [ [[package]] name = "sp-mmr-primitives" version = "26.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "log", "parity-scale-codec", @@ -19319,7 +19321,7 @@ dependencies = [ [[package]] name = "sp-npos-elections" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19332,7 +19334,7 @@ dependencies = [ [[package]] name = "sp-offchain" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-api", "sp-core", @@ -19342,7 +19344,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "backtrace", "regex", @@ -19351,7 +19353,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "rustc-hash 1.1.0", "serde", @@ -19361,7 +19363,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "binary-merkle-tree", "bytes", @@ -19391,7 +19393,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -19409,7 +19411,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "Inflector", "expander", @@ -19422,7 +19424,7 @@ dependencies = [ [[package]] name = "sp-session" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "scale-info", @@ -19436,7 +19438,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -19449,7 +19451,7 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "hash-db", "log", @@ -19469,7 +19471,7 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "aes-gcm", "curve25519-dalek", @@ -19493,12 +19495,12 @@ dependencies = [ [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "impl-serde", "parity-scale-codec", @@ -19510,7 +19512,7 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "parity-scale-codec", @@ -19522,7 +19524,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "regex", @@ -19534,7 +19536,7 @@ dependencies = [ [[package]] name = "sp-transaction-pool" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "sp-api", "sp-runtime", @@ -19543,7 +19545,7 @@ dependencies = [ [[package]] name = "sp-transaction-storage-proof" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "parity-scale-codec", @@ -19557,7 +19559,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "ahash", "foldhash 0.1.5", @@ -19582,7 +19584,7 @@ dependencies = [ [[package]] 
name = "sp-version" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "impl-serde", "parity-scale-codec", @@ -19599,7 +19601,7 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "parity-scale-codec", "proc-macro-warning", @@ -19611,7 +19613,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -19623,7 +19625,7 @@ dependencies = [ [[package]] name = "sp-weights" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "bounded-collections 0.3.2", "parity-scale-codec", @@ -19903,7 +19905,7 @@ dependencies = [ [[package]] name = "staging-parachain-info" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -19916,7 +19918,7 @@ dependencies = [ [[package]] name = "staging-xcm" version = "7.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "array-bytes", "bounded-collections 0.3.2", @@ -19937,7 +19939,7 @@ dependencies = [ [[package]] name = "staging-xcm-builder" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "frame-support", @@ -19961,7 +19963,7 @@ dependencies = [ [[package]] name = "staging-xcm-executor" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "environmental", "frame-benchmarking", @@ -20145,7 +20147,7 @@ dependencies = [ [[package]] name = "substrate-bip39" version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "hmac 0.12.1", "pbkdf2 0.12.2", @@ -20170,7 +20172,7 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-support" 
version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "jsonrpsee", @@ -20184,7 +20186,7 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "docify", "frame-system-rpc-runtime-api", @@ -20204,7 +20206,7 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "http-body-util", "hyper 1.6.0", @@ -20218,7 +20220,7 @@ dependencies = [ [[package]] name = "substrate-rpc-client" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "async-trait", "jsonrpsee", @@ -20242,7 +20244,7 @@ dependencies = [ [[package]] name = "substrate-wasm-builder" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "build-helper", "cargo_metadata 0.15.4", @@ -20781,7 +20783,7 @@ checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" [[package]] name = "testnet-parachains-constants" version = "1.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "cumulus-primitives-core", "frame-support", @@ -22654,7 +22656,7 @@ dependencies = [ [[package]] name = "westend-runtime-constants" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "polkadot-primitives", @@ -23335,7 +23337,7 @@ dependencies = [ [[package]] name = "xcm-procedural" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "Inflector", "proc-macro2", @@ -23346,7 +23348,7 @@ dependencies = [ [[package]] name = "xcm-runtime-apis" version = "0.1.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "parity-scale-codec", @@ -23360,7 +23362,7 @@ dependencies = [ 
[[package]] name = "xcm-simulator" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#ef07f24b0cd49a52d1a5a0f3a42ea8d714187f18" +source = "git+https://github.com/paritytech/polkadot-sdk.git?branch=master#30cda2aad8612a10ff729d494acd9d5353294d63" dependencies = [ "frame-support", "frame-system", diff --git a/Cargo.toml b/Cargo.toml index 7577bbb33cb6d..35bca7410ddae 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -216,6 +216,17 @@ revive-env = { path = "crates/revive-env" } revive-strategy = { path = "crates/revive-strategy" } revive-utils = { path = "crates/revive-utils" } +# polkadot-sdk +polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch = "master", features = [ + "experimental", + "runtime", + "polkadot-runtime-common", + "pallet-revive", + "pallet-balances", + "pallet-timestamp", + "pallet-revive-uapi", +]} + # solc & compilation utilities foundry-block-explorers = { version = "0.20.0", default-features = false } foundry-compilers = { version = "0.18.2", default-features = false } diff --git a/crates/anvil-polkadot/src/api_server/server.rs b/crates/anvil-polkadot/src/api_server/server.rs index 4fd7662734c4a..1198dbe7ece09 100644 --- a/crates/anvil-polkadot/src/api_server/server.rs +++ b/crates/anvil-polkadot/src/api_server/server.rs @@ -41,6 +41,7 @@ use alloy_serde::WithOtherFields; use alloy_trie::{EMPTY_ROOT_HASH, KECCAK_EMPTY, TrieAccount}; use anvil_core::eth::{EthRequest, Params as MineParams}; use anvil_rpc::response::ResponseResult; +use chrono::{DateTime, Datelike, Utc}; use codec::{Decode, Encode}; use futures::{StreamExt, channel::mpsc}; use indexmap::IndexMap; @@ -66,12 +67,12 @@ use polkadot_sdk::{ sp_api::{Metadata as _, ProvideRuntimeApi}, sp_blockchain::Info, sp_core::{self, Hasher, keccak_256}, - sp_runtime::traits::BlakeTwo256, + sp_runtime::{FixedU128, traits::BlakeTwo256}, }; use revm::primitives::hardfork::SpecId; use sqlx::sqlite::SqlitePoolOptions; use std::{collections::HashSet, sync::Arc, time::Duration}; -use substrate_runtime::Balance; +use substrate_runtime::{Balance, constants::NATIVE_TO_ETH_RATIO}; use subxt::{ Metadata as SubxtMetadata, OnlineClient, backend::rpc::RpcClient, client::RuntimeVersion as SubxtRuntimeVersion, config::substrate::H256, @@ -118,6 +119,7 @@ impl ApiServer { substrate_service.spawn_handle.clone(), ) .await?; + Ok(Self { block_provider, req_receiver, @@ -148,6 +150,17 @@ impl ApiServer { pub async fn execute(&mut self, req: EthRequest) -> ResponseResult { let res = match req.clone() { EthRequest::SetLogging(enabled) => self.set_logging(enabled).to_rpc_result(), + //------- Gas ----------- + EthRequest::SetNextBlockBaseFeePerGas(base_fee) => { + let latest_block = self.latest_block(); + // We inject in substrate storage an 1e18 denominated value after transforming it + // to a 1e12. + self.backend.inject_next_fee_multiplier( + latest_block, + FixedU128::from_rational(base_fee.to::(), NATIVE_TO_ETH_RATIO.into()), + ); + Ok(()).to_rpc_result() + } //------- Mining--------- EthRequest::Mine(blocks, interval) => self.mine(blocks, interval).await.to_rpc_result(), @@ -390,10 +403,17 @@ impl ApiServer { "The interval between blocks is too large".to_string(), )); } - self.mining_engine + + // Subscribe to new best blocks. 
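// Illustrative sketch, not part of the patch: how the wei-denominated (1e18) base fee
// accepted by the `SetNextBlockBaseFeePerGas` request maps onto the runtime's
// `NextFeeMultiplier`, assuming the 1e12 native denomination and the
// `NATIVE_TO_ETH_RATIO = 1_000_000` constant introduced elsewhere in this change.
// `FixedU128` is the type re-exported via `polkadot_sdk::sp_runtime`.
use polkadot_sdk::sp_runtime::FixedU128;

const NATIVE_TO_ETH_RATIO: u128 = 1_000_000;

fn multiplier_from_eth_base_fee(base_fee_wei: u128) -> FixedU128 {
    // e.g. 6_000_000 wei becomes a multiplier of 6.0, because the runtime charges in
    // 1e12-denominated units while eth gas prices are quoted in 1e18.
    FixedU128::from_rational(base_fee_wei, NATIVE_TO_ETH_RATIO)
}

fn main() {
    let m = multiplier_from_eth_base_fee(6_000_000);
    // FixedU128 keeps 18 fractional decimals, so 6.0 has an inner value of 6 * 10^18.
    assert_eq!(m.into_inner(), 6_000_000_000_000_000_000);
}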
+ let receiver = self.eth_rpc_client.block_notifier().map(|sender| sender.subscribe()); + + let awaited_hash = self + .mining_engine .mine(blocks.map(|b| b.to()), interval.map(|i| Duration::from_secs(i.to()))) .await - .map_err(Error::Mining) + .map_err(Error::Mining)?; + self.wait_for_hash(receiver, awaited_hash).await?; + Ok(()) } fn set_interval_mining(&self, interval: u64) -> Result<()> { @@ -425,7 +445,10 @@ impl ApiServer { async fn evm_mine(&self, mine: Option>>) -> Result { node_info!("evm_mine"); - self.mining_engine.evm_mine(mine.and_then(|p| p.params)).await?; + // Subscribe to new best blocks. + let receiver = self.eth_rpc_client.block_notifier().map(|sender| sender.subscribe()); + let awaited_hash = self.mining_engine.evm_mine(mine.and_then(|p| p.params)).await?; + self.wait_for_hash(receiver, awaited_hash).await?; Ok("0x0".to_string()) } @@ -434,7 +457,15 @@ impl ApiServer { mine: Option>>, ) -> Result> { node_info!("evm_mine_detailed"); - let mined_blocks = self.mining_engine.do_evm_mine(mine.and_then(|p| p.params)).await?; + + // Subscribe to new best blocks. + let receiver = self.eth_rpc_client.block_notifier().map(|sender| sender.subscribe()); + + let (mined_blocks, awaited_hash) = + self.mining_engine.do_evm_mine(mine.and_then(|p| p.params)).await?; + + self.wait_for_hash(receiver, awaited_hash).await?; + let mut blocks = Vec::with_capacity(mined_blocks as usize); let last_block = self.client.info().best_number as u64; let starting = last_block - mined_blocks + 1; @@ -612,28 +643,37 @@ impl ApiServer { async fn estimate_gas( &self, request: WithOtherFields, - block: Option, + block: Option, ) -> Result { node_info!("eth_estimateGas"); let hash = self.get_block_hash_for_tag(block).await?; let runtime_api = self.eth_rpc_client.runtime_api(hash); - let dry_run = - runtime_api.dry_run(convert_to_generic_transaction(request.into_inner())).await?; + let dry_run = runtime_api + .dry_run( + convert_to_generic_transaction(request.into_inner()), + ReviveBlockId::from(block).inner(), + ) + .await?; Ok(dry_run.eth_gas) } async fn call( &self, request: WithOtherFields, - block: Option, + block: Option, ) -> Result { node_info!("eth_call"); let hash = self.get_block_hash_for_tag(block).await?; + let runtime_api = self.eth_rpc_client.runtime_api(hash); - let dry_run = - runtime_api.dry_run(convert_to_generic_transaction(request.into_inner())).await?; + let dry_run = runtime_api + .dry_run( + convert_to_generic_transaction(request.into_inner()), + ReviveBlockId::from(block).inner(), + ) + .await?; Ok(dry_run.data.into()) } @@ -699,9 +739,11 @@ impl ApiServer { if transaction.gas_price.is_none() { transaction.gas_price = Some(self.gas_price().await?); } + if transaction.nonce.is_none() { transaction.nonce = Some(self.get_transaction_count(from, latest_block_id).await?); } + if transaction.chain_id.is_none() { transaction.chain_id = Some(sp_core::U256::from_big_endian(&self.chain_id(latest_block).to_be_bytes())); @@ -1425,18 +1467,18 @@ impl ApiServer { let mut invalid_txs = IndexMap::new(); for tx in self.tx_pool.ready() { - if let Some(sender) = extract_sender(tx.data()) { - if sender == address { - invalid_txs.insert(*tx.hash(), None); - } + if let Some(sender) = extract_sender(tx.data()) + && sender == address + { + invalid_txs.insert(*tx.hash(), None); } } for tx in self.tx_pool.futures() { - if let Some(sender) = extract_sender(tx.data()) { - if sender == address { - invalid_txs.insert(*tx.hash(), None); - } + if let Some(sender) = extract_sender(tx.data()) + && sender == address + 
{ + invalid_txs.insert(*tx.hash(), None); } } @@ -1446,6 +1488,59 @@ impl ApiServer { Ok(()) } + + async fn wait_for_hash( + &self, + receiver: Option>, + awaited_hash: H256, + ) -> Result<()> { + if let Some(mut receiver) = receiver { + tokio::time::timeout(Duration::from_secs(3), async { + loop { + if let Ok(block_hash) = receiver.recv().await { + if let Err(e) = self.log_mined_block(block_hash).await { + node_info!("Failed to log mined block {block_hash:?}: {e:?}"); + } + if block_hash == awaited_hash { + break; + } + } + } + }) + .await + .map_err(|e| { + Error::InternalError(format!( + "Was not notified about the new best block in time {e:?}." + )) + })?; + } + Ok(()) + } + + async fn log_mined_block(&self, block_hash: H256) -> Result<()> { + let block_timestamp = self.backend.read_timestamp(block_hash)?; + let block_number = self.backend.read_block_number(block_hash)?; + let timestamp = utc_from_millis(block_timestamp)?; + node_info!(" Block Number: {}", block_number); + node_info!(" Block Hash: {:?}", block_hash); + if timestamp.year() > 9999 { + // rf2822 panics with more than 4 digits + node_info!(" Block Time: {:?}\n", timestamp.to_rfc3339()); + } else { + node_info!(" Block Time: {:?}\n", timestamp.to_rfc2822()); + } + Ok(()) + } +} + +/// Returns the `Utc` datetime for the given seconds since unix epoch +fn utc_from_millis(millis: u64) -> Result> { + DateTime::from_timestamp_millis( + millis.try_into().map_err(|err| { + Error::InvalidParams(format!("Could not convert the timestamp: {err:?}")) + })?, + ) + .ok_or(Error::InvalidParams("Could not get the utc datetime 😭".to_string())) } fn new_contract_info(address: &Address, code_hash: H256, nonce: Nonce) -> ContractInfo { @@ -1572,9 +1667,12 @@ async fn create_revive_rpc_client( .await .map_err(|err| Error::ReviveRpc(EthRpcError::ClientError(ClientError::SqlxError(err))))?; - let eth_rpc_client = EthRpcClient::new(api, rpc_client, rpc, block_provider, receipt_provider) - .await - .map_err(Error::from)?; + let mut eth_rpc_client = + EthRpcClient::new(api, rpc_client, rpc, block_provider, receipt_provider) + .await + .map_err(Error::from)?; + + eth_rpc_client.create_block_notifier(); let eth_rpc_client_clone = eth_rpc_client.clone(); task_spawn_handle.spawn("block-subscription", "None", async move { let eth_rpc_client = eth_rpc_client_clone; diff --git a/crates/anvil-polkadot/src/cmd.rs b/crates/anvil-polkadot/src/cmd.rs index b76ba0d54b5d1..77aa9c337628d 100644 --- a/crates/anvil-polkadot/src/cmd.rs +++ b/crates/anvil-polkadot/src/cmd.rs @@ -110,7 +110,6 @@ impl NodeArgs { let anvil_config = AnvilNodeConfig::default() .with_gas_limit(self.evm.gas_limit) .disable_block_gas_limit(self.evm.disable_block_gas_limit) - .with_gas_price(self.evm.gas_price) .with_blocktime(self.block_time) .with_no_mining(self.no_mining) .with_mixed_mining(self.mixed_mining, self.block_time) @@ -262,10 +261,6 @@ pub struct AnvilEvmArgs { )] pub disable_code_size_limit: bool, - /// The gas price. - #[arg(long, help_heading = "Environment config")] - pub gas_price: Option, - /// The base fee in a block. 
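// Illustrative sketch, not part of the patch: the subscribe-then-wait pattern behind
// `wait_for_hash`. The notifier subscription is taken *before* the mining call, so a
// block sealed immediately afterwards cannot be missed, and a timeout bounds how long
// the RPC handler blocks. A `u64` stands in for the real `H256` block hash and the
// channel is a plain `tokio::sync::broadcast`, both used only for the example.
use std::time::Duration;
use tokio::{sync::broadcast, time::timeout};

async fn wait_for(mut rx: broadcast::Receiver<u64>, awaited: u64) -> Result<(), String> {
    timeout(Duration::from_secs(3), async {
        loop {
            match rx.recv().await {
                // Saw the block we sealed: stop waiting.
                Ok(hash) if hash == awaited => break,
                // Some other best block: keep listening until the deadline.
                Ok(_) => continue,
                // Fell behind: the missed notifications are skipped, keep listening.
                Err(broadcast::error::RecvError::Lagged(_)) => continue,
                // Notifier dropped: nothing more will arrive.
                Err(broadcast::error::RecvError::Closed) => break,
            }
        }
    })
    .await
    .map_err(|_| "was not notified about the new best block in time".to_string())
}

#[tokio::main]
async fn main() {
    let (tx, _keep_alive) = broadcast::channel(16);
    // Subscribe before triggering block production.
    let rx = tx.subscribe();
    // Stand-in for `mining_engine.mine(..)`, which now returns the sealed block's hash.
    tx.send(42u64).unwrap();
    wait_for(rx, 42).await.unwrap();
}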
#[arg( long, diff --git a/crates/anvil-polkadot/src/config.rs b/crates/anvil-polkadot/src/config.rs index b0fc99058cd7f..18882e90a8f36 100644 --- a/crates/anvil-polkadot/src/config.rs +++ b/crates/anvil-polkadot/src/config.rs @@ -1,7 +1,4 @@ -use crate::{ - api_server::revive_conversions::ReviveAddress, - substrate_node::chain_spec::keypairs_from_private_keys, -}; +use crate::api_server::revive_conversions::ReviveAddress; use alloy_genesis::Genesis; use alloy_primitives::{Address, TxHash, U256, hex, map::HashMap, utils::Unit}; use alloy_signer::Signer; @@ -51,11 +48,9 @@ pub const DEFAULT_MNEMONIC: &str = "test test test test test test test test test pub const DEFAULT_IPC_ENDPOINT: &str = if cfg!(unix) { "/tmp/anvil.ipc" } else { r"\\.\pipe\anvil.ipc" }; -/// Initial base fee for EIP-1559 blocks. -pub const INITIAL_BASE_FEE: u64 = 1_000_000_000; - -/// Initial default gas price for the first block -pub const INITIAL_GAS_PRICE: u128 = 1_875_000_000; +/// In anvil this is `1_000_000_000`, in 1e18 denomination. However, +/// asset-hub-westend runtime sets it to `1_000_000`. +pub const INITIAL_BASE_FEE: u128 = 1_000_000; const BANNER: &str = r" _ _ @@ -275,10 +270,8 @@ pub struct AnvilNodeConfig { pub gas_limit: Option, /// If set to `true`, disables the block gas limit pub disable_block_gas_limit: bool, - /// Default gas price for all txs - pub gas_price: Option, /// Default base fee - pub base_fee: Option, + pub base_fee: Option, /// If set to `true`, disables the enforcement of a minimum suggested priority fee pub disable_min_priority_fee: bool, /// Signer accounts that will be initialised with `genesis_balance` in the genesis block @@ -493,7 +486,6 @@ Genesis Number "private_keys": private_keys, "wallet": wallet_description, "base_fee": format!("{}", self.get_base_fee()), - "gas_price": format!("{}", self.get_gas_price()), "gas_limit": gas_limit, "genesis_timestamp": format!("{}", self.get_genesis_timestamp()), }) @@ -532,7 +524,6 @@ impl Default for AnvilNodeConfig { chain_id: None, gas_limit: None, disable_block_gas_limit: false, - gas_price: None, signer_accounts: genesis_accounts.clone(), genesis_timestamp: None, genesis_block_number: None, @@ -577,18 +568,19 @@ impl AnvilNodeConfig { self.memory_limit = mems_value; self } + /// Returns the base fee to use - pub fn get_base_fee(&self) -> u64 { + pub fn get_base_fee(&self) -> u128 { self.base_fee - .or_else(|| self.genesis.as_ref().and_then(|g| g.base_fee_per_gas.map(|g| g as u64))) + .or_else(|| { + self.genesis.as_ref().and_then(|g| { + // The base fee received via CLI will be transformed to 1e-12. 
+ g.base_fee_per_gas + }) + }) .unwrap_or(INITIAL_BASE_FEE) } - /// Returns the base fee to use - pub fn get_gas_price(&self) -> u128 { - self.gas_price.unwrap_or(INITIAL_GAS_PRICE) - } - /// Sets a custom code size limit #[must_use] pub fn with_code_size_limit(mut self, code_size_limit: Option) -> Self { @@ -639,17 +631,10 @@ impl AnvilNodeConfig { self } - /// Sets the gas price - #[must_use] - pub fn with_gas_price(mut self, gas_price: Option) -> Self { - self.gas_price = gas_price; - self - } - /// Sets the base fee #[must_use] pub fn with_base_fee(mut self, base_fee: Option) -> Self { - self.base_fee = base_fee; + self.base_fee = base_fee.map(|bf| bf.into()); self } @@ -1028,3 +1013,15 @@ impl AccountGenerator { Ok(wallets) } } + +fn keypairs_from_private_keys( + accounts: &[PrivateKeySigner], +) -> Result, subxt_signer::eth::Error> { + accounts + .iter() + .map(|signer| { + let key = Keypair::from_secret_key(signer.credential().to_bytes().into())?; + Ok(key) + }) + .collect() +} diff --git a/crates/anvil-polkadot/src/substrate_node/chain_spec.rs b/crates/anvil-polkadot/src/substrate_node/chain_spec.rs index 3c7fc16920ded..7cf58735cd0f5 100644 --- a/crates/anvil-polkadot/src/substrate_node/chain_spec.rs +++ b/crates/anvil-polkadot/src/substrate_node/chain_spec.rs @@ -1,16 +1,35 @@ use crate::substrate_node::genesis::GenesisConfig; -use alloy_signer_local::PrivateKeySigner; +use codec::{Decode, Encode}; use polkadot_sdk::{ - sc_chain_spec::{ChainSpec, GetExtension}, + sc_chain_spec::{ChainSpec, GetExtension, json_patch}, + sc_executor, sc_executor::HostFunctions, sc_network::config::MultiaddrWithPeerId, sc_service::{ChainType, GenericChainSpec, Properties}, sc_telemetry::TelemetryEndpoints, - sp_core::storage::Storage, + sp_core::{ + storage::Storage, + traits::{CallContext, CodeExecutor, Externalities, FetchRuntimeCode, RuntimeCode}, + }, + sp_genesis_builder::Result as BuildResult, + sp_io::{self, hashing::blake2_256}, sp_runtime::BuildStorage, + sp_state_machine::BasicExternalities, }; -use substrate_runtime::WASM_BINARY; -use subxt_signer::eth::Keypair; +use serde_json::Value; +use std::borrow::Cow; + +pub fn development_chain_spec( + genesis_config: GenesisConfig, +) -> Result { + let inner = GenericChainSpec::builder(&genesis_config.code, Default::default()) + .with_name("Development") + .with_id("dev") + .with_chain_type(ChainType::Development) + .with_properties(props()) + .build(); + Ok(DevelopmentChainSpec { inner, genesis_config }) +} /// This is a wrapper around the general Substrate ChainSpec type that allows manual changes to the /// genesis block. @@ -26,8 +45,20 @@ where GenericChainSpec: BuildStorage, { fn assimilate_storage(&self, storage: &mut Storage) -> Result<(), String> { - self.inner.assimilate_storage(storage)?; storage.top.extend(self.genesis_config.as_storage_key_value()); + + // We need to initialise the storage used when calling into the runtime for the genesis + // config, so that the customised items (like block number and timestamp) will be + // seen even in the code that processes the genesis config patch. + let temp_storage = storage.clone(); + + GenesisBuilderRuntimeCaller::::new(&self.genesis_config.code[..]) + .get_storage_for_patch( + self.genesis_config.runtime_genesis_config_patch(), + temp_storage, + )? 
+ .assimilate_storage(storage)?; + Ok(()) } } @@ -105,35 +136,108 @@ where fn props() -> Properties { let mut properties = Properties::new(); properties.insert("tokenDecimals".to_string(), 12.into()); - properties.insert("tokenSymbol".to_string(), "MINI".into()); + properties.insert("tokenSymbol".to_string(), "DOT".into()); properties } -pub fn development_chain_spec( - genesis_config: GenesisConfig, -) -> Result { - let inner = GenericChainSpec::builder( - WASM_BINARY.expect("Development wasm not available"), - Default::default(), - ) - .with_name("Development") - .with_id("dev") - .with_chain_type(ChainType::Development) - .with_genesis_config_patch(genesis_config.runtime_genesis_config_patch()) - .with_properties(props()) - .build(); - Ok(DevelopmentChainSpec { inner, genesis_config }) +// This mostly copies the upstream `GenesisConfigBuilderRuntimeCaller`, but with the ability of +// injecting genesis state even before the genesis config builders in the runtime are run via +// `GenesisBuilder_build_state` +struct GenesisBuilderRuntimeCaller<'a, EHF = ()> +where + EHF: HostFunctions, +{ + code: Cow<'a, [u8]>, + code_hash: Vec, + executor: sc_executor::WasmExecutor<(sp_io::SubstrateHostFunctions, EHF)>, } -pub fn keypairs_from_private_keys( - accounts: &[PrivateKeySigner], -) -> Result, subxt_signer::eth::Error> { - accounts - .iter() - .map(|signer| { - let key = - subxt_signer::eth::Keypair::from_secret_key(signer.credential().to_bytes().into())?; - Ok(key) - }) - .collect() +impl<'a, EHF> FetchRuntimeCode for GenesisBuilderRuntimeCaller<'a, EHF> +where + EHF: HostFunctions, +{ + fn fetch_runtime_code(&self) -> Option> { + Some(self.code.as_ref().into()) + } +} + +impl<'a, EHF> GenesisBuilderRuntimeCaller<'a, EHF> +where + EHF: HostFunctions, +{ + fn new(code: &'a [u8]) -> Self { + GenesisBuilderRuntimeCaller { + code: code.into(), + code_hash: blake2_256(code).to_vec(), + executor: sc_executor::WasmExecutor::<(sp_io::SubstrateHostFunctions, EHF)>::builder() + .with_allow_missing_host_functions(true) + .build(), + } + } + + fn get_storage_for_patch( + &self, + patch: Value, + genesis_storage: Storage, + ) -> core::result::Result { + let mut config = self.get_named_preset(None)?; + json_patch::merge(&mut config, patch); + self.get_storage_for_config(config, genesis_storage) + } + + fn call( + &self, + ext: &mut dyn Externalities, + method: &str, + data: &[u8], + ) -> sc_executor::error::Result> { + self.executor + .call( + ext, + &RuntimeCode { heap_pages: None, code_fetcher: self, hash: self.code_hash.clone() }, + method, + data, + CallContext::Offchain, + ) + .0 + } + + fn get_named_preset(&self, id: Option<&String>) -> core::result::Result { + let mut t = BasicExternalities::new_empty(); + let call_result = self + .call(&mut t, "GenesisBuilder_get_preset", &id.encode()) + .map_err(|e| format!("wasm call error {e}"))?; + + let named_preset = Option::>::decode(&mut &call_result[..]) + .map_err(|e| format!("scale codec error: {e}"))?; + + if let Some(named_preset) = named_preset { + Ok(serde_json::from_slice(&named_preset[..]).expect("returned value is json. qed.")) + } else { + Err(format!("The preset with name {id:?} is not available.")) + } + } + + fn get_storage_for_config( + &self, + config: Value, + genesis_storage: Storage, + ) -> core::result::Result { + // This is the key difference compared to the upstream variant, we don't initialise the + // storage as empty. 
+ let mut ext = BasicExternalities::new(genesis_storage); + + let json_pretty_str = serde_json::to_string_pretty(&config) + .map_err(|e| format!("json to string failed: {e}"))?; + + let call_result = self + .call(&mut ext, "GenesisBuilder_build_state", &json_pretty_str.encode()) + .map_err(|e| format!("wasm call error {e}"))?; + + BuildResult::decode(&mut &call_result[..]) + .map_err(|e| format!("scale codec error: {e}"))? + .map_err(|e| format!("{e} for blob:\n{json_pretty_str}"))?; + + Ok(ext.into_storages()) + } } diff --git a/crates/anvil-polkadot/src/substrate_node/genesis.rs b/crates/anvil-polkadot/src/substrate_node/genesis.rs index 98907edbc1614..0777218ea8af3 100644 --- a/crates/anvil-polkadot/src/substrate_node/genesis.rs +++ b/crates/anvil-polkadot/src/substrate_node/genesis.rs @@ -13,15 +13,16 @@ use polkadot_sdk::{ sc_client_api::{BlockImportOperation, backend::Backend}, sc_executor::RuntimeVersionOf, sp_blockchain, - sp_core::{H160, storage::Storage}, + sp_core::{self, H160, storage::Storage}, sp_runtime::{ - BuildStorage, + BuildStorage, FixedU128, traits::{Block as BlockT, Hash as HashT, HashingFor, Header as HeaderT}, }, }; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; use std::{collections::BTreeMap, marker::PhantomData, sync::Arc}; +use substrate_runtime::{WASM_BINARY, constants::NATIVE_TO_ETH_RATIO}; use subxt_signer::eth::Keypair; /// Genesis settings @@ -37,7 +38,7 @@ pub struct GenesisConfig { /// The initial number for the genesis block pub number: u32, /// The genesis header base fee - pub base_fee_per_gas: u64, + pub base_fee_per_gas: FixedU128, /// The genesis header gas limit. pub gas_limit: Option, /// Signer accounts from account_generator @@ -46,6 +47,8 @@ pub struct GenesisConfig { pub genesis_balance: U256, /// Coinbase address pub coinbase: Option
, + /// Substrate runtime code + pub code: Vec, } impl<'a> From<&'a AnvilNodeConfig> for GenesisConfig { @@ -62,11 +65,15 @@ impl<'a> From<&'a AnvilNodeConfig> for GenesisConfig { .get_genesis_number() .try_into() .expect("Genesis block number overflow"), - base_fee_per_gas: anvil_config.get_base_fee(), + base_fee_per_gas: FixedU128::from_rational( + anvil_config.get_base_fee(), + NATIVE_TO_ETH_RATIO.into(), + ), gas_limit: anvil_config.gas_limit, genesis_accounts: anvil_config.genesis_accounts.clone(), genesis_balance: anvil_config.genesis_balance, coinbase: anvil_config.genesis.as_ref().map(|g| g.coinbase), + code: WASM_BINARY.expect("Development wasm not available").to_vec(), } } } @@ -94,8 +101,8 @@ impl GenesisConfig { (well_known_keys::TIMESTAMP.to_vec(), self.timestamp.encode()), (well_known_keys::BLOCK_NUMBER_KEY.to_vec(), self.number.encode()), (well_known_keys::AURA_AUTHORITIES.to_vec(), vec![aura_authority_id].encode()), + (sp_core::storage::well_known_keys::CODE.to_vec(), self.code.clone()), ]; - // TODO: add other fields storage } @@ -151,6 +158,9 @@ impl GenesisConfig { "revive": { "accounts": revive_genesis_accounts, }, + "transactionPayment": { + "multiplier": self.base_fee_per_gas.into_inner().to_string(), + } }) } } @@ -185,7 +195,7 @@ impl, E: RuntimeVersionOf> ) } - pub fn new_with_storage( + fn new_with_storage( genesis_number: u64, genesis_storage: Storage, commit_genesis_state: bool, @@ -261,11 +271,13 @@ mod tests { let timestamp: u64 = 10; let chain_id: u64 = 42; let authority_id: [u8; 32] = [0xEE; 32]; + let base_fee_per_gas = FixedU128::from_rational(6_000_000, NATIVE_TO_ETH_RATIO.into()); let genesis_config = GenesisConfig { number: block_number, timestamp, chain_id, coinbase: Some(Address::from([0xEE; 20])), + base_fee_per_gas, ..Default::default() }; let genesis_storage = genesis_config.as_storage_key_value(); diff --git a/crates/anvil-polkadot/src/substrate_node/mining_engine.rs b/crates/anvil-polkadot/src/substrate_node/mining_engine.rs index 4b61c21d4e1a6..1070a1644a1fa 100644 --- a/crates/anvil-polkadot/src/substrate_node/mining_engine.rs +++ b/crates/anvil-polkadot/src/substrate_node/mining_engine.rs @@ -11,7 +11,7 @@ use parking_lot::RwLock; use polkadot_sdk::{ sc_consensus_manual_seal::{CreatedBlock, EngineCommand, Error as BlockProducingError}, sc_service::TransactionPool, - sp_core, + sp_core::{self, H256}, }; use std::{pin::Pin, sync::Arc}; use substrate_runtime::Hash; @@ -134,21 +134,22 @@ impl MiningEngine { /// * `interval` - Optional time to advance between blocks (in seconds) /// /// # Returns - /// * `Ok(())` - All blocks were mined successfully + /// * `Ok(H256)` - The hash of the last block mined successfully. /// * `Err(MiningError)` - Block production failed pub async fn mine( &self, num_blocks: Option, interval: Option, - ) -> Result<(), MiningError> { + ) -> Result { let blocks = num_blocks.unwrap_or(1); + let mut last_hash = H256::zero(); for _ in 0..blocks { if let Some(interval) = interval { self.time_manager.increase_time(interval.as_secs()); } - seal_now(&self.seal_command_sender).await?; + last_hash = seal_now(&self.seal_command_sender).await?.hash; } - Ok(()) + Ok(last_hash) } /// Ethereum-compatible block mining RPC method. @@ -161,10 +162,10 @@ impl MiningEngine { /// * `opts` - Optional mining parameters including timestamp and block count /// /// # Returns - /// * `Ok(())` - Success response + /// * `Ok(H256)` - The hash of the last block mined successfully. 
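// Illustrative sketch, not part of the patch: what the `transactionPayment.multiplier`
// entry of the genesis patch built in `genesis.rs` ends up looking like. `into_inner()`
// exposes the raw 1e18-scaled integer behind the `FixedU128`, and the patch passes it
// on as a decimal string.
use polkadot_sdk::sp_runtime::FixedU128;
use serde_json::json;

fn transaction_payment_patch(base_fee_per_gas: FixedU128) -> serde_json::Value {
    json!({
        "transactionPayment": {
            "multiplier": base_fee_per_gas.into_inner().to_string()
        }
    })
}

fn main() {
    // A multiplier of 6.0 (a 6_000_000 wei base fee) is rendered as 6 * 10^18.
    let patch = transaction_payment_patch(FixedU128::from_rational(6_000_000, 1_000_000));
    assert_eq!(patch["transactionPayment"]["multiplier"], "6000000000000000000");
}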
/// * `Err(MiningError)` - Mining operation failed - pub async fn evm_mine(&self, opts: Option) -> Result<(), MiningError> { - self.do_evm_mine(opts).await.map(|_| ()) + pub async fn evm_mine(&self, opts: Option) -> Result { + self.do_evm_mine(opts).await.map(|res| res.1) } /// Configure interval-based mining mode. @@ -311,8 +312,9 @@ impl MiningEngine { self.waker.wake(); } - pub async fn do_evm_mine(&self, opts: Option) -> Result { + pub async fn do_evm_mine(&self, opts: Option) -> Result<(u64, H256), MiningError> { let mut blocks_to_mine = 1u64; + let mut last_hash = H256::zero(); if let Some(opts) = opts { let timestamp = match opts { @@ -333,10 +335,10 @@ impl MiningEngine { } for _ in 0..blocks_to_mine { - seal_now(&self.seal_command_sender).await?; + last_hash = seal_now(&self.seal_command_sender).await?.hash; } - Ok(blocks_to_mine) + Ok((blocks_to_mine, last_hash)) } } diff --git a/crates/anvil-polkadot/src/substrate_node/service/backend.rs b/crates/anvil-polkadot/src/substrate_node/service/backend.rs index 331bedf7b748c..a39b59a08dbca 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/backend.rs @@ -13,6 +13,7 @@ use polkadot_sdk::{ sp_blockchain, sp_core::{H160, H256}, sp_io::hashing::blake2_256, + sp_runtime::FixedU128, sp_state_machine::{StorageKey, StorageValue}, }; use std::{collections::HashMap, num::NonZeroUsize, sync::Arc}; @@ -30,6 +31,10 @@ pub enum BackendError { MissingAuraAuthorities, #[error("Could not find timestamp in the state")] MissingTimestamp, + #[error("Could not find the next fee multiplier in the state")] + MissingNextFeeMultiplier, + #[error("Could not find block number in the state")] + MissingBlockNumber, #[error("Unable to decode total issuance {0}")] DecodeTotalIssuance(codec::Error), #[error("Unable to decode chain id {0}")] @@ -44,8 +49,12 @@ pub enum BackendError { DecodeCodeInfo(codec::Error), #[error("Unable to decode timestamp: {0}")] DecodeTimestamp(codec::Error), + #[error("Unable to decode blockNumber: {0}")] + DecodeBlockNumber(codec::Error), #[error("Unable to decode aura authorities: {0}")] DecodeAuraAuthorities(codec::Error), + #[error("Unable to decode the next fee multiplier: {0}")] + DecodeNextFeeMultiplier(codec::Error), } type Result = std::result::Result; @@ -72,6 +81,13 @@ impl BackendWithOverlay { u64::decode(&mut &value[..]).map_err(BackendError::DecodeTimestamp) } + pub fn read_block_number(&self, hash: Hash) -> Result { + let key = well_known_keys::BLOCK_NUMBER_KEY; + let value = + self.read_top_state(hash, key.to_vec())?.ok_or(BackendError::MissingBlockNumber)?; + u32::decode(&mut &value[..]).map_err(BackendError::DecodeBlockNumber) + } + pub fn read_chain_id(&self, hash: Hash) -> Result { let key = well_known_keys::CHAIN_ID; @@ -164,6 +180,11 @@ impl BackendWithOverlay { overrides.set_coinbase(at, aura_authority); } + pub fn inject_next_fee_multiplier(&self, at: Hash, next_fee_multiplier: FixedU128) { + let mut overrides = self.overrides.lock(); + overrides.set_next_fee_multiplier(at, next_fee_multiplier); + } + pub fn inject_total_issuance(&self, at: Hash, value: Balance) { let mut overrides = self.overrides.lock(); overrides.set_total_issuance(at, value); @@ -263,6 +284,16 @@ impl StorageOverrides { self.add(latest_block, changeset); } + fn set_next_fee_multiplier(&mut self, latest_block: Hash, next_fee_multiplier: FixedU128) { + let mut changeset = BlockOverrides::default(); + changeset.top.insert( + well_known_keys::NEXT_FEE_MULTIPLIER.to_vec(), + 
Some(next_fee_multiplier.encode()), + ); + + self.add(latest_block, changeset); + } + fn set_system_account_info( &mut self, latest_block: Hash, diff --git a/crates/anvil-polkadot/src/substrate_node/service/storage.rs b/crates/anvil-polkadot/src/substrate_node/service/storage.rs index 57c5bdad9de1a..a6553cead52c8 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/storage.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/storage.rs @@ -48,6 +48,12 @@ pub mod well_known_keys { 154, 166, 12, 2, 190, 154, 220, 201, 138, 13, 29, ]; + //twox_128(b"TransactionPayment" + b"NextFeeMultiplier") + pub const NEXT_FEE_MULTIPLIER: [u8; 32] = [ + 63, 20, 103, 160, 150, 188, 215, 26, 91, 106, 12, 129, 85, 226, 8, 16, 63, 46, 223, 59, + 223, 56, 29, 235, 227, 49, 171, 116, 70, 173, 223, 220, + ]; + pub fn system_account_info(account_id: AccountId) -> Vec { let mut key = Vec::new(); key.extend_from_slice(&twox_128("System".as_bytes())); diff --git a/crates/anvil-polkadot/substrate-runtime/Cargo.toml b/crates/anvil-polkadot/substrate-runtime/Cargo.toml index 130e21313a834..f3e1b561a3c3b 100644 --- a/crates/anvil-polkadot/substrate-runtime/Cargo.toml +++ b/crates/anvil-polkadot/substrate-runtime/Cargo.toml @@ -21,6 +21,7 @@ polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parachains-common", + "polkadot-runtime-common", "runtime", "sp-consensus-aura", "with-tracing", diff --git a/crates/anvil-polkadot/substrate-runtime/src/lib.rs b/crates/anvil-polkadot/substrate-runtime/src/lib.rs index 9dcb200b5a2f0..8fc4409422450 100644 --- a/crates/anvil-polkadot/substrate-runtime/src/lib.rs +++ b/crates/anvil-polkadot/substrate-runtime/src/lib.rs @@ -22,12 +22,13 @@ use pallet_revive::{ runtime::EthExtra, }, }; -use pallet_transaction_payment::{ConstFeeMultiplier, FeeDetails, Multiplier, RuntimeDispatchInfo}; +use pallet_transaction_payment::{FeeDetails, RuntimeDispatchInfo}; use polkadot_sdk::{ parachains_common::{ AccountId, AssetHubPolkadotAuraId as AuraId, BlockNumber, Hash as CommonHash, Header, Nonce, Signature, }, + polkadot_runtime_common::SlowAdjustingFeeUpdate, polkadot_sdk_frame::{ deps::sp_genesis_builder, runtime::{apis, prelude::*}, @@ -41,6 +42,11 @@ use polkadot_sdk::{ pub use polkadot_sdk::parachains_common::Balance; use sp_weights::ConstantMultiplier; +pub mod constants { + /// DOT precision (1e12) to ETH precision (1e18) ratio. + pub const NATIVE_TO_ETH_RATIO: u32 = 1_000_000; +} + pub mod currency { use super::Balance; pub const DOLLARS: Balance = 1_000_000_000_000; @@ -257,17 +263,27 @@ impl pallet_sudo::Config for Runtime {} impl pallet_timestamp::Config for Runtime {} parameter_types! { - pub const TransactionByteFee: Balance = 10 * MILLICENTS; - pub FeeMultiplier: Multiplier = Multiplier::one(); + // That's how asset-hub-westend sets this. + pub const TransactionByteFee: Balance = MILLICENTS; } +// That's how asset-hub-westend sets this. +pub type WeightToFee = BlockRatioFee< + // p + CENTS, + // q + { 100 * ExtrinsicBaseWeight::get().ref_time() as u128 }, + Runtime, +>; + // Implements the types required for the transaction payment pallet. 
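// Illustrative sketch, not part of the patch: how the `NEXT_FEE_MULTIPLIER` constant in
// `well_known_keys` is derived. A plain FRAME storage value lives under
// twox_128(pallet_prefix) ++ twox_128(storage_name), the same scheme
// `system_account_info` already uses for its prefix; `twox_128` is assumed to be the
// hashing helper re-exported through `polkadot_sdk::sp_io`.
use polkadot_sdk::sp_io::hashing::twox_128;

fn next_fee_multiplier_key() -> Vec<u8> {
    let mut key = Vec::with_capacity(32);
    key.extend_from_slice(&twox_128(b"TransactionPayment"));
    key.extend_from_slice(&twox_128(b"NextFeeMultiplier"));
    key
}

fn main() {
    let key = next_fee_multiplier_key();
    // Two 16-byte twox_128 hashes concatenated: the 32-byte constant hard-coded above.
    assert_eq!(key.len(), 32);
    println!("{key:?}");
}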
#[derive_impl(pallet_transaction_payment::config_preludes::TestDefaultConfig)] impl pallet_transaction_payment::Config for Runtime { type OnChargeTransaction = pallet_transaction_payment::FungibleAdapter; - type WeightToFee = BlockRatioFee<1, 1, Self>; + type WeightToFee = WeightToFee; type LengthToFee = ConstantMultiplier; - type FeeMultiplierUpdate = ConstFeeMultiplier; + // That's how asset-hub-westend sets this. + type FeeMultiplierUpdate = SlowAdjustingFeeUpdate; } parameter_types! { @@ -299,7 +315,7 @@ impl pallet_revive::Config for Runtime { // `forking` feature. type FindAuthor = BlockAuthor; type Balance = Balance; - type NativeToEthRatio = ConstU32<1_000_000>; + type NativeToEthRatio = ConstU32<{ constants::NATIVE_TO_ETH_RATIO }>; type UploadOrigin = EnsureSigned; type InstantiateOrigin = EnsureSigned; type Time = Timestamp; diff --git a/crates/anvil-polkadot/test-data/genesis.json b/crates/anvil-polkadot/test-data/genesis.json new file mode 100644 index 0000000000000..239c5304d7abf --- /dev/null +++ b/crates/anvil-polkadot/test-data/genesis.json @@ -0,0 +1,21 @@ +{ + "config": { + "chainId": 42420 + }, + "timestamp": "0x120925", + "number": 7, + "coinbase": "0xee00000000000000000000000000000000000007", + "alloc": { + "71562b71999873db5b286df957af199ec94617f7": { + "balance": "0x1bc16d674ec80000", + "nonce": "0x01" + }, + "821a038b8787187299554cc87ec442cdcd824e65": { + "balance": "0x4563918244f40000", + "code": "0x0101010101010101010101010101010101010101", + "storage": { + "0x00": "0x01f4" + } + } + } +} \ No newline at end of file diff --git a/crates/anvil-polkadot/tests/it/gas.rs b/crates/anvil-polkadot/tests/it/gas.rs new file mode 100644 index 0000000000000..24ee03aaddd69 --- /dev/null +++ b/crates/anvil-polkadot/tests/it/gas.rs @@ -0,0 +1,171 @@ +use std::time::Duration; + +use crate::utils::{TestNode, unwrap_response}; +use alloy_primitives::{Address, U256}; +use alloy_rpc_types::TransactionRequest; +use anvil_core::eth::EthRequest; +use anvil_polkadot::config::{AnvilNodeConfig, INITIAL_BASE_FEE, SubstrateNodeConfig}; +use polkadot_sdk::pallet_revive::evm::Account; +use rstest::rstest; +use std::ops::Not; + +#[tokio::test(flavor = "multi_thread")] +#[rstest] +#[case(false)] +#[case(true)] +async fn test_set_next_fee_multiplier(#[case] rpc_driven: bool) { + // 1e18 denomination. + let new_base_fee = U256::from(6_000_000); + let anvil_node_config = AnvilNodeConfig::test_config() + .with_base_fee(rpc_driven.not().then_some(new_base_fee.to::())); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); + + let gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); + + if rpc_driven { + assert_eq!(gas_price.to::(), INITIAL_BASE_FEE); + unwrap_response::<()>( + node.eth_rpc(EthRequest::SetNextBlockBaseFeePerGas(new_base_fee)).await.unwrap(), + ) + .unwrap(); + } else { + assert_eq!(gas_price, new_base_fee); + } + + // Currently the gas_price returned from evm is equivalent to the base_fee. + let gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); + assert_eq!(gas_price, new_base_fee); + + // We send a regular eth transfer to check the associated effective gas price used by the + // transaction, after it will be included in a next block. 
We're interested especially in + // the tx effective gas price to validate that the base_fee_per_gas set previously is also + // considered when computing the fees for the tx execution. + // We could have checked the `base_fee_per_gas` after querying the latest eth block mined + // (which could have been empty too) after setting a new base fee, but it will not report the + // correct base fee because of: https://github.com/paritytech/polkadot-sdk/issues/10177. + let alith = Account::from(subxt_signer::eth::dev::alith()); + let baltathar = Account::from(subxt_signer::eth::dev::baltathar()); + let alith_initial_balance = node.get_balance(alith.address(), None).await; + let baltathar_initial_balance = node.get_balance(baltathar.address(), None).await; + let transfer_amount = U256::from_str_radix("100000000000000000", 10).unwrap(); + let transaction = TransactionRequest::default() + .value(transfer_amount) + .from(Address::from(alith.address().to_fixed_bytes())) + .to(Address::from(baltathar.address().to_fixed_bytes())); + let tx_hash = node.send_transaction(transaction, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + node.wait_for_block_with_timeout(1, Duration::from_millis(400)).await.unwrap(); + tokio::time::sleep(Duration::from_millis(400)).await; + let transaction_receipt = node.get_transaction_receipt(tx_hash).await; + let effective_gas_price = + U256::from_be_bytes(transaction_receipt.effective_gas_price.to_big_endian()); + let gas_used = U256::from_be_bytes(transaction_receipt.gas_used.to_big_endian()); + assert_eq!(effective_gas_price, new_base_fee); + let alith_final_balance = node.get_balance(alith.address(), None).await; + let baltathar_final_balance = node.get_balance(baltathar.address(), None).await; + assert_eq!( + baltathar_final_balance, + baltathar_initial_balance + transfer_amount, + "Baltathar's balance should have changed" + ); + assert_eq!( + alith_final_balance, + alith_initial_balance - transfer_amount - effective_gas_price * gas_used, + "Alith's balance should have changed" + ); + + let block1_hash = node.block_hash_by_number(1).await.unwrap(); + let block1 = node.get_block_by_hash(block1_hash).await; + // This will fail ideally once we update to a polkadot-sdk version that includes a fix for + // https://github.com/paritytech/polkadot-sdk/issues/10177. The reported base_fer_per_gas + // should be the previously set `new_base_fee`. + assert_eq!(U256::from_be_bytes(block1.base_fee_per_gas.to_big_endian()), U256::from(5999888)); + + // Mining a second block should update the base fee according to the logic that determines + // the base_fee in relation to how congested the network is. + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + node.wait_for_block_with_timeout(2, Duration::from_millis(500)).await.unwrap(); + let block2_hash = node.block_hash_by_number(2).await.unwrap(); + let block2 = node.get_block_by_hash(block2_hash).await; + + // This will fail ideally once we update to a polkadot-sdk version that includes a fix for + // https://github.com/paritytech/polkadot-sdk/issues/10177. + assert_eq!(U256::from_be_bytes(block2.base_fee_per_gas.to_big_endian()), 5999775); +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_next_fee_multiplier_minimum() { + // 1e18 denomination. 
+ let new_base_fee = U256::from(50_123); + let anvil_node_config = + AnvilNodeConfig::test_config().with_base_fee(Some(new_base_fee.to::())); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); + + // Currently the gas_price returned from evm is equivalent to the base_fee. + let gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); + assert_eq!(gas_price, new_base_fee); + + // We send a regular eth transfer to check the associated effective gas price used by the + // transaction, after it will be included in a next block. We're interested especially in + // the tx effective gas price to validate that the base_fee_per_gas set previously is also + // considered when computing the fees for the tx execution. + // We could have checked the `base_fee_per_gas` after querying the latest eth block mined + // (which could have been empty too) after setting a new base fee, but it will not report the + // correct base fee because of: https://github.com/paritytech/polkadot-sdk/issues/10177. + let alith = Account::from(subxt_signer::eth::dev::alith()); + let baltathar = Account::from(subxt_signer::eth::dev::baltathar()); + let alith_initial_balance = node.get_balance(alith.address(), None).await; + let baltathar_initial_balance = node.get_balance(baltathar.address(), None).await; + let transfer_amount = U256::from_str_radix("100000000000000000", 10).unwrap(); + let transaction = TransactionRequest::default() + .value(transfer_amount) + .from(Address::from(alith.address().to_fixed_bytes())) + .to(Address::from(baltathar.address().to_fixed_bytes())); + let tx_hash = node.send_transaction(transaction, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + node.wait_for_block_with_timeout(1, Duration::from_millis(400)).await.unwrap(); + tokio::time::sleep(Duration::from_millis(400)).await; + let transaction_receipt = node.get_transaction_receipt(tx_hash).await; + let effective_gas_price = + U256::from_be_bytes(transaction_receipt.effective_gas_price.to_big_endian()); + let gas_used = U256::from_be_bytes(transaction_receipt.gas_used.to_big_endian()); + assert_eq!(effective_gas_price, new_base_fee); + let alith_final_balance = node.get_balance(alith.address(), None).await; + let baltathar_final_balance = node.get_balance(baltathar.address(), None).await; + assert_eq!( + baltathar_final_balance, + baltathar_initial_balance + transfer_amount, + "Baltathar's balance should have changed" + ); + assert_eq!( + alith_final_balance, + alith_initial_balance - transfer_amount - effective_gas_price * gas_used, + "Alith's balance should have changed" + ); + + let block1_hash = node.block_hash_by_number(1).await.unwrap(); + let block1 = node.get_block_by_hash(block1_hash).await; + + // The anvil-polkadot substrate-runtime is configured similarly to the assethub runtimes in + // terms of the minimum NextFeeMultiplier value that can be reached. The minimum is the one + // configured in the runtime, which in our case is the same as for asset-hub-westend. This + // assert should fail once https://github.com/paritytech/polkadot-sdk/issues/10177 is fixed. + // The actual value should be the previously set base_fee. 
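    // In other words: because the runtime enforces a minimum NextFeeMultiplier, a requested
    // base fee below the resulting floor is rounded up to it. A hedged sketch of the effect
    // the asserts below check; the 100_000 floor is what this runtime configuration happens
    // to yield, not a constant read from the pallet:
    //
    //     fn expected_base_fee(requested: u128, floor: u128) -> u128 {
    //         requested.max(floor)
    //     }
    //
    //     // expected_base_fee(50_123, 100_000) == 100_000, which both mined blocks report.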
+ assert_eq!(U256::from_be_bytes(block1.base_fee_per_gas.to_big_endian()), U256::from(100_000)); + + // Mining a second block should update the base fee according to the logic that determines + // the base_fee in relation to how congested the network is. + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + node.wait_for_block_with_timeout(2, Duration::from_millis(500)).await.unwrap(); + let block2_hash = node.block_hash_by_number(2).await.unwrap(); + let block2 = node.get_block_by_hash(block2_hash).await; + + // However, since the previously set base_fee is lower than the minimum, this should be set + // right away to the minimum. + assert_eq!(U256::from_be_bytes(block2.base_fee_per_gas.to_big_endian()), U256::from(100_000)); +} diff --git a/crates/anvil-polkadot/tests/it/genesis.rs b/crates/anvil-polkadot/tests/it/genesis.rs index f44b0f1a9cd2b..29b270c20f086 100644 --- a/crates/anvil-polkadot/tests/it/genesis.rs +++ b/crates/anvil-polkadot/tests/it/genesis.rs @@ -10,9 +10,12 @@ use alloy_primitives::{Address, B256, Bytes, U256}; use alloy_rpc_types::{BlockId, TransactionInput, TransactionRequest}; use alloy_sol_types::SolCall; use anvil_core::eth::EthRequest; -use anvil_polkadot::config::{AnvilNodeConfig, SubstrateNodeConfig}; +use anvil_polkadot::{ + api_server::revive_conversions::ReviveAddress, + config::{AnvilNodeConfig, SubstrateNodeConfig}, +}; use polkadot_sdk::pallet_revive::{self, evm::Account}; -use std::{collections::BTreeMap, time::Duration}; +use std::{collections::BTreeMap, path::PathBuf}; use subxt::utils::H160; #[tokio::test(flavor = "multi_thread")] @@ -29,11 +32,16 @@ async fn test_genesis_params() { // Check that block number, timestamp, and chain id are set correctly at genesis assert_eq!(node.best_block_number().await, genesis_block_number); + assert_eq!(node.eth_best_block().await.number.as_u32(), genesis_block_number); + let genesis_hash = node.block_hash_by_number(genesis_block_number).await.unwrap(); // Anvil genesis timestamp is in seconds, while Substrate timestamp is in milliseconds. 
let genesis_timestamp = anvil_genesis_timestamp.checked_mul(1000).unwrap(); let actual_genesis_timestamp = node.get_decoded_timestamp(Some(genesis_hash)).await; assert_eq!(actual_genesis_timestamp, genesis_timestamp); + let eth_genesis_timestamp = node.get_eth_timestamp(Some(genesis_hash)).await; + assert_eq!(anvil_genesis_timestamp, eth_genesis_timestamp); + let current_chain_id_hex = unwrap_response::(node.eth_rpc(EthRequest::EthChainId(())).await.unwrap()).unwrap(); assert_eq!(current_chain_id_hex, to_hex_string(chain_id)); @@ -46,8 +54,13 @@ async fn test_genesis_params() { let latest_block_number = node.best_block_number().await; assert_eq!(latest_block_number, genesis_block_number + 2); + assert_eq!(node.eth_best_block().await.number.as_u32(), genesis_block_number + 2); + let hash2 = node.block_hash_by_number(genesis_block_number + 2).await.unwrap(); let timestamp2 = node.get_decoded_timestamp(Some(hash2)).await; + let eth_timestamp2 = node.get_eth_timestamp(Some(hash2)).await; + assert_eq!(eth_timestamp2, timestamp2 / 1000); + assert_with_tolerance( timestamp2.saturating_sub(genesis_timestamp), 2000, @@ -194,17 +207,7 @@ async fn test_genesis_alloc() { assert_eq!(contract_code_result, runtime_bytecode, "Genesis contract code should match"); // Test contract storage - let result = node - .eth_rpc(EthRequest::EthGetStorageAt( - Address::from(test_contract_bytes), - U256::from(0), - None, - )) - .await - .unwrap(); - let hex_string = unwrap_response::(result).unwrap(); - let hex_value = hex_string.strip_prefix("0x").unwrap_or(&hex_string); - let stored_value = U256::from_str_radix(hex_value, 16).unwrap(); + let stored_value = node.get_storage_at(U256::from(0), test_contract_address).await; assert_eq!(stored_value, 511, "Storage slot 0 of genesis contract should contain value 511"); // Test contract functionality by calling getValue() @@ -230,13 +233,12 @@ async fn test_coinbase_genesis() { .with_genesis(Some(Genesis { coinbase: genesis_coinbase, ..Default::default() })); let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); - unwrap_response::<()>(node.eth_rpc(EthRequest::SetAutomine(true)).await.unwrap()).unwrap(); // Deploy multicall contract let alith = Account::from(subxt_signer::eth::dev::alith()); let contract_code = get_contract_code("Multicall"); - let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), Some(1)).await; - tokio::time::sleep(Duration::from_millis(400)).await; + let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; + let _ = node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap(); // Get contract address. 
let receipt = node.get_transaction_receipt(tx_hash).await; @@ -253,3 +255,128 @@ async fn test_coinbase_genesis() { genesis_coinbase, ); } + +#[tokio::test(flavor = "multi_thread")] +async fn test_genesis_json() { + // Load genesis.json file from test-data directory + let genesis_json_path = + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data").join("genesis.json"); + let genesis_file = std::fs::File::open(&genesis_json_path) + .unwrap_or_else(|_| panic!("Failed to open genesis.json at {genesis_json_path:?}")); + let genesis: Genesis = serde_json::from_reader(genesis_file) + .unwrap_or_else(|e| panic!("Failed to parse genesis.json: {e}")); + + // Expected values from genesis.json + let expected_chain_id = genesis.config.chain_id; + let expected_timestamp = genesis.timestamp; + let expected_block_number = genesis.number.unwrap_or_default(); + let expected_coinbase = genesis.coinbase; + let alloc_accounts = genesis.alloc.iter(); + + // Create node config with genesis from file + let anvil_node_config = AnvilNodeConfig::test_config().with_genesis(Some(genesis.clone())); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config, substrate_node_config).await.unwrap(); + + // Test chain ID + let chain_id_hex = + unwrap_response::(node.eth_rpc(EthRequest::EthChainId(())).await.unwrap()).unwrap(); + assert_eq!( + chain_id_hex, + to_hex_string(expected_chain_id), + "Chain ID should match the one in genesis.json" + ); + + // Test block number + let genesis_block_number = node.best_block_number().await; + assert_eq!( + genesis_block_number as u64, expected_block_number, + "Genesis block number should match the one in genesis.json" + ); + + assert_eq!(node.eth_best_block().await.number.as_u64(), expected_block_number); + + // Test timestamp + let genesis_hash = node.block_hash_by_number(genesis_block_number).await.unwrap(); + // Anvil genesis timestamp is in seconds, while Substrate timestamp is in milliseconds + let expected_timestamp_ms = expected_timestamp.checked_mul(1000).unwrap(); + let actual_timestamp = node.get_decoded_timestamp(Some(genesis_hash)).await; + assert_eq!( + actual_timestamp, expected_timestamp_ms, + "Genesis timestamp should match the one in genesis.json" + ); + + let eth_genesis_timestamp = node.get_eth_timestamp(Some(genesis_hash)).await; + assert_eq!(expected_timestamp, eth_genesis_timestamp); + + // Test coinbase + let coinbase = + unwrap_response::
(node.eth_rpc(EthRequest::EthCoinbase(())).await.unwrap()) + .unwrap(); + assert_eq!(coinbase, expected_coinbase, "Coinbase should match the one in genesis.json"); + + // Scan through all accounts in the genesis alloc and test their balances, nonces, codes, and + // storage. + for (&account_addr, account_info) in alloc_accounts { + let account_balance_actual = + node.get_balance(H160::from_slice(account_addr.as_slice()), None).await; + let expected_balance = account_info.balance; + assert_eq!( + account_balance_actual, expected_balance, + "Account balance should match the one in genesis.json" + ); + let account_nonce_actual = node.get_nonce(account_addr).await; + let expected_nonce = account_info.nonce.unwrap_or_default(); + assert_eq!( + account_nonce_actual, expected_nonce, + "Account nonce should match the one in genesis.json" + ); + if account_info.code.is_none() { + let code_actual = unwrap_response::( + node.eth_rpc(EthRequest::EthGetCodeAt( + account_addr, + Some(BlockId::number(genesis_block_number.into())), + )) + .await + .unwrap(), + ) + .unwrap(); + assert!( + code_actual.is_empty(), + "Genesis account should have no code as in genesis.json" + ); + } else { + let code_actual = unwrap_response::( + node.eth_rpc(EthRequest::EthGetCodeAt( + account_addr, + Some(BlockId::number(genesis_block_number.into())), + )) + .await + .unwrap(), + ) + .unwrap(); + assert!( + !code_actual.is_empty(), + "Genesis account should have non-empty code as in genesis.json" + ); + assert_eq!( + code_actual, + account_info.code.clone().unwrap(), + "Genesis account code should match the one in genesis.json" + ); + for (storage_key, storage_value) in &account_info.storage.clone().unwrap_or_default() { + let storage_value_actual = node + .get_storage_at( + U256::from_be_bytes(storage_key.0), + ReviveAddress::from(account_addr).inner(), + ) + .await; + let expected_storage_value = U256::from_be_bytes(storage_value.0); + assert_eq!( + storage_value_actual, expected_storage_value, + "Genesis account storage value should match the one in genesis.json" + ); + } + } + } +} diff --git a/crates/anvil-polkadot/tests/it/main.rs b/crates/anvil-polkadot/tests/it/main.rs index cb7c29087c063..53c7a0f9db93d 100644 --- a/crates/anvil-polkadot/tests/it/main.rs +++ b/crates/anvil-polkadot/tests/it/main.rs @@ -1,4 +1,5 @@ mod abi; +mod gas; mod genesis; mod impersonation; mod mining; diff --git a/crates/anvil-polkadot/tests/it/sign.rs b/crates/anvil-polkadot/tests/it/sign.rs index c7fe1a7be6906..19553e974a92a 100644 --- a/crates/anvil-polkadot/tests/it/sign.rs +++ b/crates/anvil-polkadot/tests/it/sign.rs @@ -12,7 +12,6 @@ use polkadot_sdk::{ pallet_revive::evm::{Account, TransactionSigned}, sp_core::{H256, U256}, }; -use std::time::Duration; #[tokio::test(flavor = "multi_thread")] async fn can_sign_transaction() { @@ -78,7 +77,6 @@ async fn can_sign_transaction() { ) .unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let transaction_receipt = node.get_transaction_receipt(tx_hash).await; assert_eq!(transaction_receipt.from, alith.address()); diff --git a/crates/anvil-polkadot/tests/it/snapshot.rs b/crates/anvil-polkadot/tests/it/snapshot.rs index 60ade11d39e20..b676433c8e63c 100644 --- a/crates/anvil-polkadot/tests/it/snapshot.rs +++ b/crates/anvil-polkadot/tests/it/snapshot.rs @@ -2,12 +2,10 @@ use std::time::Duration; use crate::{ abi::Multicall, - utils::{ - BlockWaitTimeout, TestNode, assert_with_tolerance, 
get_contract_code, unwrap_response, - }, + utils::{TestNode, assert_with_tolerance, get_contract_code, unwrap_response}, }; use alloy_primitives::{Address, Bytes, U256}; -use alloy_rpc_types::{TransactionInput, TransactionRequest}; +use alloy_rpc_types::{TransactionInput, TransactionRequest, txpool::TxpoolInspect}; use alloy_serde::WithOtherFields; use alloy_sol_types::SolCall; use anvil_core::eth::EthRequest; @@ -30,18 +28,17 @@ async fn assert_block_number_is_best_and_finalized( assert_eq!(std::convert::Into::::into(node.best_block_number().await), n); if let Some(duration) = wait_for_block_provider { tokio::time::sleep(duration).await; - let best_block = unwrap_response::( - node.eth_rpc(EthRequest::EthGetBlockByNumber( - alloy_eips::BlockNumberOrTag::Latest, - false, - )) + } + let best_block = unwrap_response::( + node.eth_rpc(EthRequest::EthGetBlockByNumber(alloy_eips::BlockNumberOrTag::Latest, false)) .await .unwrap(), - ) - .unwrap(); - let n_as_u256 = pallet_revive::U256::from(n); - assert_eq!(best_block.number, n_as_u256); + ) + .unwrap(); + let n_as_u256 = pallet_revive::U256::from(n); + assert_eq!(best_block.number, n_as_u256); + for _ in 0..3 { let finalized_block = unwrap_response::( node.eth_rpc(EthRequest::EthGetBlockByNumber( alloy_eips::BlockNumberOrTag::Finalized, @@ -51,8 +48,12 @@ async fn assert_block_number_is_best_and_finalized( .unwrap(), ) .unwrap(); - assert_eq!(finalized_block.number, n_as_u256); + if finalized_block.number == n_as_u256 { + return; + } + tokio::time::sleep(Duration::from_millis(400)).await; } + panic!("Could not reach the desired finalized block number after 3 retries."); } async fn snapshot(node: &mut TestNode, expected_snapshot_id: U256) -> U256 { @@ -68,18 +69,12 @@ async fn snapshot(node: &mut TestNode, expected_snapshot_id: U256) -> U256 { id } -async fn mine_blocks( - node: &mut TestNode, - blocks: u64, - assert_best_block: u64, - wait_for_block_provider: Option, -) { +async fn mine_blocks(node: &mut TestNode, blocks: u64, assert_best_block: u64) { unwrap_response::<()>( node.eth_rpc(EthRequest::Mine(Some(U256::from(blocks)), None)).await.unwrap(), ) .unwrap(); - assert_block_number_is_best_and_finalized(node, assert_best_block, wait_for_block_provider) - .await; + assert_block_number_is_best_and_finalized(node, assert_best_block, None).await; } async fn revert( @@ -102,7 +97,7 @@ async fn do_transfer( from: Address, to: Option
, amount: U256, - block_wait_timeout: Option, + block_number: Option, ) -> (H256, Option) { let tx_hash = if let Some(to) = to { let transaction = TransactionRequest::default().value(amount).from(from).to(to); @@ -113,8 +108,8 @@ async fn do_transfer( tx_hash }; - if let Some(BlockWaitTimeout { block_number, timeout }) = block_wait_timeout { - mine_blocks(node, 1, block_number.into(), Some(timeout)).await; + if let Some(block_number) = block_number { + mine_blocks(node, 1, block_number).await; return (tx_hash, Some(node.get_transaction_receipt(tx_hash).await)); } @@ -167,26 +162,26 @@ async fn test_best_block_after_evm_revert() { let zero = snapshot(&mut node, U256::ZERO).await; // Mine 5 blocks and assert on the new best block. - mine_blocks(&mut node, 5, 5, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 5).await; // Snapshot at block number 5. let one = snapshot(&mut node, U256::ONE).await; // Mine 5 more blocks. - mine_blocks(&mut node, 5, 10, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 10).await; // Snapshot again at block number 10. let two = snapshot(&mut node, U256::from(2)).await; assert_block_number_is_best_and_finalized(&mut node, 10, None).await; // Mine 5 more blocks. - mine_blocks(&mut node, 5, 15, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 15).await; // Revert to the second snapshot and assert best block number is 10. revert(&mut node, two, 10, true, None).await; // Check mining works fine after reverting. - mine_blocks(&mut node, 10, 20, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 10, 20).await; // Revert immediatelly after a snapshot (same best number is expected after the revert). let id = snapshot(&mut node, U256::from(3)).await; @@ -210,10 +205,12 @@ async fn test_balances_and_txs_index_after_evm_revert() { assert_block_number_is_best_and_finalized(&mut node, 0, None).await; // Mine 5 blocks and assert on the new best block. - mine_blocks(&mut node, 5, 5, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 5).await; // Snapshot at block number 5. let zero = snapshot(&mut node, U256::ZERO).await; + let initial_gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); // Get known accounts initial balances. let (alith_addr, alith_account) = alith(); @@ -223,14 +220,8 @@ async fn test_balances_and_txs_index_after_evm_revert() { // Initialize a random account. Assume its initial balance is 0. let transfer_amount = U256::from(16e17); - let (_, receipt_info) = do_transfer( - &mut node, - alith_addr, - None, - transfer_amount, - Some(BlockWaitTimeout { block_number: 6, timeout: Duration::from_millis(500) }), - ) - .await; + let (_, receipt_info) = + do_transfer(&mut node, alith_addr, None, transfer_amount, Some(6)).await; let receipt_info = receipt_info.unwrap(); let dest_h160 = receipt_info.to.unwrap(); @@ -249,14 +240,8 @@ async fn test_balances_and_txs_index_after_evm_revert() { // Make another regular transfer between known accounts. 
let transfer_amount = U256::from(1e17); - let (_, receipt_info) = do_transfer( - &mut node, - baltathar_addr, - Some(alith_addr), - transfer_amount, - Some(BlockWaitTimeout { block_number: 7, timeout: Duration::from_millis(500) }), - ) - .await; + let (_, receipt_info) = + do_transfer(&mut node, baltathar_addr, Some(alith_addr), transfer_amount, Some(7)).await; let receipt_info = receipt_info.unwrap(); assert_eq!(receipt_info.block_number, pallet_revive::U256::from(7)); @@ -276,9 +261,13 @@ async fn test_balances_and_txs_index_after_evm_revert() { "Alith's balance should have changed" ); - // Revert to a block before the transactions have been mined. + // Revert to a block before the transactions have been included. revert(&mut node, zero, 5, true, Some(Duration::from_millis(500))).await; + let after_revert_gas_price = + unwrap_response::(node.eth_rpc(EthRequest::EthGasPrice(())).await.unwrap()).unwrap(); + assert_eq!(initial_gas_price, after_revert_gas_price); + // Assert on accounts balances to be the initial balances. let dest_addr = Address::from(dest_h160.to_fixed_bytes()); let alith_balance = node.get_balance(alith_account.address(), None).await; @@ -291,17 +280,11 @@ async fn test_balances_and_txs_index_after_evm_revert() { assert_eq!(node.get_nonce(baltathar_addr).await, U256::ZERO); assert_eq!(node.get_nonce(dest_addr).await, U256::ZERO); - // Remine the 6th block with same txs above. + // Remine the 6th block with the same txs but included in a single block. let (tx_hash1, _) = do_transfer(&mut node, alith_addr, Some(dest_addr), U256::from(16e17), None).await; - let (tx_hash2, receipt_info2) = do_transfer( - &mut node, - baltathar_addr, - Some(alith_addr), - U256::from(1e17), - Some(BlockWaitTimeout { block_number: 6, timeout: Duration::from_millis(500) }), - ) - .await; + let (tx_hash2, receipt_info2) = + do_transfer(&mut node, baltathar_addr, Some(alith_addr), U256::from(1e17), Some(6)).await; let receipt_info2 = receipt_info2.unwrap(); let receipt_info = node.get_transaction_receipt(tx_hash1).await; let mut tx_indices = @@ -341,7 +324,7 @@ async fn test_evm_revert_and_timestamp() { let zero = snapshot(&mut node, U256::ZERO).await; // Assert on first best block number. - mine_blocks(&mut node, 1, 1, None).await; + mine_blocks(&mut node, 1, 1).await; let first_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( first_timestamp.saturating_div(1000), @@ -364,7 +347,7 @@ async fn test_evm_revert_and_timestamp() { ); // Mine 1 blocks and assert on the new best block. - mine_blocks(&mut node, 1, 2, None).await; + mine_blocks(&mut node, 1, 2).await; let second_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( second_timestamp.saturating_sub(first_timestamp), @@ -390,7 +373,7 @@ async fn test_evm_revert_and_timestamp() { "Wrong offset 2", ); - mine_blocks(&mut node, 1, 3, None).await; + mine_blocks(&mut node, 1, 3).await; let third_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( third_timestamp.saturating_sub(second_timestamp), @@ -412,7 +395,7 @@ async fn test_evm_revert_and_timestamp() { // Mine again 1 block and check again the timestamp. We should have the next block timestamp // with 1 second later than the second block timestamp. 
tokio::time::sleep(Duration::from_secs(1)).await; - mine_blocks(&mut node, 1, 3, None).await; + mine_blocks(&mut node, 1, 3).await; let remined_third_block_ts = node.get_decoded_timestamp(None).await; assert_with_tolerance( remined_third_block_ts.saturating_sub(second_timestamp), @@ -434,7 +417,7 @@ async fn test_evm_revert_and_timestamp() { // Mine 1 block and check the timestamp. We don't check on a specific // timestamp, but expect the time has increased a bit since the revert, which set the time back // to genesis timestamp. - mine_blocks(&mut node, 1, 1, None).await; + mine_blocks(&mut node, 1, 1).await; assert_eq!(node.best_block_number().await, 1); let remined_first_block_ts = node.get_decoded_timestamp(None).await; // Here assert that the time is increasing. @@ -451,14 +434,14 @@ async fn test_rollback() { assert_block_number_is_best_and_finalized(&mut node, 0, None).await; // Mine 5 blocks and assert on the new best block. - mine_blocks(&mut node, 5, 5, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 5).await; // Rollback 2 blocks. unwrap_response::<()>(node.eth_rpc(EthRequest::Rollback(Some(2))).await.unwrap()).unwrap(); assert_block_number_is_best_and_finalized(&mut node, 3, Some(Duration::from_millis(500))).await; // Check mining works fine after reverting. - mine_blocks(&mut node, 10, 13, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 10, 13).await; // Rollback 1 block. unwrap_response::<()>(node.eth_rpc(EthRequest::Rollback(None)).await.unwrap()).unwrap(); @@ -476,11 +459,11 @@ async fn test_mine_with_txs_in_mempool_before_revert() { assert_block_number_is_best_and_finalized(&mut node, 0, None).await; // Mine 5 blocks and assert on the new best block. - mine_blocks(&mut node, 5, 5, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 5, 5).await; // Snapshot at block number 5. let zero = snapshot(&mut node, U256::ZERO).await; - mine_blocks(&mut node, 5, 10, None).await; + mine_blocks(&mut node, 5, 10).await; // Get known accounts. let (alith_addr, _) = alith(); @@ -488,7 +471,7 @@ async fn test_mine_with_txs_in_mempool_before_revert() { // Initialize a random account. let transfer_amount = U256::from(16e17); - let (dest_addr, _) = + let _ = node.eth_transfer_to_unitialized_random_account(alith_addr, transfer_amount, None).await; // Make another regular transfer between known accounts. @@ -499,26 +482,29 @@ async fn test_mine_with_txs_in_mempool_before_revert() { // Revert to a block before the transactions have been sent. 
revert(&mut node, zero, 5, true, None).await; - let one = snapshot(&mut node, U256::ONE).await; + let inspect: TxpoolInspect = + unwrap_response(node.eth_rpc(EthRequest::TxPoolInspect(())).await.unwrap()).unwrap(); + assert_eq!(inspect.pending.len(), 2); - mine_blocks(&mut node, 1, 6, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 1, 6).await; - let txs_in_block = unwrap_response::( - node.eth_rpc(EthRequest::EthGetTransactionCountByNumber( - alloy_eips::BlockNumberOrTag::Latest, - )) - .await - .unwrap(), - ) - .unwrap(); - assert_eq!(txs_in_block, U256::from(2)); + // Get current block to verify gas_price < base_fee_per_gas + let block_number = node.best_block_number().await; + let block_hash = node.block_hash_by_number(block_number).await.unwrap(); + let block = node.get_block_by_hash(block_hash).await; + let base_fee = block.base_fee_per_gas.as_u128(); + + let pending_alith_txs = inspect.pending.get(&alith_addr).unwrap(); + let pending_baltathar_txs = inspect.pending.get(&baltathar_addr).unwrap(); + + assert_eq!(pending_alith_txs.len(), 1); + assert_eq!(pending_baltathar_txs.len(), 1); + + let summary_alith = pending_alith_txs.get("0").unwrap(); + assert!(summary_alith.gas_price < base_fee); - // Now make two more txs again with same senders, with different nonces than the actual - // accounts nonces at block 5. - let transfer_amount = U256::from(1e15); - do_transfer(&mut node, baltathar_addr, Some(alith_addr), transfer_amount, None).await; - do_transfer(&mut node, alith_addr, Some(dest_addr), transfer_amount, None).await; - revert(&mut node, one, 5, true, None).await; + let summary_baltathar = pending_baltathar_txs.get("0").unwrap(); + assert!(summary_baltathar.gas_price < base_fee); let txs_in_block = unwrap_response::( node.eth_rpc(EthRequest::EthGetTransactionCountByNumber( @@ -528,6 +514,8 @@ async fn test_mine_with_txs_in_mempool_before_revert() { .unwrap(), ) .unwrap(); + // Previous txs are not included in the block because they have + // a gas_price smaller than the current block's base_fee_per_gas. assert_eq!(txs_in_block, U256::ZERO); } @@ -544,7 +532,7 @@ async fn test_timestmap_in_contract_after_revert() { let alith = Account::from(subxt_signer::eth::dev::alith()); let contract_code = get_contract_code("Multicall"); let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; - mine_blocks(&mut node, 1, 1, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 1, 1).await; let first_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( @@ -587,7 +575,7 @@ async fn test_timestmap_in_contract_after_revert() { assert_eq!(timestamp, U256::from(first_timestamp.saturating_div(1000))); // Mine 1 block again and expect on the set timestamp. 
- mine_blocks(&mut node, 1, 2, Some(Duration::from_millis(500))).await; + mine_blocks(&mut node, 1, 2).await; let second_timestamp = node.get_decoded_timestamp(None).await; assert_with_tolerance( second_timestamp.saturating_sub(first_timestamp), diff --git a/crates/anvil-polkadot/tests/it/standard_rpc.rs b/crates/anvil-polkadot/tests/it/standard_rpc.rs index 45d10367c8e0a..ad410326203e1 100644 --- a/crates/anvil-polkadot/tests/it/standard_rpc.rs +++ b/crates/anvil-polkadot/tests/it/standard_rpc.rs @@ -164,7 +164,6 @@ async fn test_estimate_gas() { .unwrap(); let tx_hash = node.send_transaction(transaction, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let receipt = node.get_transaction_receipt(tx_hash).await; // https://github.com/paritytech/polkadot-sdk/blob/b21cbb58ab50d5d10371393967537f6f221bb92f/substrate/frame/revive/src/primitives.rs#L76 // eth_gas that is returned by estimate_gas holds both the storage deposit and @@ -299,7 +298,6 @@ async fn test_eth_get_transaction_count() { ))); let _tx_hash0 = node.send_transaction(transaction.clone(), None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; assert_eq!( unwrap_response::( node.eth_rpc(EthRequest::EthGetTransactionCount( @@ -355,7 +353,6 @@ async fn test_get_transaction_count_by_hash_number() { U256::from(0) ); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; assert_eq!( unwrap_response::>( node.eth_rpc(EthRequest::EthGetTransactionCountByHash(B256::from_slice( @@ -388,7 +385,6 @@ async fn test_get_code_at() { let anvil_node_config = AnvilNodeConfig::test_config(); let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); - unwrap_response::<()>(node.eth_rpc(EthRequest::SetAutomine(true)).await.unwrap()).unwrap(); // Check random address let code = unwrap_response::( @@ -399,8 +395,8 @@ async fn test_get_code_at() { assert!(code.is_empty(), "Contract code should be empty"); let alith = Account::from(subxt_signer::eth::dev::alith()); let contract_code = get_contract_code("SimpleStorage"); - let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), Some(1)).await; - tokio::time::sleep(Duration::from_millis(400)).await; + let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; + let _ = node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap(); let receipt = node.get_transaction_receipt(tx_hash).await; assert_eq!(receipt.status, Some(pallet_revive::U256::from(1))); let contract_address = receipt.contract_address.unwrap(); @@ -458,7 +454,6 @@ async fn test_get_transaction_by_hash_and_index() { .await .unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; assert_eq!( unwrap_response::>( node.eth_rpc(EthRequest::EthGetTransactionByBlockHashAndIndex( @@ -528,7 +523,6 @@ async fn test_get_transaction_by_number_and_index() { .await .unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let transaction_info_1 = unwrap_response::>( 
node.eth_rpc(EthRequest::EthGetTransactionByBlockNumberAndIndex( @@ -576,8 +570,6 @@ async fn test_get_transaction_by_hash() { .to(Address::from(ReviveAddress::new(baltathar.address()))); let tx_hash0 = node.send_transaction(transaction.clone(), None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; - let transaction_info = unwrap_response::>( node.eth_rpc(EthRequest::EthGetTransactionByHash(B256::from_slice(tx_hash0.as_ref()))) .await @@ -695,7 +687,13 @@ async fn test_fee_history() { ) .unwrap(); assert_eq!(fee_history.gas_used_ratio.len(), 10); - assert!(fee_history.base_fee_per_gas.iter().all(|&v| v == pallet_revive::U256::from(1000000))); + // The `SlowAdjustingFeeUpdate` logic decreases the base_fee block by block if the + // activity contained within them is low. + let base_fees = + [999981, 999962, 999944, 999925, 999906, 999888, 999869, 999851, 999832, 999813, 999813]; + for (idx, base_fee) in fee_history.base_fee_per_gas.into_iter().enumerate() { + assert_eq!(base_fee, pallet_revive::U256::from(base_fees[idx])); + } } #[tokio::test(flavor = "multi_thread")] @@ -748,7 +746,6 @@ async fn test_get_logs() { let contract_code = get_contract_code("SimpleStorage"); let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(500)).await; let receipt = node.get_transaction_receipt(tx_hash).await; let contract_address = receipt.contract_address.unwrap(); @@ -763,7 +760,6 @@ async fn test_get_logs() { let _call_tx_hash = node.send_transaction(call_tx, None).await.unwrap(); } unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let filter = alloy_rpc_types::Filter::new() .address(Address::from(ReviveAddress::new(contract_address))) @@ -939,7 +935,6 @@ async fn test_anvil_node_info() { // Mine some blocks and check that node_info updates unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(Some(U256::from(3)), None)).await.unwrap()) .unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let node_info_after = unwrap_response::(node.eth_rpc(EthRequest::NodeInfo(())).await.unwrap()).unwrap(); @@ -998,7 +993,6 @@ async fn test_anvil_metadata() { // Mine some blocks and check that metadata updates unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(Some(U256::from(5)), None)).await.unwrap()) .unwrap(); - tokio::time::sleep(Duration::from_millis(400)).await; let metadata_after_mining = unwrap_response::( node.eth_rpc(EthRequest::AnvilMetadata(())).await.unwrap(), diff --git a/crates/anvil-polkadot/tests/it/state_injector.rs b/crates/anvil-polkadot/tests/it/state_injector.rs index 1b9945045ce83..b0e1ff50a6bfb 100644 --- a/crates/anvil-polkadot/tests/it/state_injector.rs +++ b/crates/anvil-polkadot/tests/it/state_injector.rs @@ -14,7 +14,6 @@ use anvil_polkadot::{ use anvil_rpc::error::{ErrorCode, RpcError}; use assert_matches::assert_matches; use polkadot_sdk::pallet_revive::{self, evm::Account}; -use std::time::Duration; use subxt::utils::H160; #[tokio::test(flavor = "multi_thread")] @@ -79,8 +78,6 @@ async fn test_set_chain_id() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - 
tokio::time::sleep(Duration::from_secs(1)).await; - let transaction_receipt = node.get_transaction_receipt(tx_hash).await; assert_eq!(transaction_receipt.block_number, pallet_revive::U256::from(2)); @@ -108,7 +105,6 @@ async fn test_set_nonce() { assert_eq!(node.get_nonce(address).await, U256::from(10)); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_nonce(address).await, U256::from(10)); @@ -131,8 +127,6 @@ async fn test_set_nonce() { unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; - let transaction_receipt = node.get_transaction_receipt(tx_hash).await; assert_eq!(transaction_receipt.block_number, pallet_revive::U256::from(2)); @@ -149,7 +143,6 @@ async fn test_set_nonce() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let transaction_receipt = node.get_transaction_receipt(tx_hash).await; @@ -173,7 +166,6 @@ async fn test_set_nonce() { assert_eq!(node.get_nonce(address).await, U256::from(1)); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_nonce(address).await, U256::from(1)); } @@ -205,7 +197,6 @@ async fn test_set_balance() { assert_eq!(node.get_balance(alith, None).await, new_balance); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_balance(alith, None).await, new_balance); @@ -219,7 +210,6 @@ async fn test_set_balance() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let transaction_receipt = node.get_transaction_receipt(tx_hash).await; @@ -270,7 +260,6 @@ async fn test_set_balance() { assert_eq!(node.get_balance(baltathar, None).await, new_balance); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_balance(baltathar, None).await, new_balance); @@ -289,7 +278,6 @@ async fn test_set_balance() { assert_eq!(node.get_balance(random_addr, None).await, new_balance); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(node.get_balance(random_addr, None).await, new_balance); } @@ -314,7 +302,6 @@ async fn test_set_code_existing_contract() { .await; unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let receipt = node.get_transaction_receipt(tx_hash).await; let contract_address = Address::from(ReviveAddress::new(receipt.contract_address.unwrap())); @@ -343,7 +330,6 @@ async fn test_set_code_existing_contract() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let _receipt = node.get_transaction_receipt(tx_hash).await; @@ -390,7 +376,6 @@ async fn test_set_code_existing_contract() { let tx_hash 
= node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let _receipt = node.get_transaction_receipt(tx_hash).await; @@ -465,7 +450,6 @@ async fn test_set_code_new() { assert_eq!(code, Bytes::from(runtime_bytecode.clone())); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(code, Bytes::from(runtime_bytecode)); @@ -478,7 +462,6 @@ async fn test_set_code_new() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let _receipt = node.get_transaction_receipt(tx_hash).await; @@ -554,7 +537,6 @@ async fn test_set_code_of_regular_account() { assert_eq!(code, Bytes::from(runtime_bytecode.clone())); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; assert_eq!(code, Bytes::from(runtime_bytecode)); @@ -567,7 +549,6 @@ async fn test_set_code_of_regular_account() { let tx_hash = node.send_transaction(tx, None).await.unwrap(); unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(Duration::from_secs(1)).await; let _receipt = node.get_transaction_receipt(tx_hash).await; @@ -625,7 +606,6 @@ async fn test_set_storage() { let contract_code = get_contract_code("SimpleStorage"); let tx_hash = node.deploy_contract(&contract_code.init, alith.address(), None).await; unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); - tokio::time::sleep(std::time::Duration::from_millis(400)).await; let receipt = node.get_transaction_receipt(tx_hash).await; let contract_address = receipt.contract_address.unwrap(); diff --git a/crates/anvil-polkadot/tests/it/utils.rs b/crates/anvil-polkadot/tests/it/utils.rs index e466530f0298d..b34669401de0b 100644 --- a/crates/anvil-polkadot/tests/it/utils.rs +++ b/crates/anvil-polkadot/tests/it/utils.rs @@ -184,6 +184,16 @@ impl TestNode { Decode::decode(&mut input).unwrap() } + pub async fn get_eth_timestamp(&mut self, at: Option) -> u64 { + if let Some(hash) = at { + self.get_block_by_hash(hash).await + } else { + self.eth_best_block().await + } + .timestamp + .as_u64() + } + pub async fn get_nonce(&mut self, address: Address) -> U256 { unwrap_response::( self.eth_rpc(EthRequest::EthGetTransactionCount(address, None)).await.unwrap(), @@ -203,6 +213,18 @@ impl TestNode { u32::from_str_radix(num.trim_start_matches("0x"), 16).unwrap() } + pub async fn eth_best_block(&mut self) -> Block { + unwrap_response::( + self.eth_rpc(EthRequest::EthGetBlockByNumber( + alloy_eips::BlockNumberOrTag::Latest, + false, + )) + .await + .unwrap(), + ) + .unwrap() + } + pub async fn wait_for_block_with_timeout( &self, n: u32, diff --git a/crates/cheatcodes/src/evm.rs b/crates/cheatcodes/src/evm.rs index 243cddc261cf9..c5300184ec1fe 100644 --- a/crates/cheatcodes/src/evm.rs +++ b/crates/cheatcodes/src/evm.rs @@ -39,7 +39,7 @@ use serde::Serialize; mod fork; pub(crate) mod mapping; -pub(crate) mod mock; +pub mod mock; pub(crate) mod prank; /// Records storage slots reads and writes. 
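// A hedged usage sketch (not an additional test in this patch) of the two `TestNode` helpers
// added in utils.rs above, `eth_best_block` and `get_eth_timestamp`. The setup mirrors the
// existing integration tests; the function name and the absence of explicit block-wait calls
// are illustrative assumptions, not part of the change set.
#[tokio::test(flavor = "multi_thread")]
async fn sketch_eth_view_matches_substrate_view() {
    let anvil_node_config = AnvilNodeConfig::test_config();
    let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config);
    let mut node = TestNode::new(anvil_node_config, substrate_node_config).await.unwrap();

    // Mine one block, then compare the eth-side view with the substrate-side view.
    unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap();
    assert_eq!(node.eth_best_block().await.number.as_u32(), node.best_block_number().await);

    // Substrate stores timestamps in milliseconds; the eth view reports seconds.
    assert_eq!(node.get_eth_timestamp(None).await, node.get_decoded_timestamp(None).await / 1000);
}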
diff --git a/crates/cheatcodes/src/inspector.rs b/crates/cheatcodes/src/inspector.rs index 1a9354e382062..3d55d29922e8e 100644 --- a/crates/cheatcodes/src/inspector.rs +++ b/crates/cheatcodes/src/inspector.rs @@ -939,6 +939,11 @@ impl Cheatcodes { self.strategy.runner.revive_remove_duplicate_account_access(self); } + // Tells whether PVM is enabled or not. + pub fn is_pvm_enabled(&mut self) -> bool { + self.strategy.runner.is_pvm_enabled(self) + } + pub fn call_with_executor( &mut self, ecx: Ecx, @@ -1022,40 +1027,51 @@ impl Cheatcodes { } } - // Handle mocked calls - if let Some(mocks) = self.mocked_calls.get_mut(&call.bytecode_address) { - let ctx = MockCallDataContext { - calldata: call.input.bytes(ecx), - value: call.transfer_value(), - }; + // Do not handle mocked calls if PVM is enabled and let the revive call handle it. + // There is literally no problem with handling mocked calls with PVM enabled here as well, + // but the downside is that if call a mocked call from the test it will not exercise the + // paths in revive that handle mocked calls and only nested mocks will be handle by the + // revive specific calls. + // This is undesirable because conformity tests could accidentally pass and the revive code + // paths be broken. + if !self.is_pvm_enabled() { + // Handle mocked calls + if let Some(mocks) = self.mocked_calls.get_mut(&call.bytecode_address) { + let ctx = MockCallDataContext { + calldata: call.input.bytes(ecx), + value: call.transfer_value(), + }; - if let Some(return_data_queue) = match mocks.get_mut(&ctx) { - Some(queue) => Some(queue), - None => mocks - .iter_mut() - .find(|(mock, _)| { - call.input.bytes(ecx).get(..mock.calldata.len()) == Some(&mock.calldata[..]) - && mock.value.is_none_or(|value| Some(value) == call.transfer_value()) - }) - .map(|(_, v)| v), - } && let Some(return_data) = if return_data_queue.len() == 1 { - // If the mocked calls stack has a single element in it, don't empty it - return_data_queue.front().map(|x| x.to_owned()) - } else { - // Else, we pop the front element - return_data_queue.pop_front() - } { - return Some(CallOutcome { - result: InterpreterResult { - result: return_data.ret_type, - output: return_data.data, - gas, - }, - memory_offset: call.return_memory_offset.clone(), - }); + if let Some(return_data_queue) = match mocks.get_mut(&ctx) { + Some(queue) => Some(queue), + None => mocks + .iter_mut() + .find(|(mock, _)| { + call.input.bytes(ecx).get(..mock.calldata.len()) + == Some(&mock.calldata[..]) + && mock + .value + .is_none_or(|value| Some(value) == call.transfer_value()) + }) + .map(|(_, v)| v), + } && let Some(return_data) = if return_data_queue.len() == 1 { + // If the mocked calls stack has a single element in it, don't empty it + return_data_queue.front().map(|x| x.to_owned()) + } else { + // Else, we pop the front element + return_data_queue.pop_front() + } { + return Some(CallOutcome { + result: InterpreterResult { + result: return_data.ret_type, + output: return_data.data, + gas, + }, + memory_offset: call.return_memory_offset.clone(), + }); + } } } - // Apply our prank if let Some(prank) = &self.get_prank(curr_depth) { // Apply delegate call, `call.caller`` will not equal `prank.prank_caller` diff --git a/crates/cheatcodes/src/lib.rs b/crates/cheatcodes/src/lib.rs index 5fe73d1aaafae..cad78b79a6038 100644 --- a/crates/cheatcodes/src/lib.rs +++ b/crates/cheatcodes/src/lib.rs @@ -57,6 +57,7 @@ mod script; pub use script::{Broadcast, Wallets, WalletsInner}; mod strategy; +pub use evm::mock::{MockCallDataContext, 
MockCallReturnData}; pub use strategy::{ CheatcodeInspectorStrategy, CheatcodeInspectorStrategyContext, CheatcodeInspectorStrategyExt, CheatcodeInspectorStrategyRunner, CheatcodesStrategy, EvmCheatcodeInspectorStrategyRunner, diff --git a/crates/cheatcodes/src/strategy.rs b/crates/cheatcodes/src/strategy.rs index 2623c3fd04893..e286d66c0fc1b 100644 --- a/crates/cheatcodes/src/strategy.rs +++ b/crates/cheatcodes/src/strategy.rs @@ -238,6 +238,10 @@ impl Clone for CheatcodeInspectorStrategy { /// Defined in revive-strategy pub trait CheatcodeInspectorStrategyExt { + fn is_pvm_enabled(&self, _state: &mut crate::Cheatcodes) -> bool { + false + } + fn revive_try_create( &self, _state: &mut crate::Cheatcodes, diff --git a/crates/evm/evm/src/inspectors/stack.rs b/crates/evm/evm/src/inspectors/stack.rs index 652d990b3314d..7daed9fbfae3a 100644 --- a/crates/evm/evm/src/inspectors/stack.rs +++ b/crates/evm/evm/src/inspectors/stack.rs @@ -914,8 +914,19 @@ impl Inspector> for InspectorStackRefMut<'_> ); if let Some(cheatcodes) = self.cheatcodes.as_deref_mut() { + let is_pvm_enabled = cheatcodes.is_pvm_enabled(); // Handle mocked functions, replace bytecode address with mock if matched. - if let Some(mocks) = cheatcodes.mocked_functions.get(&call.target_address) { + + // Do not handle mocked functions if PVM is enabled and let the revive call handle it. + // There is literally no problem with handling mocked functions with PVM enabled here as + // well, but the downside is that if we call a mocked functions from the test it + // will not exercise the paths in revive that handle mocked calls and only + // nested mocks will be handle by the revive specific calls. + // This is undesirable because conformity tests could accidentally pass and the revive + // code paths be broken. + if let Some(mocks) = cheatcodes.mocked_functions.get(&call.target_address) + && !is_pvm_enabled + { // Check if any mock function set for call data or if catch-all mock function set // for selector. if let Some(target) = mocks.get(&call.input.bytes(ecx)).or_else(|| { diff --git a/crates/forge/src/runner.rs b/crates/forge/src/runner.rs index aa5a2fecb037b..7f50d54c4e4ee 100644 --- a/crates/forge/src/runner.rs +++ b/crates/forge/src/runner.rs @@ -558,7 +558,11 @@ impl<'a> FunctionRunner<'a> { /// test ends, similar to `eth_call`. fn run_unit_test(mut self, func: &Function) -> TestResult { // Prepare unit test execution. 
+ self.executor.strategy.runner.checkpoint(); + if self.prepare_test(func).is_err() { + self.executor.strategy.runner.reload_checkpoint(); + return self.result; } let mut binding = self.executor.clone(); @@ -576,10 +580,14 @@ impl<'a> FunctionRunner<'a> { Err(EvmError::Execution(err)) => (err.raw, Some(err.reason)), Err(EvmError::Skip(reason)) => { self.result.single_skip(reason); + self.executor.strategy.runner.reload_checkpoint(); + return self.result; } Err(err) => { self.result.single_fail(Some(err.to_string())); + self.executor.strategy.runner.reload_checkpoint(); + return self.result; } }; @@ -587,6 +595,8 @@ impl<'a> FunctionRunner<'a> { let success = self.executor.is_raw_call_mut_success(self.address, &mut raw_call_result, false); self.result.single_result(success, reason, raw_call_result); + self.executor.strategy.runner.reload_checkpoint(); + self.result } diff --git a/crates/forge/tests/cli/revive_vm.rs b/crates/forge/tests/cli/revive_vm.rs index b05744ddb4ac4..60f91b8c8cd87 100644 --- a/crates/forge/tests/cli/revive_vm.rs +++ b/crates/forge/tests/cli/revive_vm.rs @@ -473,52 +473,52 @@ Ran 2 tests for src/CounterTest.t.sol:CounterTest [PASS] test_Increment() ([GAS]) Traces: [..] CounterTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code - ├─ [0] VM::expectEmit() + ├─ [..] VM::expectEmit() │ └─ ← [Return] ├─ emit SetNumber(result: 5) - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::setNumber(5) │ ├─ emit SetNumber(result: 5) │ └─ ← [Stop] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 5 └─ ← [Stop] [..] CounterTest::test_Increment() - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 5 - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(55) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::setNumber(55) │ ├─ emit SetNumber(result: 55) │ └─ ← [Stop] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 55 - ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::increment() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::increment() │ ├─ emit Increment(result: 56) │ └─ ← [Stop] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 56 └─ ← [Stop] [PASS] test_expectRevert() ([GAS]) Traces: [..] CounterTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code - ├─ [0] VM::expectEmit() + ├─ [..] VM::expectEmit() │ └─ ← [Return] ├─ emit SetNumber(result: 5) - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setNumber(5) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::setNumber(5) │ ├─ emit SetNumber(result: 5) │ └─ ← [Stop] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::number() [staticcall] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::number() [staticcall] │ └─ ← [Return] 5 └─ ← [Stop] [..] 
CounterTest::test_expectRevert() - ├─ [0] VM::expectRevert(custom error 0xf28dceb3: 0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000006456941a80000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000076661696c7572650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000) + ├─ [..] VM::expectRevert(custom error 0xf28dceb3: 0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000006456941a80000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000076661696c7572650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000) │ └─ ← [Return] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::failed_call() [staticcall] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::failed_call() [staticcall] │ └─ ← [Revert] Revert("failure") └─ ← [Stop] @@ -655,50 +655,50 @@ Ran 2 tests for src/Test.t.sol:RecordTest [PASS] testRecordAccess() ([GAS]) Traces: [..] RecordTest::testRecordAccess() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code - ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 │ └─ ← [Return] [..] bytes of code - ├─ [0] VM::record() + ├─ [..] VM::record() │ └─ ← [Return] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) - │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::record(0x90193C961A926261B756D1E5bb255e67ff9498A1) + │ ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::record() │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::accesses(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC) + ├─ [..] VM::accesses(0x34A1D3fff3958843C43aD80F30b94c510645C316) │ └─ ← [Return] [0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000001], [0x0000000000000000000000000000000000000000000000000000000000000001] - ├─ [0] VM::accesses(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) + ├─ [..] VM::accesses(0x90193C961A926261B756D1E5bb255e67ff9498A1) │ └─ ← [Return] [0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000002], [0x0000000000000000000000000000000000000000000000000000000000000002] └─ ← [Stop] [PASS] testStopRecordAccess() ([GAS]) Traces: [..] RecordTest::testStopRecordAccess() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code - ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 │ └─ ← [Return] [..] bytes of code - ├─ [0] VM::record() + ├─ [..] VM::record() │ └─ ← [Return] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) - │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::record(0x90193C961A926261B756D1E5bb255e67ff9498A1) + │ ├─ [..] 
0x90193C961A926261B756D1E5bb255e67ff9498A1::record() │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::accesses(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC) + ├─ [..] VM::accesses(0x34A1D3fff3958843C43aD80F30b94c510645C316) │ └─ ← [Return] [0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000001], [0x0000000000000000000000000000000000000000000000000000000000000001] - ├─ [0] VM::stopRecord() + ├─ [..] VM::stopRecord() │ └─ ← [Return] - ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::record(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f) - │ ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::record() + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::record(0x90193C961A926261B756D1E5bb255e67ff9498A1) + │ ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::record() │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::accesses(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC) + ├─ [..] VM::accesses(0x34A1D3fff3958843C43aD80F30b94c510645C316) │ └─ ← [Return] [0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000001], [0x0000000000000000000000000000000000000000000000000000000000000001] - ├─ [0] VM::record() + ├─ [..] VM::record() │ └─ ← [Return] - ├─ [0] VM::stopRecord() + ├─ [..] VM::stopRecord() │ └─ ← [Return] - ├─ [0] VM::accesses(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC) + ├─ [..] VM::accesses(0x34A1D3fff3958843C43aD80F30b94c510645C316) │ └─ ← [Return] [], [] └─ ← [Stop] @@ -972,21 +972,21 @@ Ran 7 tests for src/Test.t.sol:RecordLogsTest [PASS] testEmitRecordEmit() ([GAS]) Traces: [..] RecordLogsTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [..] RecordLogsTest::testEmitRecordEmit() - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(1, 2, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ ├─ emit LogTopic12(topic1: 1, topic2: 2, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ └─ ← [Stop] - ├─ [0] VM::recordLogs() + ├─ [..] VM::recordLogs() │ └─ ← [Return] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(3, 0x2e38edeff9493e0004540e975027a429) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(3, 0x2e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic1(topic1: 3, data: 0x2e38edeff9493e0004540e975027a429) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0x7c7d81fafce31d4330303f05da0ccb9d970101c475382b40aa072986ee4caaad, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000102e38edeff9493e0004540e975027a42900000000000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] + ├─ [..] 
VM::getRecordedLogs() + │ └─ ← [Return] [([0x7c7d81fafce31d4330303f05da0ccb9d970101c475382b40aa072986ee4caaad, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000102e38edeff9493e0004540e975027a42900000000000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] ├─ storage changes: │ @ 1: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350 → 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4232d03ee63e14e30 └─ ← [Stop] @@ -994,15 +994,15 @@ Traces: [PASS] testRecordOffGetsNothing() ([GAS]) Traces: [..] RecordLogsTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [..] RecordLogsTest::testRecordOffGetsNothing() - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(1, 2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic123(topic1: 1, topic2: 2, topic3: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() + ├─ [..] VM::getRecordedLogs() │ └─ ← [Return] [] ├─ storage changes: │ @ 1: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350 → 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4232d03ee63e14e30 @@ -1011,76 +1011,76 @@ Traces: [PASS] testRecordOnEmitDifferentDepths() ([GAS]) Traces: [..] RecordLogsTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [..] RecordLogsTest::testRecordOnEmitDifferentDepths() - ├─ [0] VM::recordLogs() + ├─ [..] VM::recordLogs() │ └─ ← [Return] ├─ emit LogTopic(topic1: 1, data: 0x43a26051362b8040b289abe93334a5e3) - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa) │ ├─ emit LogTopic12(topic1: 2, topic2: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa) │ └─ ← [Stop] - ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 │ └─ ← [Return] [..] bytes of code - ├─ [0] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) - │ ├─ [0] 0x104fBc016F4bb334D775a19E8A6510109AC63E00::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) + ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) + │ ├─ [..] 
0xd04404bcf6d969FC0Ec22021b4736510CAcec492::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ │ ├─ emit LogTopic123(topic1: 4, topic2: 5, topic3: 6, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0x61fb7db3625c10432927a76bb32400c33a94e9bb6374137c4cd59f6e465bfdcb, 0x0000000000000000000000000000000000000000000000000000000000000001], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001043a26051362b8040b289abe93334a5e300000000000000000000000000000000, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496), ([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC), ([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0x104fBc016F4bb334D775a19E8A6510109AC63E00)] - ├─ [..] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::getEmitterAddr() [staticcall] - │ └─ ← [Return] 0x104fBc016F4bb334D775a19E8A6510109AC63E00 + ├─ [..] VM::getRecordedLogs() + │ └─ ← [Return] [([0x61fb7db3625c10432927a76bb32400c33a94e9bb6374137c4cd59f6e465bfdcb, 0x0000000000000000000000000000000000000000000000000000000000000001], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001043a26051362b8040b289abe93334a5e300000000000000000000000000000000, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496), ([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316), ([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0xd04404bcf6d969FC0Ec22021b4736510CAcec492)] + ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::getEmitterAddr() [staticcall] + │ └─ ← [Return] 0xd04404bcf6d969FC0Ec22021b4736510CAcec492 └─ ← [Stop] [PASS] testRecordOnNoLogs() ([GAS]) Traces: [..] RecordLogsTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] 
→ new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] - [4118] RecordLogsTest::testRecordOnNoLogs() - ├─ [0] VM::recordLogs() + [..] RecordLogsTest::testRecordOnNoLogs() + ├─ [..] VM::recordLogs() │ └─ ← [Return] - ├─ [0] VM::getRecordedLogs() + ├─ [..] VM::getRecordedLogs() │ └─ ← [Return] [] └─ ← [Stop] [PASS] testRecordOnSingleLog() ([GAS]) Traces: [..] RecordLogsTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [..] RecordLogsTest::testRecordOnSingleLog() - ├─ [0] VM::recordLogs() + ├─ [..] VM::recordLogs() │ └─ ← [Return] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 2, 3, 0x4576656e74204461746120696e20537472696e67) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(1, 2, 3, 0x4576656e74204461746120696e20537472696e67) │ ├─ emit LogTopic123(topic1: 1, topic2: 2, topic3: 3, data: 0x4576656e74204461746120696e20537472696e67) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000144576656e74204461746120696e20537472696e67000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] + ├─ [..] VM::getRecordedLogs() + │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000001, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000144576656e74204461746120696e20537472696e67000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] └─ ← [Stop] [PASS] testRecordOnSingleLogTopic0() ([GAS]) Traces: [..] RecordLogsTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [..] RecordLogsTest::testRecordOnSingleLogTopic0() - ├─ [0] VM::recordLogs() + ├─ [..] VM::recordLogs() │ └─ ← [Return] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ ├─ emit LogTopic0(data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a429) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a42900000000000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] + ├─ [..] 
VM::getRecordedLogs() + │ └─ ← [Return] [([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c1693502e38edeff9493e0004540e975027a42900000000000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] ├─ storage changes: │ @ 1: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350 → 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4232d03ee63e14e30 └─ ← [Stop] @@ -1088,36 +1088,36 @@ Traces: [PASS] testRecordsConsumednAsRead() ([GAS]) Traces: [..] RecordLogsTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [..] RecordLogsTest::testRecordsConsumednAsRead() - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(1, 0x43a26051362b8040b289abe93334a5e3) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(1, 0x43a26051362b8040b289abe93334a5e3) │ ├─ emit LogTopic1(topic1: 1, data: 0x43a26051362b8040b289abe93334a5e3) │ └─ ← [Stop] - ├─ [0] VM::recordLogs() + ├─ [..] VM::recordLogs() │ └─ ← [Return] - ├─ [0] VM::getRecordedLogs() + ├─ [..] VM::getRecordedLogs() │ └─ ← [Return] [] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(2, 3, 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ ├─ emit LogTopic12(topic1: 2, topic2: 3, data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa) + ├─ [..] VM::getRecordedLogs() + │ └─ ← [Return] [([0x7af92d5e3102a27d908bb1859fdef71b723f3c438e5d84f3af49dab68e18dc6d, 0x0000000000000000000000000000000000000000000000000000000000000002, 0x0000000000000000000000000000000000000000000000000000000000000003], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001843a26051362b8040b289abe93334a5e3662751aa691185ae0000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(4, 5, 6, 0x43a26051362b8040b289abe93334a5e3662751aa) │ ├─ emit LogTopic123(topic1: 4, topic2: 5, topic3: 6, data: 0x43a26051362b8040b289abe93334a5e3662751aa) │ └─ ← [Stop] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) + ├─ [..] 
0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ ├─ emit LogTopic0(data: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC), ([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::emitEvent(7, 8, 9, 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) + ├─ [..] VM::getRecordedLogs() + │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000004, 0x0000000000000000000000000000000000000000000000000000000000000005, 0x0000000000000000000000000000000000000000000000000000000000000006], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001443a26051362b8040b289abe93334a5e3662751aa000000000000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316), ([0x0a28c6fad56bcbad1788721e440963b3b762934a3134924733eaf8622cb44279], 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002043a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::emitEvent(7, 8, 9, 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) │ ├─ emit LogTopic123(topic1: 7, topic2: 8, topic3: 9, data: 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4) │ └─ ← [Stop] - ├─ [0] VM::getRecordedLogs() - │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000007, 0x0000000000000000000000000000000000000000000000000000000000000008, 0x0000000000000000000000000000000000000000000000000000000000000009], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000182e38edeff9493e0004540e975027a429ee666d1289f2c7a40000000000000000, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC)] + ├─ [..] 
VM::getRecordedLogs() + │ └─ ← [Return] [([0xb6d650e5d0bbc0e92ff784e346ada394e49aa2d74a5cee8b099fa1a469bdc452, 0x0000000000000000000000000000000000000000000000000000000000000007, 0x0000000000000000000000000000000000000000000000000000000000000008, 0x0000000000000000000000000000000000000000000000000000000000000009], 0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000182e38edeff9493e0004540e975027a429ee666d1289f2c7a40000000000000000, 0x34A1D3fff3958843C43aD80F30b94c510645C316)] ├─ storage changes: │ @ 1: 0x43a26051362b8040b289abe93334a5e3662751aa691185ae9e9a2e1e0c169350 → 0x2e38edeff9493e0004540e975027a429ee666d1289f2c7a4232d03ee63e14e30 └─ ← [Stop] @@ -1268,57 +1268,57 @@ Ran 3 tests for src/Test.t.sol:StateDiffTest [PASS] testCallProxyaccesses() ([GAS]) Traces: [..] StateDiffTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code - ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [..] StateDiffTest::testCallProxyaccesses() - ├─ [0] VM::startStateDiffRecording() + ├─ [..] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [..] 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f::proxyCall(55) - │ ├─ [0] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) + ├─ [..] 0x90193C961A926261B756D1E5bb255e67ff9498A1::proxyCall(55) + │ ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::setter(55) │ │ └─ ← [Return] │ └─ ← [Stop] - ├─ [0] VM::stopAndReturnStateDiff() - │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 0, 0xac1b14ff0000000000000000000000000000000000000000000000000000000000000037, false, [(0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f, 0x0000000000000000000000000000000000000000000000000000000000000000, false, 0x0000000000000000000000007d8cb8f412b3ee9ac79558791333f41d2b1ccdac, 0x0000000000000000000000007d8cb8f412b3ee9ac79558791333f41d2b1ccdac, false)], 1), ((0, 31337 [3.133e4]), 0, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 2)] + ├─ [..] 
VM::stopAndReturnStateDiff() + │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x90193C961A926261B756D1E5bb255e67ff9498A1, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 0, 0xac1b14ff0000000000000000000000000000000000000000000000000000000000000037, false, [(0x90193C961A926261B756D1E5bb255e67ff9498A1, 0x0000000000000000000000000000000000000000000000000000000000000000, false, 0x00000000000000000000000034a1d3fff3958843c43ad80f30b94c510645c316, 0x00000000000000000000000034a1d3fff3958843c43ad80f30b94c510645c316, false)], 1), ((0, 31337 [3.133e4]), 0, 0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x90193C961A926261B756D1E5bb255e67ff9498A1, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 2)] └─ ← [Stop] [PASS] testCallaccesses() ([GAS]) Traces: [..] StateDiffTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code - ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [..] StateDiffTest::testCallaccesses() - ├─ [0] VM::startStateDiffRecording() + ├─ [..] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [..] 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC::setter(55) + ├─ [..] 0x34A1D3fff3958843C43aD80F30b94c510645C316::setter(55) │ └─ ← [Stop] - ├─ [0] VM::stopAndReturnStateDiff() - │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 1)] + ├─ [..] 
VM::stopAndReturnStateDiff() + │ └─ ← [Return] [((0, 31337 [3.133e4]), 0, 0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 1000000000000000000 [1e18], 1000000000000000000 [1e18], 0x, 0, 0xd423740b0000000000000000000000000000000000000000000000000000000000000037, false, [(0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000064, false), (0x34A1D3fff3958843C43aD80F30b94c510645C316, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000064, 0x0000000000000000000000000000000000000000000000000000000000000037, false)], 1)] └─ ← [Stop] [PASS] testCreateaccesses() ([GAS]) Traces: [..] StateDiffTest::setUp() - ├─ [..] → new @0x7D8CB8F412B3ee9AC79558791333F41d2b1ccDAC + ├─ [..] → new @0x34A1D3fff3958843C43aD80F30b94c510645C316 │ └─ ← [Return] [..] bytes of code - ├─ [..] → new @0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + ├─ [..] → new @0x90193C961A926261B756D1E5bb255e67ff9498A1 │ └─ ← [Return] [..] bytes of code └─ ← [Stop] [..] StateDiffTest::testCreateaccesses() - ├─ [0] VM::startStateDiffRecording() + ├─ [..] VM::startStateDiffRecording() │ └─ ← [Return] - ├─ [..] → new @0x2e234DAe75C793f67A35089C9d99245E1C58470b + ├─ [..] → new @0xA8452Ec99ce0C64f20701dB7dD3abDb607c00496 │ └─ ← [Return] [..] bytes of code - ├─ [0] VM::stopAndReturnStateDiff() - │ └─ ← [Return] [((0, 31337 [3.133e4]), 4, 0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 1000000000000000000 [1e18], 0x0000000000000000000000000000000000000000000000000000000000000064, false, [(0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000000, false), (0x2e234DAe75C793f67A35089C9d99245E1C58470b, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000064, false)], 1)] + ├─ [..] VM::stopAndReturnStateDiff() + │ └─ ← [Return] [((0, 31337 [3.133e4]), 4, 0xA8452Ec99ce0C64f20701dB7dD3abDb607c00496, 0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496, true, 0, 1000000000000000000 [1e18], 0x, 1000000000000000000 [1e18], 0x0000000000000000000000000000000000000000000000000000000000000064, false, [(0xA8452Ec99ce0C64f20701dB7dD3abDb607c00496, 0x0000000000000000000000000000000000000000000000000000000000000001, false, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000000, false), (0xA8452Ec99ce0C64f20701dB7dD3abDb607c00496, 0x0000000000000000000000000000000000000000000000000000000000000001, true, 0x0000000000000000000000000000000000000000000000000000000000000000, 0x0000000000000000000000000000000000000000000000000000000000000064, false)], 1)] └─ ← [Stop] Suite result: ok. 
3 passed; 0 failed; 0 skipped; [ELAPSED] diff --git a/crates/forge/tests/it/revive/cheat_etch.rs b/crates/forge/tests/it/revive/cheat_etch.rs new file mode 100644 index 0000000000000..87b043e58d63b --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_etch.rs @@ -0,0 +1,16 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm_mode_with_any_etched_evm_code(ReviveRuntimeMode::Pvm)] +#[case::evm_mode_with_any_etched_evm_code(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_etch(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new(".*", "EtchTest", ".*/revive/EtchTest.t.sol"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_gas_metering.rs b/crates/forge/tests/it/revive/cheat_gas_metering.rs new file mode 100644 index 0000000000000..6eb4f9c12287e --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_gas_metering.rs @@ -0,0 +1,49 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_pause_gas_metering_with_pvm_call(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testPauseGasMeteringWithPvmCall", "GasMetering", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_resume_gas_metering_with_pvm_call(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testResumeGasMeteringWithPvmCall", "GasMetering", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_reset_gas_metering_with_pvm_call(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testResetGasMeteringWithPvmCall", "GasMetering", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_create_during_paused_metering(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testCreateDuringPausedMetering", "GasMetering", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_mock_call.rs b/crates/forge/tests/it/revive/cheat_mock_call.rs new file mode 100644 index 0000000000000..d291db31cc65a --- 
/dev/null +++ b/crates/forge/tests/it/revive/cheat_mock_call.rs @@ -0,0 +1,16 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_mock_call(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new(".*", "MockCall", ".*/revive/MockCall.t.sol"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_mock_calls.rs b/crates/forge/tests/it/revive/cheat_mock_calls.rs new file mode 100644 index 0000000000000..a4fa94f31ce41 --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_mock_calls.rs @@ -0,0 +1,16 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_mock_calls(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new(".*", ".*", ".*/revive/MockCalls.t.sol"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_mock_functions.rs b/crates/forge/tests/it/revive/cheat_mock_functions.rs new file mode 100644 index 0000000000000..f31e5ed17e91c --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_mock_functions.rs @@ -0,0 +1,43 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_mockx_function(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("test_mockx_function", "MockFunction", ".*/revive/MockFunction.t.sol"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_mock_function_concrete_args(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new( + "test_mock_function_concrete_args", + "MockFunction", + ".*/revive/MockFunction.t.sol", + ); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_mock_function_all_args(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = + Filter::new("test_mock_function_all_args", "MockFunction", ".*/revive/MockFunction.t.sol"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/cheat_prank.rs b/crates/forge/tests/it/revive/cheat_prank.rs new file mode 100644 index 
0000000000000..31c850d07b0ef --- /dev/null +++ b/crates/forge/tests/it/revive/cheat_prank.rs @@ -0,0 +1,16 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_revive_prank(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new(".*", ".*", ".*/revive/Prank.t.sol.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/migration.rs b/crates/forge/tests/it/revive/migration.rs index b9a4620058a36..4907e39e9088b 100644 --- a/crates/forge/tests/it/revive/migration.rs +++ b/crates/forge/tests/it/revive/migration.rs @@ -39,7 +39,7 @@ async fn test_revive_bytecode_migration(#[case] runtime_mode: ReviveRuntimeMode) #[rstest] #[case::pvm(ReviveRuntimeMode::Pvm)] -// TODO: Add Evm test when pallet-revive will allow for Evm bytecode upload +#[case::evm(ReviveRuntimeMode::Evm)] #[tokio::test(flavor = "multi_thread")] async fn test_revive_bytecode_migration_to_revive(#[case] runtime_mode: ReviveRuntimeMode) { let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); @@ -67,3 +67,23 @@ async fn test_revive_timestamp_migration(#[case] runtime_mode: ReviveRuntimeMode let filter = Filter::new("testTimestampMigration", "EvmReviveMigrationTest", ".*/revive/.*"); TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; } + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_revive_immutables_migration(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testImmutablesMigration", "EvmReviveMigrationTest", ".*/revive/.*"); + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_revive_callback_from_revive(#[case] runtime_mode: ReviveRuntimeMode) { + let runner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testCallbackFromRevive", "EvmReviveMigrationTest", ".*/revive/.*"); + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/forge/tests/it/revive/mod.rs b/crates/forge/tests/it/revive/mod.rs index dd8e7248c9892..2d929b8d534d8 100644 --- a/crates/forge/tests/it/revive/mod.rs +++ b/crates/forge/tests/it/revive/mod.rs @@ -1,4 +1,11 @@ //! 
Revive strategy tests +pub mod cheat_etch; +pub mod cheat_gas_metering; +pub mod cheat_mock_call; +pub mod cheat_mock_calls; +pub mod cheat_mock_functions; +pub mod cheat_prank; pub mod cheat_store; pub mod migration; +pub mod tx_gas_price; diff --git a/crates/forge/tests/it/revive/tx_gas_price.rs b/crates/forge/tests/it/revive/tx_gas_price.rs new file mode 100644 index 0000000000000..1485b4c42c5b9 --- /dev/null +++ b/crates/forge/tests/it/revive/tx_gas_price.rs @@ -0,0 +1,60 @@ +use crate::{config::*, test_helpers::TEST_DATA_REVIVE}; +use foundry_test_utils::Filter; +use revive_strategy::ReviveRuntimeMode; +use revm::primitives::hardfork::SpecId; +use rstest::rstest; + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_works(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPriceWorks", "TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_works_with_zero(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPriceWorksWithZero", "TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_works_with_large_value(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPriceWorksWithLargeValue", "TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_works_in_both_modes(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPriceWorksInBothModes", "TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} + +#[rstest] +#[case::pvm(ReviveRuntimeMode::Pvm)] +#[case::evm(ReviveRuntimeMode::Evm)] +#[tokio::test(flavor = "multi_thread")] +async fn test_tx_gas_price_preserved_in_pvm_contract(#[case] runtime_mode: ReviveRuntimeMode) { + let runner: forge::MultiContractRunner = TEST_DATA_REVIVE.runner_revive(runtime_mode); + let filter = Filter::new("testTxGasPricePreservedInPvmContract", "TxGasPrice", ".*/revive/.*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::PRAGUE).run().await; +} diff --git a/crates/revive-env/Cargo.toml b/crates/revive-env/Cargo.toml index f02c3fb552718..379242b4204be 100644 --- a/crates/revive-env/Cargo.toml +++ b/crates/revive-env/Cargo.toml @@ -16,14 +16,7 @@ std = ["polkadot-sdk/std"] codec = { version = "3.7.5", default-features = false, package = "parity-scale-codec" } scale-info = { version = "2.11.6", default-features = false } -polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch 
= "master", features = [ - "experimental", - "runtime", - "polkadot-runtime-common", - "pallet-revive", - "pallet-balances", - "pallet-timestamp" -]} +polkadot-sdk.workspace = true [lints] workspace = true diff --git a/crates/revive-env/src/runtime.rs b/crates/revive-env/src/runtime.rs index a3e09e7cb0992..417000c927f80 100644 --- a/crates/revive-env/src/runtime.rs +++ b/crates/revive-env/src/runtime.rs @@ -87,6 +87,7 @@ impl pallet_revive::Config for Runtime { type ChainId = ConstU64<420_420_420>; type NativeToEthRatio = ConstU32<1_000_000_000>; type FindAuthor = Self; + type DebugEnabled = ConstBool; } impl FindAuthor<::AccountId> for Runtime { diff --git a/crates/revive-strategy/Cargo.toml b/crates/revive-strategy/Cargo.toml index a6141c95fe9e3..1ac668581d30a 100644 --- a/crates/revive-strategy/Cargo.toml +++ b/crates/revive-strategy/Cargo.toml @@ -36,15 +36,7 @@ semver.workspace = true codec = { version = "3.7.5", default-features = false, package = "parity-scale-codec" } scale-info = { version = "2.11.6", default-features = false } -polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch = "master", features = [ - "experimental", - "runtime", - "polkadot-runtime-common", - "pallet-revive", - "pallet-balances", - "pallet-timestamp" -]} - +polkadot-sdk.workspace = true [lints] workspace = true diff --git a/crates/revive-strategy/src/cheatcodes/mock_handler.rs b/crates/revive-strategy/src/cheatcodes/mock_handler.rs new file mode 100644 index 0000000000000..a81a8d0cf2d06 --- /dev/null +++ b/crates/revive-strategy/src/cheatcodes/mock_handler.rs @@ -0,0 +1,211 @@ +use std::{ + cell::RefCell, + collections::{BTreeMap, VecDeque}, + rc::Rc, +}; + +use alloy_primitives::{Address, Bytes, map::foldhash::HashMap, ruint::aliases::U256}; +use foundry_cheatcodes::{Ecx, MockCallDataContext, MockCallReturnData}; +use polkadot_sdk::{ + frame_system, + pallet_revive::{ + self, AddressMapper, DelegateInfo, ExecOrigin, ExecReturnValue, Pallet, mock::MockHandler, + }, + pallet_revive_uapi::ReturnFlags, + polkadot_sdk_frame::prelude::OriginFor, + sp_core::H160, +}; +use revive_env::{AccountId, Runtime}; + +use revm::{context::JournalTr, interpreter::InstructionResult}; + +// Implementation object that holds the mock state and implements the MockHandler trait for Revive. +// It is only purpose is to make transferring the mock state into the Revive EVM easier and then +// synchronize whatever mocks got consumed back into the Cheatcodes state after the call. +#[derive(Clone)] +pub(crate) struct MockHandlerImpl { + inner: Rc>>, + pub _prank_enabled: bool, +} + +impl MockHandlerImpl { + /// Creates a new MockHandlerImpl from the given Ecx and Cheatcodes state. + pub(crate) fn new( + ecx: &Ecx<'_, '_, '_>, + caller: &Address, + target_address: Option<&Address>, + callee: Option<&Address>, + state: &mut foundry_cheatcodes::Cheatcodes, + ) -> Self { + let (inject_env, prank_enabled) = + MockHandlerInner::new(ecx, caller, target_address, callee, state); + Self { inner: Rc::new(RefCell::new(inject_env)), _prank_enabled: prank_enabled } + } + + /// Updates the given Cheatcodes state with the current mock state. 
+ /// This is used to synchronize the mock state after a call has been executed in Revive. + pub(crate) fn update_state_mocks(&self, state: &mut foundry_cheatcodes::Cheatcodes) { + let mock_inner = self.inner.borrow(); + state.mocked_calls = mock_inner.mocked_calls.clone(); + state.mocked_functions = mock_inner.mocked_functions.clone(); + } + + pub(crate) fn fund_pranked_accounts(&self, account: Address) { + // Fuzzed prank addresses have no balance, so they won't exist in revive and + // calls will fail; this is not a problem when running in REVM. + // TODO: Figure out why this is still needed. + let balance = Pallet::<Runtime>::evm_balance(&H160::from_slice(account.as_slice())); + if balance == 0.into() { + Pallet::<Runtime>::set_evm_balance( + &H160::from_slice(account.as_slice()), + u128::MAX.into(), + ) + .expect("Could not fund pranked account"); + } + } +} + +impl MockHandler for MockHandlerImpl { + fn mock_call( + &self, + callee: H160, + call_data: &[u8], + value_transferred: polkadot_sdk::pallet_revive::U256, + ) -> Option<ExecReturnValue> { + let mut mock_inner = self.inner.borrow_mut(); + let ctx = MockCallDataContext { + calldata: call_data.to_vec().into(), + value: Some(U256::from_limbs(value_transferred.0)), + }; + + // Use the same logic as in inspector.rs to find the correct mocked call and consume some of + // them. https://github.com/paritytech/foundry-polkadot/blob/26eda0de53ac03f7ac9b6a6023d8243101cffaf1/crates/cheatcodes/src/inspector.rs#L1013 + if let Some(mock_data) = + mock_inner.mocked_calls.get_mut(&Address::from_slice(callee.as_bytes())) + { + if let Some(return_data_queue) = match mock_data.get_mut(&ctx) { + Some(found) => Some(found), + None => mock_data + .iter_mut() + .find(|(key, _)| { + ctx.calldata.starts_with(&key.calldata) + && (key.value.is_none() + || ctx.value == key.value + || (ctx.value == Some(U256::ZERO) && key.value.is_none())) + }) + .map(|(_, v)| v), + } && let Some(return_data) = if return_data_queue.len() == 1 { + // If the mocked calls stack has a single element in it, don't empty it + return_data_queue.front().map(|x| x.to_owned()) + } else { + // Else, we pop the front element + return_data_queue.pop_front() + } { + return Some(ExecReturnValue { + flags: if matches!(return_data.ret_type, InstructionResult::Revert) { + ReturnFlags::REVERT + } else { + ReturnFlags::default() + }, + data: return_data.data.0.to_vec(), + }); + } + }; + None + } + + fn mock_caller(&self, frames_len: usize) -> Option<OriginFor<Runtime>> { + let mock_inner = self.inner.borrow(); + if frames_len == 0 && mock_inner.delegated_caller.is_none() { + return Some(mock_inner.caller.clone()); + } + None + } + + fn mock_delegated_caller( + &self, + dest: H160, + input_data: &[u8], + ) -> Option<DelegateInfo<Runtime>> { + let mock_inner = self.inner.borrow(); + + // Mocked functions are implemented by making use of the hooks for delegated calls.
+ if let Some(mocked_function) = + mock_inner.mocked_functions.get(&Address::from_slice(dest.as_bytes())) + { + let input_data = Bytes::from(input_data.to_vec()); + if let Some(target) = mocked_function + .get(&input_data) + .or_else(|| input_data.get(..4).and_then(|selector| mocked_function.get(selector))) + { + return Some(DelegateInfo { + caller: + ExecOrigin::<Runtime>::from_runtime_origin(OriginFor::<Runtime>::signed( + <Runtime as pallet_revive::Config>::AddressMapper::to_account_id(&dest), + )).ok()?, + callee: H160::from_slice(target.as_slice()) + } + ); + } + } + + mock_inner.delegated_caller.as_ref().and_then(|delegate_caller| { + Some(DelegateInfo { + caller: ExecOrigin::<Runtime>::from_runtime_origin(delegate_caller.clone()).ok()?, + callee: mock_inner.callee, + }) + }) + } +} + +// Internal struct that holds the mock state. It is wrapped in an Rc<RefCell<..>> in MockHandlerImpl +// to make it easier to transfer the state into Revive and back and be able to mutate it from the +// MockHandler trait methods. +#[derive(Clone)] +struct MockHandlerInner { + pub caller: OriginFor<Runtime>, + pub delegated_caller: Option<OriginFor<Runtime>>, + pub callee: H160, + + pub mocked_calls: HashMap<Address, BTreeMap<MockCallDataContext, VecDeque<MockCallReturnData>>>, + pub mocked_functions: HashMap<Address, HashMap<Bytes, Address>>, +} + +impl MockHandlerInner { + /// Creates a new MockHandlerInner from the given Ecx and Cheatcodes state. + /// Also returns whether a prank is currently enabled. + fn new( + ecx: &Ecx<'_, '_, '_>, + caller: &Address, + target_address: Option<&Address>, + callee: Option<&Address>, + state: &mut foundry_cheatcodes::Cheatcodes, + ) -> (Self, bool) { + let curr_depth = ecx.journaled_state.depth(); + let mut prank_enabled = false; + let pranked_caller = OriginFor::<Runtime>::signed(AccountId::to_fallback_account_id( + &H160::from_slice(caller.as_slice()), + )); + + let delegated_caller = target_address.map(|addr| { + OriginFor::<Runtime>::signed(AccountId::to_fallback_account_id(&H160::from_slice( + addr.as_slice(), + ))) + }); + + let state_inject = Self { + caller: pranked_caller, + delegated_caller, + mocked_calls: state.mocked_calls.clone(), + callee: callee.map(|addr| H160::from_slice(addr.as_slice())).unwrap_or_default(), + mocked_functions: state.mocked_functions.clone(), + }; + if let Some(prank) = &state.get_prank(curr_depth) { + if curr_depth >= prank.depth { + prank_enabled = true; + } + } + (state_inject, prank_enabled) + } +} diff --git a/crates/revive-strategy/src/cheatcodes/mod.rs b/crates/revive-strategy/src/cheatcodes/mod.rs index 5fe5c4d81af6e..1d70b7399a4a3 100644 --- a/crates/revive-strategy/src/cheatcodes/mod.rs +++ b/crates/revive-strategy/src/cheatcodes/mod.rs @@ -1,3 +1,5 @@ +mod mock_handler; + use alloy_primitives::{Address, B256, Bytes, Log, hex, ruint::aliases::U256}; use alloy_rpc_types::BlobTransactionSidecar; use alloy_sol_types::SolValue; @@ -6,11 +8,12 @@ use foundry_cheatcodes::{ CheatcodeInspectorStrategyContext, CheatcodeInspectorStrategyRunner, CheatsConfig, CheatsCtxt, CommonCreateInput, DealRecord, Ecx, Error, EvmCheatcodeInspectorStrategyRunner, Result, Vm::{ - dealCall, getNonce_0Call, loadCall, pvmCall, resetNonceCall, rollCall, setNonceCall, - setNonceUnsafeCall, storeCall, warpCall, + dealCall, etchCall, getNonce_0Call, loadCall, pvmCall, resetNonceCall, rollCall, + setNonceCall, setNonceUnsafeCall, storeCall, warpCall, }, journaled_account, precompile_error, }; + use foundry_compilers::resolc::dual_compiled_contracts::DualCompiledContracts; use revive_env::{AccountId, Runtime, System, Timestamp}; use std::{ @@ -20,28 +23,30 @@ use std::{ }; use tracing::warn; +use alloy_eips::eip7702::SignedAuthorization; use polkadot_sdk::{
pallet_revive::{ - self, AccountInfo, AddressMapper, BalanceOf, Code, ContractInfo, ExecConfig, Pallet, - evm::CallTrace, + self, AccountInfo, AddressMapper, BalanceOf, BytecodeType, Code, ContractInfo, + DebugSettings, ExecConfig, Executable, Pallet, evm::CallTrace, }, polkadot_sdk_frame::prelude::OriginFor, - sp_core::{self, H160}, + sp_core::{self, H160, H256}, + sp_io, sp_weights::Weight, }; use crate::{ + cheatcodes::mock_handler::MockHandlerImpl, execute_with_externalities, tracing::{Tracer, storage_tracer::AccountAccess}, }; use foundry_cheatcodes::Vm::{AccountAccess as FAccountAccess, ChainInfo}; -use alloy_eips::eip7702::SignedAuthorization; use revm::{ bytecode::opcode as op, context::{CreateScheme, JournalTr}, interpreter::{ - CallInputs, CallOutcome, CreateOutcome, Gas, InstructionResult, Interpreter, + CallInputs, CallOutcome, CallScheme, CreateOutcome, Gas, InstructionResult, Interpreter, InterpreterResult, interpreter_types::Jumps, }, state::Bytecode, @@ -196,6 +201,49 @@ fn set_block_number(new_height: U256, ecx: Ecx<'_, '_, '_>) { }); } +// Implements the `etch` cheatcode for PVM. +fn etch_call(target: &Address, new_runtime_code: &Bytes, ecx: Ecx<'_, '_, '_>) -> Result { + let origin_address = H160::from_slice(ecx.tx.caller.as_slice()); + let origin_account = AccountId::to_fallback_account_id(&origin_address); + + execute_with_externalities(|externalities| { + externalities.execute_with(|| { + let code = new_runtime_code.to_vec(); + let code_type = + if code.starts_with(b"PVM\0") { BytecodeType::Pvm } else { BytecodeType::Evm }; + let contract_blob = Pallet::<Runtime>::try_upload_code( + origin_account.clone(), + code, + code_type, + BalanceOf::<Runtime>::MAX, + &ExecConfig::new_substrate_tx(), + ) + .map_err(|_| <&str as Into<Error>>::into("Could not upload PVM code"))? + .0; + + let mut contract_info = if let Some(contract_info) = + AccountInfo::<Runtime>::load_contract(&H160::from_slice(target.as_slice())) + { + contract_info + } else { + ContractInfo::<Runtime>::new( + &origin_address, + System::account_nonce(origin_account), + *contract_blob.code_hash(), + ) + .map_err(|_| <&str as Into<Error>>::into("Could not create contract info"))? + }; + contract_info.code_hash = *contract_blob.code_hash(); + AccountInfo::<Runtime>::insert_contract( + &H160::from_slice(target.as_slice()), + contract_info, + ); + Ok::<(), Error>(()) + }) + })?; + Ok(Default::default()) +} + fn set_timestamp(new_timestamp: U256, ecx: Ecx<'_, '_, '_>) { // Set timestamp in EVM context (seconds).
ecx.block.timestamp = new_timestamp; @@ -389,6 +437,12 @@ impl CheatcodeInspectorStrategyRunner for PvmCheatcodeInspectorStrategyRunner { Ok(Default::default()) } + t if using_pvm && is::<etchCall>(t) => { + let etchCall { target, newRuntimeBytecode } = + cheatcode.as_any().downcast_ref().unwrap(); + etch_call(target, newRuntimeBytecode, ccx.ecx)?; + Ok(Default::default()) + } t if using_pvm && is::<loadCall>(t) => { tracing::info!(cheatcode = ?cheatcode.as_debug() , using_pvm = ?using_pvm); let &loadCall { target, slot } = cheatcode.as_any().downcast_ref().unwrap(); @@ -559,9 +613,7 @@ fn select_revive(ctx: &mut PvmCheatcodeInspectorStrategyContext, data: Ecx<'_, ' System::set_block_number(block_number.saturating_to()); Timestamp::set_timestamp(timestamp.saturating_to::<u64>() * 1000); - let test_contract = data.journaled_state.database.get_test_contract_address(); let persistent_accounts = data.journaled_state.database.persistent_accounts().clone(); - for address in persistent_accounts.into_iter().chain([data.tx.caller]) { let acc = data.journaled_state.load_account(address).expect("failed to load account"); let amount = acc.data.info.balance; @@ -577,57 +629,75 @@ fn select_revive(ctx: &mut PvmCheatcodeInspectorStrategyContext, data: Ecx<'_, ' a.nonce = nonce.min(u32::MAX.into()).try_into().expect("shouldn't happen"); }); - // TODO handle immutables - // Migrate bytecode for deployed contracts (skip test contract) - if test_contract != Some(address) - && let Some(bytecode) = acc.data.info.code.as_ref() { - + if let Some(bytecode) = acc.data.info.code.as_ref() { let account_h160 = H160::from_slice(address.as_slice()); // Skip if contract already exists in pallet-revive if AccountInfo::<Runtime>::load_contract(&account_h160).is_none() { - // Determine which bytecode to upload based on runtime mode - let bytecode_to_upload = ctx.dual_compiled_contracts - .find_by_evm_deployed_bytecode_with_immutables(bytecode.original_byte_slice()) - .and_then(|(_, contract)| { - match ctx.runtime_mode { - crate::ReviveRuntimeMode::Pvm => contract.resolc_bytecode.as_bytes().map(|b| b.to_vec()), - crate::ReviveRuntimeMode::Evm => None, - // TODO: We do not have method to upload the EVM bytecode to pallet-revive - //contract.evm_bytecode.as_bytes().map(|b| b.to_vec()) + // Find the matching dual-compiled contract by EVM bytecode + if let Some((_, contract)) = ctx.dual_compiled_contracts + .find_by_evm_deployed_bytecode_with_immutables(bytecode.original_byte_slice()) + { + let (code_bytes, immutable_data, code_type) = match ctx.runtime_mode { + crate::ReviveRuntimeMode::Pvm => { + let immutable_data = contract.evm_immutable_references + .as_ref() + .map(|immutable_refs| { + let evm_bytecode = bytecode.original_byte_slice(); + + // Collect all immutable bytes from their scattered offsets + immutable_refs + .values() + .flatten() + .flat_map(|offset| { + let start = offset.start as usize; + let end = start + offset.length as usize; + evm_bytecode.get(start..end).unwrap_or_else(|| panic!("Immutable offset out of bounds: address={:?}, offset={}..{}, bytecode_len={}", + address, start, end, evm_bytecode.len())) + }) + .copied() + .collect::<Vec<u8>>() + }); + (contract.resolc_deployed_bytecode.as_bytes().map(|b| b.to_vec()), immutable_data, BytecodeType::Pvm) + }, + crate::ReviveRuntimeMode::Evm => (contract.evm_deployed_bytecode.as_bytes().map(|b| b.to_vec()), None, BytecodeType::Evm), + }; + + if let Some(code_bytes) = code_bytes { + let upload_result = Pallet::<Runtime>::try_upload_code( + Pallet::<Runtime>::account_id(), + code_bytes.clone(), + code_type,
u64::MAX.into(), + &ExecConfig::new_substrate_tx(), + ); + match upload_result { + Ok(_) => { + let code_hash = H256(sp_io::hashing::keccak_256(&code_bytes)); + let contract_info = ContractInfo::::new(&account_h160, nonce as u32, code_hash) + .expect("Failed to create contract info"); + AccountInfo::::insert_contract(&account_h160, contract_info); + if let Some(data) = immutable_data.and_then(|immutables| immutables.try_into().ok()) + { + Pallet::::set_immutables(account_h160, data).expect("Failed to migrate immutables"); } - }); - - if let Some(code_bytes) = bytecode_to_upload { - let origin = OriginFor::::signed(Pallet::::account_id()); - let upload_result = Pallet::::bare_upload_code( - origin, - code_bytes.clone(), - BalanceOf::::MAX, - ); - - match upload_result { - Ok(result) => { - let code_hash = result.code_hash; - let contract_info = ContractInfo::::new(&account_h160, nonce as u32, code_hash) - .expect("Failed to create contract info"); - AccountInfo::::insert_contract(&account_h160, contract_info); - } - Err(err) => { - tracing::warn!( - address = ?address, - runtime_mode = ?ctx.runtime_mode, - bytecode_len = code_bytes.len(), - error = ?err, - "Failed to upload bytecode to pallet-revive, skipping migration" - ); + } + Err(err) => { + tracing::warn!( + address = ?address, + runtime_mode = ?ctx.runtime_mode, + bytecode_len = code_bytes.len(), + error = ?err, + "Failed to upload bytecode to pallet-revive, skipping migration" + ); + } } + } else { + tracing::info!( + address = ?address, + "no PVM equivalent found for EVM bytecode, skipping migration" + ); } - } else { - tracing::info!( - address = ?address, - "no PVM equivalent found for EVM bytecode, skipping migration" - ); } } } @@ -709,6 +779,12 @@ fn select_evm(ctx: &mut PvmCheatcodeInspectorStrategyContext, data: Ecx<'_, '_, } impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspectorStrategyRunner { + fn is_pvm_enabled(&self, state: &mut foundry_cheatcodes::Cheatcodes) -> bool { + let ctx = get_context_ref_mut(state.strategy.context.as_mut()); + + ctx.using_pvm + } + /// Try handling the `CREATE` within PVM. /// /// If `Some` is returned then the result must be returned immediately, else the call must be @@ -720,7 +796,10 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector input: &dyn CommonCreateInput, executor: &mut dyn foundry_cheatcodes::CheatcodesExecutor, ) -> Option { - let ctx = get_context_ref_mut(state.strategy.context.as_mut()); + let mock_handler = MockHandlerImpl::new(&ecx, &input.caller(), None, None, state); + + let ctx: &mut PvmCheatcodeInspectorStrategyContext = + get_context_ref_mut(state.strategy.context.as_mut()); if !ctx.using_pvm { return None; @@ -772,15 +851,32 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector } }; + let gas_price_pvm = + sp_core::U256::from_little_endian(&U256::from(ecx.tx.gas_price).as_le_bytes()); let mut tracer = Tracer::new(true); let res = execute_with_externalities(|externalities| { externalities.execute_with(|| { tracer.trace(|| { let origin = OriginFor::::signed(AccountId::to_fallback_account_id( - &H160::from_slice(input.caller().as_slice()), + &H160::from_slice(ecx.tx.caller.as_slice()), )); let evm_value = sp_core::U256::from_little_endian(&input.value().as_le_bytes()); + mock_handler.fund_pranked_accounts(ecx.tx.caller); + + // Pre-Dispatch Increments the nonce of the origin, so let's make sure we do + // that here too to replicate the same address generation. 
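// Editor's note: an illustrative, self-contained sketch of the immutable-migration step
// above, not part of this patch. Immutable references are scattered (start, length) ranges
// inside the deployed EVM bytecode; the migration concatenates those slices into the single
// blob later handed to `set_immutables`. The `Offset` struct and the function name here are
// hypothetical stand-ins for the compiler metadata types used in the real code.
struct Offset {
    start: u32,
    length: u32,
}

fn collect_immutable_bytes(evm_bytecode: &[u8], offsets: &[Offset]) -> Vec<u8> {
    offsets
        .iter()
        .flat_map(|o| {
            let start = o.start as usize;
            let end = start + o.length as usize;
            // An out-of-range offset means the bytecode and its metadata do not match.
            evm_bytecode.get(start..end).expect("immutable offset out of bounds")
        })
        .copied()
        .collect()
}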
+ System::inc_account_nonce(AccountId::to_fallback_account_id( + &H160::from_slice(ecx.tx.caller.as_slice()), + )); + + let exec_config = ExecConfig { + bump_nonce: true, + collect_deposit_from_hold: None, + effective_gas_price: Some(gas_price_pvm), + mock_handler: Some(Box::new(mock_handler.clone())), + is_dry_run: None, + }; let code = Code::Upload(code_bytes.clone()); let data = constructor_args; let salt = match input.scheme() { @@ -795,6 +891,13 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector _ => None, }; + // If limits are set to max, enable debug mode to bypass them in revive + if ecx.cfg.limit_contract_code_size == Some(usize::MAX) + || ecx.cfg.limit_contract_initcode_size == Some(usize::MAX) + { + let debug_settings = DebugSettings::new(true); + debug_settings.write_to_storage::(); + } Pallet::::bare_instantiate( origin, evm_value, @@ -804,20 +907,25 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector code, data, salt, - ExecConfig::new_substrate_tx(), + exec_config, ) }) }) }); - let mut gas = Gas::new(input.gas_limit()); if res.result.as_ref().is_ok_and(|r| !r.result.did_revert()) { self.append_recorded_accesses(state, ecx, tracer.get_recorded_accesses()); } post_exec(state, ecx, executor, &mut tracer, false); + mock_handler.update_state_mocks(state); + match &res.result { Ok(result) => { - let _ = gas.record_cost(res.gas_required.ref_time()); + // Only record gas cost if gas metering is not paused. + // When paused, the gas counter should remain frozen. + if !state.gas_metering.paused { + let _ = gas.record_cost(res.gas_required.ref_time()); + } let outcome = if result.result.did_revert() { CreateOutcome { @@ -869,6 +977,10 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector executor: &mut dyn foundry_cheatcodes::CheatcodesExecutor, ) -> Option { let ctx = get_context_ref_mut(state.strategy.context.as_mut()); + let target_address = match call.scheme { + CallScheme::DelegateCall => Some(call.target_address), + _ => None, + }; if !ctx.using_pvm { return None; @@ -889,18 +1001,44 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector } tracing::info!("running call on pallet-revive with {} {:#?}", ctx.runtime_mode, call); + + let gas_price_pvm = + sp_core::U256::from_little_endian(&U256::from(ecx.tx.gas_price).as_le_bytes()); + let mock_handler = MockHandlerImpl::new( + &ecx, + &call.caller, + target_address.as_ref(), + Some(&call.bytecode_address), + state, + ); + let mut tracer = Tracer::new(true); let res = execute_with_externalities(|externalities| { externalities.execute_with(|| { tracer.trace(|| { let origin = OriginFor::::signed(AccountId::to_fallback_account_id( - &H160::from_slice(call.caller.as_slice()), + &H160::from_slice(ecx.tx.caller.as_slice()), )); + mock_handler.fund_pranked_accounts(ecx.tx.caller); + let evm_value = sp_core::U256::from_little_endian(&call.call_value().as_le_bytes()); - let target = H160::from_slice(call.target_address.as_slice()); + let exec_config = ExecConfig { + bump_nonce: true, + collect_deposit_from_hold: None, + effective_gas_price: Some(gas_price_pvm), + mock_handler: Some(Box::new(mock_handler.clone())), + is_dry_run: None, + }; + // If limits are set to max, enable debug mode to bypass them in revive + if ecx.cfg.limit_contract_code_size == Some(usize::MAX) + || ecx.cfg.limit_contract_initcode_size == Some(usize::MAX) + { + let debug_settings = DebugSettings::new(true); + debug_settings.write_to_storage::(); + 
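// Editor's note: a small illustrative helper, not part of this patch, for the value-bridging
// pattern used in both the create and call paths here (gas price and call value): alloy's
// `U256` and `sp_core::U256` both expose a little-endian byte view, so converting through LE
// bytes preserves the numeric value. The function name is hypothetical.
fn to_sp_u256(value: alloy_primitives::U256) -> sp_core::U256 {
    sp_core::U256::from_little_endian(&value.as_le_bytes())
}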
} Pallet::::bare_call( origin, @@ -910,12 +1048,12 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector // TODO: fixing. BalanceOf::::MAX, call.input.bytes(ecx).to_vec(), - ExecConfig::new_substrate_tx(), + exec_config, ) }) }) }); - + mock_handler.update_state_mocks(state); let mut gas = Gas::new(call.gas_limit); if res.result.as_ref().is_ok_and(|r| !r.did_revert()) { self.append_recorded_accesses(state, ecx, tracer.get_recorded_accesses()); @@ -923,7 +1061,11 @@ impl foundry_cheatcodes::CheatcodeInspectorStrategyExt for PvmCheatcodeInspector post_exec(state, ecx, executor, &mut tracer, call.is_static); match res.result { Ok(result) => { - let _ = gas.record_cost(res.gas_required.ref_time()); + // Only record gas cost if gas metering is not paused. + // When paused, the gas counter should remain frozen. + if !state.gas_metering.paused { + let _ = gas.record_cost(res.gas_required.ref_time()); + } let outcome = if result.did_revert() { tracing::info!("Contract call reverted"); diff --git a/crates/revive-strategy/src/lib.rs b/crates/revive-strategy/src/lib.rs index 711040cd4a417..5ce172f2bfce5 100644 --- a/crates/revive-strategy/src/lib.rs +++ b/crates/revive-strategy/src/lib.rs @@ -23,7 +23,7 @@ mod cheatcodes; mod executor; mod tracing; -pub use cheatcodes::PvmStartupMigration; +pub use cheatcodes::{PvmCheatcodeInspectorStrategyBuilder, PvmStartupMigration}; /// Runtime backend mode for pallet-revive #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] diff --git a/crates/revive-strategy/tests/gas_metering.rs b/crates/revive-strategy/tests/gas_metering.rs new file mode 100644 index 0000000000000..1873e71a6518d --- /dev/null +++ b/crates/revive-strategy/tests/gas_metering.rs @@ -0,0 +1,78 @@ +//! Tests for pauseGasMetering, resumeGasMetering, and resetGasMetering cheatcodes +//! +//! ## Test Strategy +//! +//! These tests verify that gas metering cheatcodes work correctly in both EVM and PVM modes. +//! Gas metering operates at the EVM interpreter level (for testing/debugging), affecting: +//! - How gas is tracked during test execution +//! - The ability to pause/resume/reset gas consumption counting +//! 
- Gas reporting for test analysis + +use foundry_cheatcodes::Cheatcodes; +use foundry_compilers::resolc::dual_compiled_contracts::DualCompiledContracts; +use revive_strategy::{PvmCheatcodeInspectorStrategyBuilder, ReviveRuntimeMode}; + +#[test] +fn evm_pause_gas_metering_sets_flag() { + let state = Cheatcodes::default(); + + assert!(!state.gas_metering.paused, "Gas metering should not be paused initially"); +} + +#[test] +fn evm_gas_metering_initial_state() { + let state = Cheatcodes::default(); + + assert!(!state.gas_metering.paused); + assert!(!state.gas_metering.touched); + assert!(!state.gas_metering.reset); + assert!(state.gas_metering.paused_frames.is_empty()); + assert!(state.gas_metering.last_call_gas.is_none()); +} + +#[test] +fn pvm_cheatcodes_state_initializes() { + use foundry_cheatcodes::CheatcodeInspectorStrategy; + + let mut state = Cheatcodes::default(); + state.strategy = CheatcodeInspectorStrategy::new_pvm( + DualCompiledContracts::default(), + ReviveRuntimeMode::Pvm, + ); + + assert!(!state.gas_metering.paused, "Gas metering should not be paused initially in PVM"); +} + +#[test] +fn pvm_gas_metering_state_structure() { + use foundry_cheatcodes::CheatcodeInspectorStrategy; + + let mut state = Cheatcodes::default(); + state.strategy = CheatcodeInspectorStrategy::new_pvm( + DualCompiledContracts::default(), + ReviveRuntimeMode::Pvm, + ); + + assert!(!state.gas_metering.paused); + assert!(!state.gas_metering.touched); + assert!(!state.gas_metering.reset); + assert!(state.gas_metering.paused_frames.is_empty()); +} + +#[test] +fn gas_metering_is_independent_of_mode() { + use foundry_cheatcodes::CheatcodeInspectorStrategy; + + let evm_state = Cheatcodes::default(); + + let mut pvm_state = Cheatcodes::default(); + pvm_state.strategy = CheatcodeInspectorStrategy::new_pvm( + DualCompiledContracts::default(), + ReviveRuntimeMode::Pvm, + ); + + assert_eq!( + evm_state.gas_metering.paused, pvm_state.gas_metering.paused, + "Gas metering state should be identical in EVM and PVM modes initially" + ); +} diff --git a/crates/revive-utils/Cargo.toml b/crates/revive-utils/Cargo.toml index 80e74221d3922..bbc084ff525e0 100644 --- a/crates/revive-utils/Cargo.toml +++ b/crates/revive-utils/Cargo.toml @@ -14,14 +14,7 @@ exclude.workspace = true [dependencies] foundry-evm-core.workspace = true foundry-evm-traces.workspace = true -polkadot-sdk = { git = "https://github.com/paritytech/polkadot-sdk.git", branch = "master", features = [ - "experimental", - "runtime", - "polkadot-runtime-common", - "pallet-revive", - "pallet-balances", - "pallet-timestamp" -]} +polkadot-sdk.workspace = true revive-env.workspace = true alloy-primitives.workspace = true diff --git a/testdata/default/revive/EtchTest.t.sol b/testdata/default/revive/EtchTest.t.sol new file mode 100644 index 0000000000000..d470f9080230f --- /dev/null +++ b/testdata/default/revive/EtchTest.t.sol @@ -0,0 +1,132 @@ +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract Adder { + function add(uint256 a, uint256 b) public pure returns (uint256) { + return a * b; // Intentional bug to verify etch works + } +} + +contract NestedAdder { + uint256 public inner_a; + uint256 public inner_b; + + constructor(uint256 a, uint256 b) { + inner_a = a; + inner_b = b; + } + + function nested_call(address target) public returns (uint256) { + // Perform the add call on the target contract address + (bool success, bytes memory data) = + target.call(abi.encodeWithSignature("add(uint256,uint256)", inner_a, 
inner_b)); + require(success, "Nested call failed"); + uint256 result = abi.decode(data, (uint256)); + assert(success); + return result; + } +} + +contract EtchTest is DSTest { + Vm constant vm = Vm(address(bytes20(uint160(uint256(keccak256("hevm cheat code")))))); + // This is the bytecode for the correct Adder contract above compiled with resolc. + bytes constant code = + hex"50564d00008c0c000000000000010700c13000c00080047106000000000e0000001c0000002a000000350000004700000063616c6c5f646174615f636f707963616c6c5f646174615f6c6f616463616c6c5f646174615f73697a657365616c5f72657475726e7365745f696d6d757461626c655f6461746176616c75655f7472616e7366657272656405110287920463616c6c879e066465706c6f79068bec25028a531a001f004b007c00a500ae00c100cd00fa005e01630192019701bb0212036003b103c0033c048d04b2046c05ec05f5050106180627063d0660061a0769079e07ab07c607ca07080850099511f07b10087b158475010a02013d0700000251050750100209501004b3009511807b10787b15707b1668951580008411e04911384911304911208000330740951820491128501006e6084911584911504911484911408317400a0501821750821858821948821a40d49808d4a707d4870752072e6417501008ec008217188218108219088216d49707d48609d47909989920d48707977720d497075107090050100a61056467330850100c75098377330833090a283e076467330850100e62098378330733093300100a03019511a07b10587b15507b16489515608411e064164916384916304916208000330740956820491628501012370839070000025317045f9517e08477e07b67186471837733080a010182671880771c977720140800000000f7021677ab8736330014951120fe7b10d8017b15d0017b16c8019515e0018411e04921b8014921b0014921a8014921a0018317a0010a05288d02501016a3049511c07b10387b15307b16289515408411f0647664173300189511f07b10087b156475330820330740330048288f0850121a3a063200828910828a18828b088288d4ba0ad4980bd4ab0b98bb20d4a909979920d4b9095209449511c07b10387b15307b16289515408411e06476838883170a01821718821810821908821a7b67187b68107b69087b6a9551c08210388215308216289511403200009511e0fe7b1018017b1510017b160801951520018411f0828310828208829a08828c829b7b1b20829410d32a067b1638d82a06d8cb00821b38dab006828b188298187b1838c93409c969087b1828d869087b1330d8340664b4821938c9b909c96909c98909c92a08c908087b1c38821a20c9ca0a8e8b88aa4085aa01db8b0a8f98821c288ecbdb980bd49c08db8b0a510ace0064768217387b17c0007b127b12c8008217307b17d0009517e0009518c0007b14087b14d80050101ce3fe8217e8007b17288217e0007b17208217f8007b17188217f0007b1710821838958720d88708821ac88a09d8a90ada880a821830c88a0ad88a08821b08c88b0b7b1a707b19687b1760951780009518607b1b7850101e8cfe821790008218980082198000821a88007b67307b68387b69207b6a288217107b67108217187b67188217207b678217287b67089551e0fe821018018215100182160801951120013200501020a602828a10828b18828c088289d4cb0bd4a908d4b808988820d4ba0a97aa20d4a80852083f9511d07b10287b15209515308411f0827a18827810827b0882777b177b1b087b181064187b1a18649750102280059551d08210288215209511303200008218108217087b87088217187b877b861082177b87189551808210788215708216689511800032008217b0018218b8018219a801821aa001d49808d4a707d487075207e70138070000024921380149213001492128017b1720014921580149215001492140010495176001951840019519200149214801501026c7fd82126001821768018218700182197801821b8001821c880182169001821a98017b1ad8007b16d0007b1cc8007b1bc0007b19f8007b18f0007b17e800951700019518e0009519c0007b12e000501028d801821700017b1738821708017b1730821710017b1728821718017b17209517a00050102ab6fc8217b8007b17188216b0008218a8007b1810821aa0007b1a088219207b19588219287b19508219307b19488219387b19407b17787b16707b1868951780009518609519407b1a6033002c9511807b10787b15707b1668951580008411f07b171082878292828b08829308957a207b1a18d87a06c86b0a7b1a08d8ba0cda660c828a10828818829410829918c8ca06d8a60cc88c0c
7b1c7b18387b1a307b1b287b17207b19587b14507b13489517409518207b124033002428f7fd821918821b10821008d49b07d46008d47808988820d46707977720d4870752075d646482178800821898007b183882138000821a9000d3b706d8b70cd80308da680cc94a06c9c602d8c606d84a0a821c38c99c0cc9ac0cc96c0cc9b707c98707c90306d4c707d42608d47808d42707988820977720d487075107080050102e19640764685010302e048378836933073300320a03019511f87b103308100002838833070133093300340a03013308491718491710491708490732004911184911104901113307046418491108501038f402390804000256183f0b200304000240013308100002838833070133092433003a0a03019511e87b10107b15087b16828b188294188282828c08829a088295828610829810c8ca09c82503d85305c85909d3a900d8a90ada050ac86805c85a0ad85a00c8b404d88508c84808c88000d86a05d3b008d8b00bda850bd3a606d46808d3c906d8c90cd82305db6c05db8b0552051b7b737b79087b7a107b70188210108215088216951118320033003c9511b07b10487b15409515508411f0491130491128491120140700000000717b484e9518207b173833073300362815029511807b10787b15707b1668951580008411f08282828308828410828818829a829b08829c1082991864767b19187b1c107b1b087b1a7b18387b14307b132895174095182064197b122050103efcfe821750821858821940821a487b67107b68187b697b6a089551808210788215708216689511800032009511f87b10330750104067f89511f87b103307015010425af89511c07b10387b15307b16289515408411f064766417501044a2f95012460632008217108218188219821a087b67107b68187b697b6a089551c0821038821530821628951140320239080800025108c0f8330730000383770a0428b3f87c78017c797c7a027c7b03978808d4980897aa1097bb18d4ba0ad4a8087c79057c7a047c7b067c7c07979908d4a90997bb1097cc18d4cb0bd4b909979920d489027c79097c7a087c7b0a7c7c0b979908d4a90997bb1097cc18d4cb0bd4b9097c7a0d7c7b0c7c7c0e7c780f97aa08d4ba0a97cc10978818d4c808d4a808978820d498037c78117c7a107c7b127c7c13978808d4a80897bb1097cc18d4cb0bd4b8087c7a157c7b147c7c167c791797aa08d4ba0a97cc10979918d4c909d4a909979920d4890a7c78197c79187c7b1a7c7c1b978808d4980897bb1097cc18d4cb0bd4b8087c791d7c7b1c7c7c1e7c771f979908d4b90997cc10977718d4c707d49707977720d487076f776fa86f396f2a7b5a187b59107b58087b57821008821595111032009511d87b10207b15187b161082897b19088289087b1982851082861833082050104ad3006f686f59821a6faa821b086fbb787b18787a10787908787898bc38787c1f98bc30787c1e98bc28787c1d98bc20787c1c98bc18787c1b98bc10787c1a98bb08787b1998ab38787b1798ab30787b1698ab28787b1598ab20787b1498ab18787b1398ab10787b1298aa08787a11989a38787a0f989a30787a0e989a28787a0d989a20787a0c989a18787a0b989a10787a0a98990878790998893878790798893078790698892878790598892078790498891878790398891078790298880878780182102082151882161095112832008b7910520931c8780883881f8488e05638000001253309040002390a040002ae8a093d080400020133081000028377c887073200004969488424892421494892344992a490a4244992a1423515aa3449929290248448523549004944442422224a4892a4925492849294244992244955929294246944442449254992244992a424499294a494244a1489884488442291242549922489244444244992a42422220a494a9224499224a9a41492a449524a924411114922444444848888101111111111111111112112894422442212499224491249922491549224499224494992244992244992a4884424499224499224499251a14a5328a525a9242921443094a4494a4a4a922449922449922449929224a52449920490942449a4242529494992129224294912a5424a4892a4429494a424490a112a294992244992244992244992244992244992244992244992244992244992244992aa24a5942425250955294949922449922449922449922449922449922449922449529224082161280500"; + bytes constant code_evm = + 
hex"608060405234801561000f575f5ffd5b5060043610610029575f3560e01c8063771602f71461002d575b5f5ffd5b610047600480360381019061004291906100a9565b61005d565b60405161005491906100f6565b60405180910390f35b5f818361006a919061013c565b905092915050565b5f5ffd5b5f819050919050565b61008881610076565b8114610092575f5ffd5b50565b5f813590506100a38161007f565b92915050565b5f5f604083850312156100bf576100be610072565b5b5f6100cc85828601610095565b92505060206100dd85828601610095565b9150509250929050565b6100f081610076565b82525050565b5f6020820190506101095f8301846100e7565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f61014682610076565b915061015183610076565b92508282019050808211156101695761016861010f565b5b9291505056fea2646970667358221220bde9424751d367d702063695cb7d0afb42c0a83d370954296d07e2b1684208fb64736f6c634300081e0033"; + + // Test etching code into an existing contract instance works correctly. + function testEtchExistingContractPvmCode() public { + vm.pvm(true); + Adder adder = new Adder(); + + // Without etch, the add function is broken + uint256 buggy_result = adder.add(1, 2); + assertEq(buggy_result, 2); + + // Etch the correct bytecode into the existing contract + vm.etch(address(adder), code); + uint256 result = adder.add(1, 2); + assertEq(result, 3); + + // Verify that nested calls also work correctly after etch + uint256 nested_call_result = (new NestedAdder(1, 2)).nested_call(address(adder)); + assertEq(nested_call_result, 3); + } + + // Test etching code into an existing contract instance works correctly. + function testEtchExistingContractEvmCode() public { + vm.pvm(true); + Adder adder = new Adder(); + + // Without etch, the add function is broken + uint256 buggy_result = adder.add(1, 2); + assertEq(buggy_result, 2); + + // Etch the correct bytecode into the existing contract + vm.etch(address(adder), code_evm); + uint256 result = adder.add(1, 2); + assertEq(result, 3); + + // Verify that nested calls also work correctly after etch + uint256 nested_call_result = (new NestedAdder(1, 2)).nested_call(address(adder)); + assertEq(nested_call_result, 3); + } + + // Test etching code into any arbitrary address works correctly. + function testEtchAnyContractPvmCode() public { + vm.pvm(true); + // Etch the correct bytecode into an arbitrary address + address target = address(7070707); + vm.etch(target, code); + (bool success, bytes memory output) = target.call(abi.encodeWithSignature("add(uint256,uint256)", 1, 2)); + uint256 result1 = abi.decode(output, (uint256)); + + assert(success); + assertEq(result1, 3); + + uint256 nested_call_result = (new NestedAdder(1, 2)).nested_call(address(target)); + assertEq(nested_call_result, 3); + + // Etch into the zero address as well to verify it works for reserved addresses + address target2 = address(0); + vm.etch(target2, code); + (bool success2, bytes memory output2) = target2.call(abi.encodeWithSignature("add(uint256,uint256)", 1, 2)); + uint256 result2 = abi.decode(output2, (uint256)); + + assert(success2); + assertEq(result2, 3); + + uint256 nested_call_result2 = (new NestedAdder(1, 2)).nested_call(address(target2)); + assertEq(nested_call_result2, 3); + } + + // Test etching code into any arbitrary address works correctly. 
+ function testEtchAnyContractEvmCode() public { + vm.pvm(true); + // Etch the correct bytecode into an arbitrary address + address target = address(7070707); + vm.etch(target, code); + (bool success, bytes memory output) = target.call(abi.encodeWithSignature("add(uint256,uint256)", 1, 2)); + uint256 result1 = abi.decode(output, (uint256)); + + assert(success); + assertEq(result1, 3); + + uint256 nested_call_result = (new NestedAdder(1, 2)).nested_call(address(target)); + assertEq(nested_call_result, 3); + + // Etch into the zero address as well to verify it works for reserved addresses + address target2 = address(0); + vm.etch(target2, code_evm); + (bool success2, bytes memory output2) = target2.call(abi.encodeWithSignature("add(uint256,uint256)", 1, 2)); + uint256 result2 = abi.decode(output2, (uint256)); + + assert(success2); + assertEq(result2, 3); + + uint256 nested_call_result2 = (new NestedAdder(1, 2)).nested_call(address(target2)); + assertEq(nested_call_result2, 3); + } +} diff --git a/testdata/default/revive/EvmToReviveMigration.t.sol b/testdata/default/revive/EvmToReviveMigration.t.sol index cd11baca0b5a9..8e7cd8c3e06a6 100644 --- a/testdata/default/revive/EvmToReviveMigration.t.sol +++ b/testdata/default/revive/EvmToReviveMigration.t.sol @@ -16,6 +16,67 @@ contract SimpleStorage { } } +contract StorageWithImmutables { + uint256 public immutable deployedAt; + address public immutable deployer; + uint256 public immutable magicNumber; + + constructor(uint256 _magicNumber) { + deployedAt = block.timestamp; + deployer = msg.sender; + magicNumber = _magicNumber; + } + + function getDeployedAt() public view returns (uint256) { + return deployedAt; + } + + function getDeployer() public view returns (address) { + return deployer; + } + + function getMagicNumber() public view returns (uint256) { + return magicNumber; + } +} + +interface IAuthorizationCallback { + function onAuthorization(address caller, uint256 value) external returns (bool); +} + +contract CallbackContract { + address public owner; + uint256 public lastValue; + address public lastCaller; + + constructor() { + owner = msg.sender; + } + + // This function calls back to the caller to verify authorization + // Similar to how Morpho calls back to verify permissions + function executeWithCallback(uint256 value) public returns (bool) { + // Call back to the msg.sender to verify authorization + bool authorized = IAuthorizationCallback(msg.sender).onAuthorization(msg.sender, value); + + if (authorized) { + lastValue = value; + lastCaller = msg.sender; + return true; + } + + return false; + } + + function getLastValue() public view returns (uint256) { + return lastValue; + } + + function getLastCaller() public view returns (address) { + return lastCaller; + } +} + contract EvmReviveMigrationTest is DSTest { Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code"))))); address alice = address(0x1111); @@ -137,4 +198,49 @@ contract EvmReviveMigrationTest is DSTest { uint256 finalReviveTimestamp = block.timestamp; assertEq(finalReviveTimestamp, newEvmTimestamp, "Timestamp should migrate from EVM to Revive"); } + + function testImmutablesMigration() public { + vm.pvm(false); + + uint256 deploymentTimestamp = 1234567890; + vm.warp(deploymentTimestamp); + uint256 magicNumber = 0x42424242; + StorageWithImmutables immutableContract = new StorageWithImmutables(magicNumber); + + vm.makePersistent(address(immutableContract)); + + assertEq(immutableContract.getDeployedAt(), deploymentTimestamp, "Deployed timestamp should 
match in EVM"); + assertEq(immutableContract.getDeployer(), address(this), "Deployer should match in EVM"); + assertEq(immutableContract.getMagicNumber(), magicNumber, "Magic number should match in EVM"); + + vm.pvm(true); + + assertEq( + immutableContract.getDeployedAt(), deploymentTimestamp, "Deployed timestamp should be preserved in Revive" + ); + assertEq(immutableContract.getDeployer(), address(this), "Deployer should be preserved in Revive"); + assertEq(immutableContract.getMagicNumber(), magicNumber, "Magic number should be preserved in Revive"); + } + + // Implement the authorization callback interface + function onAuthorization(address caller, uint256 value) external returns (bool) { + // Simple authorization: allow if value is less than 1000 + return value < 1000; + } + + function testCallbackFromRevive() public { + CallbackContract callbackContract = new CallbackContract(); + // Try to execute with authorized value (should succeed) + uint256 authorizedValue = 500; + bool result = callbackContract.executeWithCallback(authorizedValue); + assertTrue(result, "Authorized callback should succeed"); + assertEq(callbackContract.getLastValue(), authorizedValue, "Last value should be updated"); + assertEq(callbackContract.getLastCaller(), address(this), "Last caller should be test contract"); + + // Try to execute with unauthorized value (should fail) + uint256 unauthorizedValue = 1500; + bool result2 = callbackContract.executeWithCallback(unauthorizedValue); + assertTrue(!result2, "Unauthorized callback should fail"); + assertEq(callbackContract.getLastValue(), authorizedValue, "Last value should not be updated"); + } } diff --git a/testdata/default/revive/GasMetering.t.sol b/testdata/default/revive/GasMetering.t.sol new file mode 100644 index 0000000000000..4328464e3d3a0 --- /dev/null +++ b/testdata/default/revive/GasMetering.t.sol @@ -0,0 +1,91 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; + +contract Worker { + uint256 public result; + + function doWork() public returns (uint256) { + uint256 sum = 0; + for (uint256 i = 0; i < 100; i++) { + sum += i; + } + result = sum; + return sum; + } + + function expensiveWork() public returns (uint256) { + uint256 sum = 0; + for (uint256 i = 0; i < 1000; i++) { + sum += i; + } + result = sum; + return sum; + } +} + +contract GasMeteringTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + Worker public worker; + + function setUp() public { + vm.pvm(true); + worker = new Worker(); + } + + function testPauseGasMeteringWithPvmCall() public { + uint256 gasStart = gasleft(); + worker.doWork(); + uint256 gasUsedNormal = gasStart - gasleft(); + + vm.pauseGasMetering(); + uint256 gasPausedStart = gasleft(); + worker.doWork(); + uint256 gasUsedPaused = gasPausedStart - gasleft(); + vm.resumeGasMetering(); + + assertTrue(gasUsedNormal > 0); + assertEq(gasUsedPaused, 0); + } + + function testResumeGasMeteringWithPvmCall() public { + vm.pauseGasMetering(); + worker.doWork(); + vm.resumeGasMetering(); + + uint256 gasStart = gasleft(); + worker.doWork(); + uint256 gasUsed = gasStart - gasleft(); + + assertTrue(gasUsed > 0); + } + + function testResetGasMeteringWithPvmCall() public { + uint256 gasStart = gasleft(); + worker.expensiveWork(); + uint256 gasAfterWork = gasleft(); + uint256 gasConsumed = gasStart - gasAfterWork; + + vm.resetGasMetering(); + uint256 gasAfterReset = gasleft(); + + assertTrue(gasAfterReset > gasAfterWork); + uint256 gasRecovered = gasAfterReset - 
gasAfterWork; + assertTrue(gasRecovered > gasConsumed / 2); + } + + function testCreateDuringPausedMetering() public { + vm.pauseGasMetering(); + uint256 gasStart = gasleft(); + + Worker newWorker = new Worker(); + newWorker.doWork(); + + uint256 gasUsed = gasStart - gasleft(); + vm.resumeGasMetering(); + + assertEq(gasUsed, 0); + } +} diff --git a/testdata/default/revive/MockCall.t.sol b/testdata/default/revive/MockCall.t.sol new file mode 100644 index 0000000000000..43b04d5d5c29b --- /dev/null +++ b/testdata/default/revive/MockCall.t.sol @@ -0,0 +1,422 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract Mock { + uint256 state = 0; + + function numberA() public pure returns (uint256) { + return 1; + } + + function numberB() public pure returns (uint256) { + return 2; + } + + function numberBPayable() public payable returns (uint256) { + return 2; + } + + function add(uint256 a, uint256 b) public pure returns (uint256) { + return a + b; + } + + function pay(uint256 a) public payable returns (uint256) { + return a; + } + + function noReturnValue() public { + // Does nothing of value, but also ensures that Solidity will 100% + // generate an `extcodesize` check. + state += 1; + } +} + +contract NestedMock { + Mock private inner; + + constructor(Mock _inner) { + inner = _inner; + } + + function sum() public view returns (uint256) { + return inner.numberA() + inner.numberB(); + } + + function sumPay() public returns (uint256) { + return inner.numberA() + inner.numberBPayable{value: 10}(); + } +} + +contract NestedMockDelegateCall { + Mock private inner; + + constructor(Mock _inner) { + inner = _inner; + } + + function sum() public returns (uint256) { + (, bytes memory dataA) = address(inner).delegatecall(abi.encodeWithSelector(Mock.numberA.selector)); + (, bytes memory dataB) = address(inner).delegatecall(abi.encodeWithSelector(Mock.numberB.selector)); + return abi.decode(dataA, (uint256)) + abi.decode(dataB, (uint256)); + } +} + +contract MockCallTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testMockGetters() public { + vm.pvm(true); + Mock target = new Mock(); + + // pre-mock + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + + vm.mockCall(address(target), abi.encodeWithSelector(target.numberB.selector), abi.encode(10)); + + // post-mock + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 10); + } + + function testMockNestedSimple() public { + vm.pvm(true); + + Mock inner = new Mock(); + NestedMock target = new NestedMock(inner); + + // pre-mock + assertEq(target.sum(), 3); + console.log("SUM BEFORE MOCK", address(inner)); + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberB.selector), abi.encode(9)); + + // post-mock + assertEq(target.sum(), 10); + } + + function testMockNestedEmptyAccount() public { + vm.pvm(true); + + Mock inner = Mock(address(100)); + NestedMock target = new NestedMock(inner); + + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberB.selector), abi.encode(9)); + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberA.selector), abi.encode(1)); + + // post-mock + assertEq(target.sum(), 10); + } + + function testMockNestedPayDoesntTransfer() public { + vm.pvm(true); + + Mock inner = new Mock(); + NestedMock target = new NestedMock(inner); + + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberBPayable.selector), abi.encode(9)); + // Check balance of 
inner before and after call to ensure no ETH was transferred + uint256 balance_before = address(inner).balance; + assertEq(target.sumPay(), 10); + uint256 balance_after = address(inner).balance; + assertEq(balance_before, balance_after); + } + + // Ref: https://github.com/foundry-rs/foundry/issues/8066 + function testMockNestedDelegate() public { + vm.pvm(true); + + Mock inner = new Mock(); + NestedMockDelegateCall target = new NestedMockDelegateCall(inner); + + assertEq(target.sum(), 3); + + vm.mockCall(address(inner), abi.encodeWithSelector(inner.numberB.selector), abi.encode(9)); + + assertEq(target.sum(), 10); + } + + function testMockSelector() public { + vm.pvm(true); + + Mock target = new Mock(); + assertEq(target.add(5, 5), 10); + + vm.mockCall(address(target), abi.encodeWithSelector(target.add.selector), abi.encode(11)); + + assertEq(target.add(5, 5), 11); + } + + function testMockCalldata() public { + vm.pvm(true); + + Mock target = new Mock(); + assertEq(target.add(5, 5), 10); + assertEq(target.add(6, 4), 10); + + vm.mockCall(address(target), abi.encodeWithSelector(target.add.selector, 5, 5), abi.encode(11)); + + assertEq(target.add(5, 5), 11); + assertEq(target.add(6, 4), 10); + } + + function testClearMockedCalls() public { + vm.pvm(true); + + Mock target = new Mock(); + + vm.mockCall(address(target), abi.encodeWithSelector(target.numberB.selector), abi.encode(10)); + + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 10); + + vm.clearMockedCalls(); + + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + } + + function testMockCallMultiplePartialMatch() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector), abi.encode(10)); + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector, 2), abi.encode(20)); + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector, 2, 3), abi.encode(30)); + + assertEq(mock.add(1, 2), 10); + assertEq(mock.add(2, 2), 20); + assertEq(mock.add(2, 3), 30); + } + + function testMockCallWithValue() public { + vm.pvm(true); + Mock mock = new Mock(); + + vm.mockCall(address(mock), 10, abi.encodeWithSelector(mock.pay.selector), abi.encode(10)); + + assertEq(mock.pay{value: 10}(1), 10); + assertEq(mock.pay(1), 1); + + for (uint256 i = 0; i < 100; i++) { + vm.mockCall(address(mock), i, abi.encodeWithSelector(mock.pay.selector), abi.encode(i * 2)); + } + + assertEq(mock.pay(1), 0); + assertEq(mock.pay{value: 10}(1), 20); + assertEq(mock.pay{value: 50}(1), 100); + } + + function testMockCallWithValueCalldataPrecedence() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCall(address(mock), 10, abi.encodeWithSelector(mock.pay.selector), abi.encode(10)); + vm.mockCall(address(mock), abi.encodeWithSelector(mock.pay.selector, 2), abi.encode(2)); + + assertEq(mock.pay{value: 10}(1), 10); + assertEq(mock.pay{value: 10}(2), 2); + assertEq(mock.pay(2), 2); + } + + function testMockCallEmptyAccount() public { + vm.pvm(true); + + Mock mock = Mock(address(100)); + + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector), abi.encode(10)); + vm.mockCall(address(mock), mock.noReturnValue.selector, abi.encode()); + + assertEq(mock.add(1, 2), 10); + mock.noReturnValue(); + } +} + +contract MockCallRevertTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + error TestError(bytes msg); + + bytes constant ERROR_MESSAGE = "ERROR_MESSAGE"; + + function testMockGettersRevert() public { + vm.pvm(true); + + Mock target = new Mock(); + + 
// pre-mock + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + + vm.mockCallRevert(address(target), target.numberB.selector, ERROR_MESSAGE); + + // post-mock + assertEq(target.numberA(), 1); + try target.numberB() { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testMockRevertWithCustomError() public { + vm.pvm(true); + + Mock target = new Mock(); + + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + + bytes memory customError = abi.encodeWithSelector(TestError.selector, ERROR_MESSAGE); + + vm.mockCallRevert(address(target), abi.encodeWithSelector(target.numberB.selector), customError); + + assertEq(target.numberA(), 1); + try target.numberB() { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(customError)); + } + } + + function testMockNestedRevert() public { + vm.pvm(true); + Mock inner = new Mock(); + NestedMock target = new NestedMock(inner); + + assertEq(target.sum(), 3); + + vm.mockCallRevert(address(inner), abi.encodeWithSelector(inner.numberB.selector), ERROR_MESSAGE); + + try target.sum() { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testMockCalldataRevert() public { + vm.pvm(true); + + Mock target = new Mock(); + assertEq(target.add(5, 5), 10); + assertEq(target.add(6, 4), 10); + + vm.mockCallRevert(address(target), abi.encodeWithSelector(target.add.selector, 5, 5), ERROR_MESSAGE); + + assertEq(target.add(6, 4), 10); + + try target.add(5, 5) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testClearMockRevertedCalls() public { + vm.pvm(true); + + Mock target = new Mock(); + + vm.mockCallRevert(address(target), abi.encodeWithSelector(target.numberB.selector), ERROR_MESSAGE); + + vm.clearMockedCalls(); + + assertEq(target.numberA(), 1); + assertEq(target.numberB(), 2); + } + + function testMockCallRevertPartialMatch() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCallRevert(address(mock), abi.encodeWithSelector(mock.add.selector, 2), ERROR_MESSAGE); + + assertEq(mock.add(1, 2), 3); + + try mock.add(2, 3) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testMockCallRevertWithValue() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCallRevert(address(mock), 10, abi.encodeWithSelector(mock.pay.selector), ERROR_MESSAGE); + + assertEq(mock.pay(1), 1); + assertEq(mock.pay(2), 2); + + try mock.pay{value: 10}(1) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function testMockCallResetsMockCallRevert() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCallRevert(address(mock), abi.encodeWithSelector(mock.add.selector), ERROR_MESSAGE); + + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector), abi.encode(5)); + assertEq(mock.add(2, 3), 5); + } + + function testMockCallRevertResetsMockCall() public { + vm.pvm(true); + + Mock mock = new Mock(); + + vm.mockCall(address(mock), abi.encodeWithSelector(mock.add.selector), abi.encode(5)); + assertEq(mock.add(2, 3), 5); + + vm.mockCallRevert(address(mock), abi.encodeWithSelector(mock.add.selector), ERROR_MESSAGE); + + try mock.add(2, 3) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } + + function 
testMockCallRevertWithCall() public { + vm.pvm(true); + + Mock mock = new Mock(); + + bytes memory customError = abi.encodeWithSelector(TestError.selector, ERROR_MESSAGE); + + vm.mockCallRevert(address(mock), abi.encodeWithSelector(mock.add.selector), customError); + + (bool success, bytes memory data) = address(mock).call(abi.encodeWithSelector(Mock.add.selector, 2, 3)); + assertEq(success, false); + assertEq(data, customError); + } + + function testMockCallEmptyAccountRevert() public { + vm.pvm(true); + + Mock mock = Mock(address(100)); + + vm.mockCallRevert(address(mock), abi.encodeWithSelector(mock.add.selector), ERROR_MESSAGE); + + try mock.add(2, 3) { + revert(); + } catch (bytes memory err) { + require(keccak256(err) == keccak256(ERROR_MESSAGE)); + } + } +} diff --git a/testdata/default/revive/MockCalls.t.sol b/testdata/default/revive/MockCalls.t.sol new file mode 100644 index 0000000000000..445279db832e5 --- /dev/null +++ b/testdata/default/revive/MockCalls.t.sol @@ -0,0 +1,65 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract MockCallsTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testMockCallsLastShouldPersist() public { + vm.pvm(true); + address mockUser = vm.addr(vm.randomUint()); + address mockErc20 = vm.addr(vm.randomUint()); + bytes memory data = abi.encodeWithSignature("balanceOf(address)", mockUser); + bytes[] memory mocks = new bytes[](2); + mocks[0] = abi.encode(2 ether); + mocks[1] = abi.encode(7.219 ether); + vm.mockCalls(mockErc20, data, mocks); + (, bytes memory ret1) = mockErc20.call(data); + assertEq(abi.decode(ret1, (uint256)), 2 ether); + (, bytes memory ret2) = mockErc20.call(data); + assertEq(abi.decode(ret2, (uint256)), 7.219 ether); + (, bytes memory ret3) = mockErc20.call(data); + assertEq(abi.decode(ret3, (uint256)), 7.219 ether); + } + + function testMockCallsWithValue() public { + vm.pvm(true); + + address mockUser = vm.addr(vm.randomUint()); + address mockErc20 = vm.addr(vm.randomUint()); + bytes memory data = abi.encodeWithSignature("balanceOf(address)", mockUser); + bytes[] memory mocks = new bytes[](3); + mocks[0] = abi.encode(2 ether); + mocks[1] = abi.encode(1 ether); + mocks[2] = abi.encode(6.423 ether); + vm.mockCalls(mockErc20, 1 ether, data, mocks); + (, bytes memory ret1) = mockErc20.call{value: 1 ether}(data); + assertEq(abi.decode(ret1, (uint256)), 2 ether); + (, bytes memory ret2) = mockErc20.call{value: 1 ether}(data); + assertEq(abi.decode(ret2, (uint256)), 1 ether); + (, bytes memory ret3) = mockErc20.call{value: 1 ether}(data); + assertEq(abi.decode(ret3, (uint256)), 6.423 ether); + } + + function testMockCalls() public { + vm.pvm(true); + + address mockUser = vm.addr(vm.randomUint()); + address mockErc20 = vm.addr(vm.randomUint()); + bytes memory data = abi.encodeWithSignature("balanceOf(address)", mockUser); + bytes[] memory mocks = new bytes[](3); + mocks[0] = abi.encode(2 ether); + mocks[1] = abi.encode(1 ether); + mocks[2] = abi.encode(6.423 ether); + vm.mockCalls(mockErc20, data, mocks); + (, bytes memory ret1) = mockErc20.call(data); + assertEq(abi.decode(ret1, (uint256)), 2 ether); + (, bytes memory ret2) = mockErc20.call(data); + assertEq(abi.decode(ret2, (uint256)), 1 ether); + (, bytes memory ret3) = mockErc20.call(data); + assertEq(abi.decode(ret3, (uint256)), 6.423 ether); + } +} diff --git a/testdata/default/revive/MockFunction.t.sol 
b/testdata/default/revive/MockFunction.t.sol new file mode 100644 index 0000000000000..c3b99ad812215 --- /dev/null +++ b/testdata/default/revive/MockFunction.t.sol @@ -0,0 +1,75 @@ +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract MockFunctionContract { + uint256 public a; + + function mocked_function() public { + a = 321; + } + + function mocked_args_function(uint256 x) public { + a = 321 + x; + } +} + +contract ModelMockFunctionContract { + uint256 public a; + + function mocked_function() public { + a = 123; + } + + function mocked_args_function(uint256 x) public { + a = 123 + x; + } +} + +contract MockFunctionTest is DSTest { + MockFunctionContract my_contract; + ModelMockFunctionContract model_contract; + Vm vm = Vm(HEVM_ADDRESS); + + function setUp() public { + vm.pvm(true); + my_contract = new MockFunctionContract(); + model_contract = new ModelMockFunctionContract(); + } + + function test_mockx_function() public { + vm.mockFunction( + address(my_contract), + address(model_contract), + abi.encodeWithSelector(MockFunctionContract.mocked_function.selector) + ); + my_contract.mocked_function(); + assertEq(my_contract.a(), 123); + } + + function test_mock_function_concrete_args() public { + vm.mockFunction( + address(my_contract), + address(model_contract), + abi.encodeWithSelector(MockFunctionContract.mocked_args_function.selector, 456) + ); + my_contract.mocked_args_function(456); + assertEq(my_contract.a(), 123 + 456); + my_contract.mocked_args_function(567); + assertEq(my_contract.a(), 321 + 567); + } + + function test_mock_function_all_args() public { + vm.mockFunction( + address(my_contract), + address(model_contract), + abi.encodeWithSelector(MockFunctionContract.mocked_args_function.selector) + ); + my_contract.mocked_args_function(678); + assertEq(my_contract.a(), 123 + 678); + my_contract.mocked_args_function(789); + assertEq(my_contract.a(), 123 + 789); + } +} diff --git a/testdata/default/revive/Prank.t.sol b/testdata/default/revive/Prank.t.sol new file mode 100644 index 0000000000000..45383c1b62824 --- /dev/null +++ b/testdata/default/revive/Prank.t.sol @@ -0,0 +1,701 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; +import "../../default/logs/console.sol"; + +contract Victim { + function assertCallerAndOrigin( + address expectedSender, + string memory senderMessage, + address expectedOrigin, + string memory originMessage + ) public view { + require(msg.sender == expectedSender, senderMessage); + require(tx.origin == expectedOrigin, originMessage); + } +} + +contract ConstructorVictim is Victim { + constructor( + address expectedSender, + string memory senderMessage, + address expectedOrigin, + string memory originMessage + ) { + require(msg.sender == expectedSender, senderMessage); + require(tx.origin == expectedOrigin, originMessage); + } +} + +contract NestedVictim { + Victim innerVictim; + + constructor(Victim victim) { + innerVictim = victim; + } + + function assertCallerAndOrigin( + address expectedSender, + string memory senderMessage, + address expectedOrigin, + string memory originMessage + ) public view { + require(msg.sender == expectedSender, senderMessage); + require(tx.origin == expectedOrigin, originMessage); + innerVictim.assertCallerAndOrigin( + address(this), + "msg.sender was incorrectly set for nested victim", + expectedOrigin, + "tx.origin was incorrectly set for nested victim" + ); + } +} 
+ +contract NestedPranker { + Vm constant vm = Vm(address(bytes20(uint160(uint256(keccak256("hevm cheat code")))))); + + address newSender; + address newOrigin; + address oldOrigin; + + constructor(address _newSender, address _newOrigin) { + newSender = _newSender; + newOrigin = _newOrigin; + oldOrigin = tx.origin; + } + + function incompletePrank() public { + vm.startPrank(newSender, newOrigin); + } + + function completePrank(NestedVictim victim) public { + vm.pvm(true); + + victim.assertCallerAndOrigin( + newSender, "msg.sender was not set in nested prank", newOrigin, "tx.origin was not set in nested prank" + ); + + vm.pvm(false); + + vm.stopPrank(); + + vm.pvm(true); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), + "msg.sender was not cleaned up in nested prank", + oldOrigin, + "tx.origin was not cleaned up in nested prank" + ); + } +} + +contract ImplementationTest { + uint256 public num; + address public sender; + + function assertCorrectCaller(address expectedSender) public { + require(msg.sender == expectedSender); + } + + function assertCorrectOrigin(address expectedOrigin) public { + require(tx.origin == expectedOrigin); + } + + function setNum(uint256 _num) public { + num = _num; + } +} + +contract ProxyTest { + uint256 public num; + address public sender; +} + +contract PrankTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testPrankDelegateCallPrank2() public { + vm.pvm(true); + ProxyTest proxy = new ProxyTest(); + ImplementationTest impl = new ImplementationTest(); + vm.prank(address(proxy), true); + // console.log("Proxy address:", address(proxy)); + // console.log("Impl address:", address(impl)); + // console.log("THIS address:", address(this)); + // Assert correct `msg.sender` + (bool success,) = + address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", address(proxy))); + + require(success, "prank2: delegate call failed assertCorrectCaller"); + + // Assert storage updates + uint256 num = 42; + vm.prank(address(proxy), true); + (bool successTwo,) = address(impl).delegatecall(abi.encodeWithSignature("setNum(uint256)", num)); + require(successTwo, "prank2: delegate call failed setNum"); + require(proxy.num() == num, "prank2: proxy's storage was not set correctly"); + vm.stopPrank(); + } + + function testPrankDelegateCallStartPrank2() public { + vm.pvm(true); + ProxyTest proxy = new ProxyTest(); + ImplementationTest impl = new ImplementationTest(); + vm.startPrank(address(proxy), true); + + // Assert correct `msg.sender` + (bool success,) = + address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", address(proxy))); + require(success, "startPrank2: delegate call failed assertCorrectCaller"); + + // Assert storage updates + uint256 num = 42; + (bool successTwo,) = address(impl).delegatecall(abi.encodeWithSignature("setNum(uint256)", num)); + require(successTwo, "startPrank2: delegate call failed setNum"); + require(proxy.num() == num, "startPrank2: proxy's storage was not set correctly"); + vm.stopPrank(); + } + + function testPrankDelegateCallPrank3() public { + address origin = address(999); + vm.assume(isNotReserved(origin)); + vm.pvm(true); + ProxyTest proxy = new ProxyTest(); + ImplementationTest impl = new ImplementationTest(); + vm.prank(address(proxy), origin, true); + + // Assert correct `msg.sender` + (bool success,) = + address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", address(proxy))); + require(success, "prank3: delegate 
call failed assertCorrectCaller"); + + // Assert correct `tx.origin` + vm.prank(address(proxy), origin, true); + (bool successTwo,) = address(impl).delegatecall(abi.encodeWithSignature("assertCorrectOrigin(address)", origin)); + require(successTwo, "prank3: delegate call failed assertCorrectOrigin"); + + // Assert storage updates + uint256 num = 42; + vm.prank(address(proxy), address(origin), true); + (bool successThree,) = address(impl).delegatecall(abi.encodeWithSignature("setNum(uint256)", num)); + require(successThree, "prank3: delegate call failed setNum"); + require(proxy.num() == num, "prank3: proxy's storage was not set correctly"); + vm.stopPrank(); + } + + function testPrankDelegateCallStartPrank3(address origin) public { + vm.assume(isNotReserved(origin)); + vm.pvm(true); + + ProxyTest proxy = new ProxyTest(); + ImplementationTest impl = new ImplementationTest(); + vm.startPrank(address(proxy), origin, true); + + // Assert correct `msg.sender` + (bool success,) = + address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", address(proxy))); + require(success, "startPrank3: delegate call failed assertCorrectCaller"); + + // Assert correct `tx.origin` + (bool successTwo,) = address(impl).delegatecall(abi.encodeWithSignature("assertCorrectOrigin(address)", origin)); + require(successTwo, "startPrank3: delegate call failed assertCorrectOrigin"); + + // Assert storage updates + uint256 num = 42; + (bool successThree,) = address(impl).delegatecall(abi.encodeWithSignature("setNum(uint256)", num)); + require(successThree, "startPrank3: delegate call failed setNum"); + require(proxy.num() == num, "startPrank3: proxy's storage was not set correctly"); + vm.stopPrank(); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testRevertIfPrankDelegateCalltoEOA() public { + uint256 privateKey = uint256(keccak256(abi.encodePacked("alice"))); + address alice = vm.addr(privateKey); + ImplementationTest impl = new ImplementationTest(); + vm.expectRevert("vm.prank: cannot `prank` delegate call from an EOA"); + vm.prank(alice, true); + // Should fail when EOA pranked with delegatecall. 
+ address(impl).delegatecall(abi.encodeWithSignature("assertCorrectCaller(address)", alice)); + } + + function testPrankSender(address sender) public { + vm.assume(isNotReserved(sender)); + // Perform the prank + vm.pvm(true); + + Victim victim = new Victim(); + vm.prank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", tx.origin, "tx.origin invariant failed" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", tx.origin, "tx.origin invariant failed" + ); + } + + function testPrankOrigin(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + address oldOrigin = tx.origin; + vm.pvm(true); + + // Perform the prank + Victim victim = new Victim(); + vm.prank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin was not cleaned up" + ); + } + + function testPrank1AfterPrank0(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + address oldOrigin = tx.origin; + vm.pvm(true); + + Victim victim = new Victim(); + vm.prank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", oldOrigin, "tx.origin was not set during prank" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + + // Overwrite the prank + vm.prank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin invariant failed" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + function isNotReserved(address addr) internal returns (bool) { + // Check for zero address and common precompiles (addresses 1-9) + if ( + addr == address(0) || addr == address(1) || addr == address(2) || addr == address(3) || addr == address(4) + || addr == address(5) || addr == address(6) || addr == address(7) || addr == address(8) + || addr == address(9) || addr == address(10) || addr == address(11) || addr == address(12) + || addr == address(13) || addr == address(14) || addr == address(15) || addr == address(this) + ) { + return false; + } + return true; + } + + function testPrank0AfterPrank1(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + address oldOrigin = tx.origin; + vm.pvm(true); + Victim victim = new Victim(); + console.log("Balance of sender before prank:", sender.balance); + console.log("Balance of origin before prank:", origin.balance); + vm.prank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + console.log("After first prank - msg.sender:", address(this)); + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + + // Overwrite the prank + vm.prank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", 
oldOrigin, "tx.origin invariant failed" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + function testStartPrank0AfterPrank1(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + + // Perform the prank + vm.pvm(true); + address oldOrigin = tx.origin; + Victim victim = new Victim(); + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + // Overwrite the prank + vm.startPrank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", oldOrigin, "tx.origin invariant failed" + ); + + vm.stopPrank(); + // Ensure we cleaned up correctly after stopping the prank + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + function testStartPrank1AfterStartPrank0(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + // Perform the prank + address oldOrigin = tx.origin; + Victim victim = new Victim(); + vm.startPrank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", oldOrigin, "tx.origin was set during prank incorrectly" + ); + + // Ensure prank is still up as startPrank covers multiple calls + victim.assertCallerAndOrigin( + sender, "msg.sender was cleaned up incorrectly", oldOrigin, "tx.origin invariant failed" + ); + + // Overwrite the prank + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin(sender, "msg.sender was not set during prank", origin, "tx.origin was not set"); + + // Ensure prank is still up as startPrank covers multiple calls + victim.assertCallerAndOrigin( + sender, "msg.sender was cleaned up incorrectly", origin, "tx.origin invariant failed" + ); + + vm.stopPrank(); + // Ensure everything is back to normal after stopPrank + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testRevertIfOverwriteUnusedPrank(address sender, address origin) public { + // Set the prank, but not use it + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + address oldOrigin = tx.origin; + Victim victim = new Victim(); + vm.startPrank(sender, origin); + // try to overwrite the prank. This should fail. + vm.expectRevert("vm.startPrank: cannot overwrite a prank until it is applied at least once"); + vm.startPrank(address(this), origin); + } + + /// forge-config: default.allow_internal_expect_revert = true + function testRevertIfOverwriteUnusedPrankAfterSuccessfulPrank(address sender, address origin) public { + // Set the prank, but not use it + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Set the prank, but not use it + address oldOrigin = tx.origin; + vm.pvm(true); + Victim victim = new Victim(); + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was set during prank incorrectly" + ); + vm.startPrank(address(this), origin); + // try to overwrite the prank. This should fail. 
+ vm.expectRevert("vm.startPrank: cannot overwrite a prank until it is applied at least once"); + vm.startPrank(sender, origin); + } + + function testStartPrank0AfterStartPrank1(address sender, address origin) public { + // Perform the prank + // Set the prank, but not use it + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + address oldOrigin = tx.origin; + Victim victim = new Victim(); + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + // Ensure prank is still ongoing as we haven't called stopPrank + victim.assertCallerAndOrigin( + sender, "msg.sender was cleaned up incorrectly", origin, "tx.origin was cleaned up incorrectly" + ); + + // Overwrite the prank + vm.startPrank(sender); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", oldOrigin, "tx.origin was not reset correctly" + ); + + vm.stopPrank(); + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin invariant failed" + ); + } + + function testPrankConstructorSender(address sender) public { + // Set the prank, but not use it + vm.assume(isNotReserved(sender)); + // Perform the prank + vm.pvm(true); + vm.prank(sender); + ConstructorVictim victim = new ConstructorVictim( + sender, "msg.sender was not set during prank", tx.origin, "tx.origin invariant failed" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", tx.origin, "tx.origin invariant failed" + ); + } + + function testPrankConstructorOrigin(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + // Perform the prank + vm.prank(sender, origin); + ConstructorVictim victim = new ConstructorVictim( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", tx.origin, "tx.origin was not cleaned up" + ); + } + + function testPrankStartStop(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + address oldOrigin = tx.origin; + + // Perform the prank + Victim victim = new Victim(); + vm.startPrank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + victim.assertCallerAndOrigin( + sender, + "msg.sender was not set during prank (call 2)", + origin, + "tx.origin was not set during prank (call 2)" + ); + vm.stopPrank(); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", oldOrigin, "tx.origin was not cleaned up" + ); + } + + function testPrankStartStopConstructor(address sender, address origin) public { + // Perform the prank + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + vm.startPrank(sender, origin); + ConstructorVictim victim = new ConstructorVictim( + sender, "msg.sender was not set during prank", origin, "tx.origin was not set during prank" + ); + new ConstructorVictim( + sender, + "msg.sender was not set during prank (call 2)", + origin, + 
"tx.origin was not set during prank (call 2)" + ); + vm.stopPrank(); + + // Ensure we cleaned up correctly + victim.assertCallerAndOrigin( + address(this), "msg.sender was not cleaned up", tx.origin, "tx.origin was not cleaned up" + ); + } + + /// This test checks that depth is working correctly with respect + /// to the `startPrank` and `stopPrank` cheatcodes. + /// + /// The nested pranker calls `startPrank` but does not call + /// `stopPrank` at first. + /// + /// Then, we call our victim from the main test: this call + /// should NOT have altered `msg.sender` or `tx.origin`. + /// + /// Then, the nested pranker will complete their prank: this call + /// SHOULD have altered `msg.sender` and `tx.origin`. + /// + /// Each call to the victim calls yet another victim. The expected + /// behavior for this call is that `tx.origin` is altered when + /// the nested pranker calls, otherwise not. In both cases, + /// `msg.sender` should be the address of the first victim. + /// + /// Success case: + /// + /// ┌────┐ ┌───────┐ ┌──────┐ ┌──────┐ ┌────────────┐ + /// │Test│ │Pranker│ │Vm│ │Victim│ │Inner Victim│ + /// └─┬──┘ └───┬───┘ └──┬───┘ └──┬───┘ └─────┬──────┘ + /// │ │ │ │ │ + /// │incompletePrank()│ │ │ │ + /// │────────────────>│ │ │ │ + /// │ │ │ │ │ + /// │ │startPrank()│ │ │ + /// │ │───────────>│ │ │ + /// │ │ │ │ │ + /// │ should not be pranked│ │ │ + /// │──────────────────────────────────────>│ │ + /// │ │ │ │ │ + /// │ │ │ │ should not be pranked │ + /// │ │ │ │────────────────────────>│ + /// │ │ │ │ │ + /// │ completePrank() │ │ │ │ + /// │────────────────>│ │ │ │ + /// │ │ │ │ │ + /// │ │ should be pranked │ │ + /// │ │────────────────────>│ │ + /// │ │ │ │ │ + /// │ │ │ │only tx.origin is pranked│ + /// │ │ │ │────────────────────────>│ + /// │ │ │ │ │ + /// │ │stopPrank() │ │ │ + /// │ │───────────>│ │ │ + /// │ │ │ │ │ + /// │ │should not be pranked│ │ + /// │ │────────────────────>│ │ + /// │ │ │ │ │ + /// │ │ │ │ should not be pranked │ + /// │ │ │ │────────────────────────>│ + /// ┌─┴──┐ ┌───┴───┐ ┌──┴───┐ ┌──┴───┐ ┌─────┴──────┐ + /// │Test│ │Pranker│ │Vm│ │Victim│ │Inner Victim│ + /// └────┘ └───────┘ └──────┘ └──────┘ └────────────┘ + /// If this behavior is incorrectly implemented then the victim + /// will be pranked the first time it is called. + /// + /// !!!!! Currently failing until switch back to evm is added !!!! + // function testPrankComplex(address sender, address origin) public { + // vm.assume(isNotReserved(sender)); + // vm.assume(isNotReserved(origin)); + // // Perform the prank + // address oldOrigin = tx.origin; + + // NestedPranker pranker = new NestedPranker(sender, origin); + + // vm.pvm(true); + // Victim innerVictim = new Victim(); + // NestedVictim victim = new NestedVictim(innerVictim); + + // vm.pvm(false); + // pranker.incompletePrank(); + // vm.pvm(true); + + // victim.assertCallerAndOrigin( + // address(this), + // "msg.sender was altered at an incorrect depth", + // oldOrigin, + // "tx.origin was altered at an incorrect depth" + // ); + + // pranker.completePrank(victim); + // } + + /// Checks that `tx.origin` is set for all subcalls of a `prank`. 
+ /// + /// Ref: issue #1210 + function testTxOriginInNestedPrank(address sender, address origin) public { + vm.assume(isNotReserved(sender)); + vm.assume(isNotReserved(origin)); + // Perform the prank + vm.pvm(true); + address oldSender = msg.sender; + address oldOrigin = tx.origin; + + Victim innerVictim = new Victim(); + NestedVictim victim = new NestedVictim(innerVictim); + + vm.prank(sender, origin); + victim.assertCallerAndOrigin( + sender, "msg.sender was not set correctly", origin, "tx.origin was not set correctly" + ); + } +} + +contract Issue9990 is DSTest { + Vm constant vm = Vm(address(bytes20(uint160(uint256(keccak256("hevm cheat code")))))); + + // TODO: Enable when Etch support is merged. + // function testDelegatePrank() external { + // A a = new A(); + // vm.etch(address(0x11111), hex"11"); + // vm.startPrank(address(0x11111), true); + // (bool success,) = address(a).delegatecall(abi.encodeWithSelector(A.foo.selector)); + // require(success, "MyTest: error calling foo on A"); + // vm.stopPrank(); + // } +} + +// Contracts for DELEGATECALL test case: testDelegatePrank +contract A { + function foo() external { + require(address(0x11111) == msg.sender, "wrong msg.sender in A"); + require(address(0x11111) == address(this), "wrong address(this) in A"); + B b = new B(); + (bool success,) = address(b).call(abi.encodeWithSelector(B.bar.selector)); + require(success, "A: error calling B.bar"); + } +} + +contract B { + function bar() external { + require(address(0x11111) == msg.sender, "wrong msg.sender in B"); + require(0x769A6A5f81bD725e4302751162A7cb30482A222d == address(this), "wrong address(this) in B"); + C c = new C(); + (bool success,) = address(c).delegatecall(abi.encodeWithSelector(C.bar.selector)); + require(success, "B: error calling C.bar"); + } +} + +contract C { + function bar() external view { + require(address(0x11111) == msg.sender, "wrong msg.sender in C"); + require(0x769A6A5f81bD725e4302751162A7cb30482A222d == address(this), "wrong address(this) in C"); + } +} + +contract Counter { + uint256 number; + + function increment() external { + number++; + } +} diff --git a/testdata/default/revive/TxGasPrice.t.sol b/testdata/default/revive/TxGasPrice.t.sol new file mode 100644 index 0000000000000..aa107f4c8fb9a --- /dev/null +++ b/testdata/default/revive/TxGasPrice.t.sol @@ -0,0 +1,71 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; + +contract GasPriceChecker { + function getGasPrice() public view returns (uint256) { + return tx.gasprice; + } +} + +contract TxGasPriceTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testTxGasPriceWorks() public { + // Set a new gas price + uint256 newGasPrice = 100_000_000_000; // 100 gwei + vm.txGasPrice(newGasPrice); + + // Verify the gas price was updated + assertEq(tx.gasprice, newGasPrice, "gas price should be updated"); + } + + function testTxGasPriceWorksWithZero() public { + // Set gas price to zero + vm.txGasPrice(0); + + // Verify the gas price was updated to zero + assertEq(tx.gasprice, 0, "gas price should be zero"); + } + + function testTxGasPriceWorksWithLargeValue() public { + uint256 largeGasPrice = 1_000_000_000_000_000; // 1 million gwei + vm.txGasPrice(largeGasPrice); + + // Verify the gas price was updated + assertEq(tx.gasprice, largeGasPrice, "gas price should be updated to large value"); + } + + function testTxGasPriceWorksInBothModes() public { + // Test in EVM mode + vm.pvm(false); + uint256 evmGasPrice = 
50_000_000_000; // 50 gwei + vm.txGasPrice(evmGasPrice); + assertEq(tx.gasprice, evmGasPrice, "gas price should work in EVM mode"); + + // Test in PVM mode + vm.pvm(true); + uint256 pvmGasPrice = 75_000_000_000; // 75 gwei + vm.txGasPrice(pvmGasPrice); + assertEq(tx.gasprice, pvmGasPrice, "gas price should work in PVM mode"); + } + + function testTxGasPricePreservedInPvmContract() public { + // Set gas price in EVM mode + vm.pvm(false); + uint256 evmGasPrice = 50_000_000_000; // 50 gwei + vm.txGasPrice(evmGasPrice); + + // Switch to PVM mode (gas price should be preserved) + vm.pvm(true); + + // Deploy a contract in PVM mode - it should see the preserved gas price + GasPriceChecker checker = new GasPriceChecker(); + + // Call the contract - it should see the same gas price + uint256 gasPriceFromContract = checker.getGasPrice(); + assertEq(gasPriceFromContract, evmGasPrice, "gas price should be preserved in PVM contract"); + } +} From a9e48ffa8dc548bf0234fae7fbdac56eb039d841 Mon Sep 17 00:00:00 2001 From: Diego Date: Sun, 16 Nov 2025 16:15:22 -0300 Subject: [PATCH 26/44] Make checkpoint optional --- .../backend/forked_lazy_backend.rs | 14 ++-- .../lazy_loading/backend/mod.rs | 80 ++++++++++++------- .../lazy_loading/backend/tests.rs | 54 +++++++++++-- .../src/substrate_node/service/client.rs | 13 +-- 4 files changed, 109 insertions(+), 52 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs index 79c031636458f..61c25e383804c 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -112,7 +112,7 @@ impl sp_state_machine::StorageIterator sp_state_machine::StorageIterator sp_state_machine::StorageIterator { pub(crate) rpc_client: Option>>, pub(crate) block_hash: Option, - pub(crate) fork_block: Block::Hash, + pub(crate) fork_block: Option, pub(crate) db: Arc>>>, pub(crate) removed_keys: Arc, ()>>>, pub(crate) before_fork: bool, @@ -329,11 +329,9 @@ impl sp_state_machine::Backend Some(data), _ if !self.removed_keys.read().contains_key(key) => { - // Only try remote fetch if RPC client is available let result = - if self.rpc().is_some() { remote_fetch(Some(self.fork_block)) } else { None }; + if self.rpc().is_some() { remote_fetch(self.fork_block) } else { None }; - // Cache state drop(readable_db); self.update_storage(key, &result); @@ -376,7 +374,7 @@ impl sp_state_machine::Backend Ok(Some(hash)), _ if !self.removed_keys.read().contains_key(key) => { if self.rpc().is_some() { - remote_fetch(Some(self.fork_block)) + remote_fetch(self.fork_block) } else { Ok(None) } @@ -449,7 +447,7 @@ impl sp_state_machine::Backend { if self.rpc().is_some() { - remote_fetch(Some(self.fork_block)) + remote_fetch(self.fork_block) } else { None } diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 498947689dab0..7f940ef17aaaa 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -32,7 +32,7 @@ use crate::substrate_node::lazy_loading::rpc_client::RPCClient; pub struct Backend { pub(crate) rpc_client: Option>>, - pub(crate) fork_checkpoint: Block::Header, + pub(crate) fork_checkpoint: Option, states: RwLock>>, pub(crate) blockchain: 
Blockchain, import_lock: RwLock<()>, @@ -40,7 +40,7 @@ pub struct Backend { } impl Backend { - fn new(rpc_client: Option>>, fork_checkpoint: Block::Header) -> Self { + fn new(rpc_client: Option>>, fork_checkpoint: Option) -> Self { Self { rpc_client: rpc_client.clone(), states: Default::default(), @@ -140,10 +140,11 @@ impl backend::Backend for Backend backend::Backend for Backend sp_blockchain::Result { if hash == Default::default() { + let (fork_block, before_fork) = match &self.fork_checkpoint { + Some(checkpoint) => (Some(checkpoint.hash()), true), + None => (None, false), + }; + return Ok(ForkedLazyBackend:: { rpc_client: self.rpc_client.clone(), block_hash: Some(hash), - fork_block: self.fork_checkpoint.hash(), + fork_block, db: Default::default(), removed_keys: Default::default(), - before_fork: true, + before_fork, }); } @@ -218,29 +224,47 @@ impl backend::Backend for Backend { - rpc_client: self.rpc_client.clone(), - block_hash: Some(hash), - fork_block: checkpoint.hash(), - db: parent.clone().map_or(Default::default(), |p| p.db), - removed_keys: parent - .map_or(Default::default(), |p| p.removed_keys), - before_fork: false, + let state = match &self.fork_checkpoint { + Some(checkpoint) => { + if header.number().gt(checkpoint.number()) { + let parent = self + .state_at(*header.parent_hash(), TrieCacheContext::Trusted) + .ok(); + + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: Some(checkpoint.hash()), + db: parent.clone().map_or(Default::default(), |p| p.db), + removed_keys: parent + .map_or(Default::default(), |p| p.removed_keys), + before_fork: false, + } + } else { + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: Some(checkpoint.hash()), + db: Default::default(), + removed_keys: Default::default(), + before_fork: true, + } + } } - } else { - ForkedLazyBackend:: { - rpc_client: self.rpc_client.clone(), - block_hash: Some(hash), - fork_block: checkpoint.hash(), - db: Default::default(), - removed_keys: Default::default(), - before_fork: true, + None => { + let parent = self + .state_at(*header.parent_hash(), TrieCacheContext::Trusted) + .ok(); + + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: None, + db: parent.clone().map_or(Default::default(), |p| p.db), + removed_keys: parent + .map_or(Default::default(), |p| p.removed_keys), + before_fork: false, + } } }; @@ -468,7 +492,7 @@ impl backend::LocalBackend for Backend< pub fn new_backend( rpc_client: Option>>, - checkpoint: Block::Header, + checkpoint: Option, ) -> Result>, polkadot_sdk::sc_service::Error> where Block: BlockT + DeserializeOwned, diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs index 6e788ab305c0f..6c0750ce4b296 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -213,7 +213,7 @@ fn before_fork_reads_remote_only() { let rpc = std::sync::Arc::new(Rpc::new()); // fork checkpoint at #100 let cp = checkpoint(100); - let backend = Backend::::new(Some(rpc.clone()), cp); + let backend = Backend::::new(Some(rpc.clone()), Some(cp)); // state_at(Default::default()) => before_fork=true let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); @@ -237,7 +237,7 @@ fn before_fork_reads_remote_only() { fn 
after_fork_first_fetch_caches_subsequent_hits_local() { let rpc = std::sync::Arc::new(Rpc::new()); let cp = checkpoint(10); - let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); // Build a block #11 > checkpoint (#10), with parent #10 let parent = cp.hash(); @@ -272,7 +272,7 @@ fn after_fork_first_fetch_caches_subsequent_hits_local() { fn removed_keys_prevents_remote_fetch() { let rpc = std::sync::Arc::new(Rpc::new()); let cp = checkpoint(5); - let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); // make block #6 let b6 = make_block(6, cp.hash(), vec![]); @@ -298,7 +298,7 @@ fn removed_keys_prevents_remote_fetch() { fn raw_iter_merges_local_then_remote() { let rpc = std::sync::Arc::new(Rpc::new()); let cp = checkpoint(7); - let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); // block #8 let b8 = make_block(8, cp.hash(), vec![]); @@ -342,7 +342,7 @@ fn raw_iter_merges_local_then_remote() { fn blockchain_header_and_number_are_cached() { let rpc = std::sync::Arc::new(Rpc::new()); let cp = checkpoint(3); - let backend = Backend::::new(Some(rpc.clone()), cp.clone()); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); let chain = backend.blockchain(); // prepare one block w/ extrinsics @@ -363,3 +363,47 @@ fn blockchain_header_and_number_are_cached() { assert_eq!(number, 4); assert_eq!(calls_before, calls_after, "number() should be served from cache after header()"); } + +#[test] +fn no_fork_mode_uses_local_db_only() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + assert!(!state.before_fork); + + let key = b":test_key".to_vec(); + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, None); + + state.update_storage(&key, &Some(b"local_value".to_vec())); + + let v2 = state.storage(&key).unwrap(); + assert_eq!(v2, Some(b"local_value".to_vec())); +} + +#[test] +fn no_fork_mode_state_at_default() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + assert!(!state.before_fork); + assert_eq!(state.fork_block, None); + assert!(state.rpc_client.is_none()); +} + +#[test] +fn no_fork_mode_storage_operations() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + let key1 = b":key1".to_vec(); + let key2 = b":key2".to_vec(); + let key3 = b":key3".to_vec(); + + state.update_storage(&key1, &Some(b"value1".to_vec())); + state.update_storage(&key2, &Some(b"value2".to_vec())); + + assert_eq!(state.storage(&key1).unwrap(), Some(b"value1".to_vec())); + assert_eq!(state.storage(&key2).unwrap(), Some(b"value2".to_vec())); + assert_eq!(state.storage(&key3).unwrap(), None); +} diff --git a/crates/anvil-polkadot/src/substrate_node/service/client.rs b/crates/anvil-polkadot/src/substrate_node/service/client.rs index a0b4a6a162a84..3834a3883a3be 100644 --- a/crates/anvil-polkadot/src/substrate_node/service/client.rs +++ b/crates/anvil-polkadot/src/substrate_node/service/client.rs @@ -9,12 +9,11 @@ use crate::substrate_node::{ }; use parking_lot::Mutex; use polkadot_sdk::{ - parachains_common::opaque::{Block, Header}, + parachains_common::opaque::Block, sc_chain_spec::get_extension, sc_client_api::{BadBlocks, ForkBlocks, 
execution_extensions::ExecutionExtensions}, sc_service::{self, KeystoreContainer, LocalCallExecutor, TaskManager}, sp_keystore::KeystorePtr, - sp_runtime::traits::Header as HeaderT, }; use std::{collections::HashMap, sync::Arc}; use substrate_runtime::RuntimeApi; @@ -27,15 +26,7 @@ pub fn new_client( executor: WasmExecutor, storage_overrides: Arc>, ) -> Result<(Arc, Arc, KeystorePtr, TaskManager), sc_service::error::Error> { - let checkpoint = Header::new( - genesis_block_number.try_into().unwrap_or(0), - Default::default(), - Default::default(), - Default::default(), - Default::default(), - ); - - let backend = new_lazy_loading_backend(None, checkpoint)?; + let backend = new_lazy_loading_backend(None, None)?; let genesis_block_builder = DevelopmentGenesisBlockBuilder::new( genesis_block_number, From 818c802e263b89324637076091018f8fe48a119f Mon Sep 17 00:00:00 2001 From: Diego Date: Sun, 16 Nov 2025 16:36:37 -0300 Subject: [PATCH 27/44] Remove remove_leaf_block impl --- .../lazy_loading/backend/mod.rs | 150 +++++++----------- 1 file changed, 54 insertions(+), 96 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 7f940ef17aaaa..dccea9b0431ec 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -9,6 +9,7 @@ pub use forked_lazy_backend::ForkedLazyBackend; #[cfg(test)] mod tests; +use parking_lot::RwLock; use polkadot_sdk::{ sc_client_api::{ HeaderBackend, TrieCacheContext, UsageInfo, @@ -26,7 +27,6 @@ use std::{ collections::{HashMap, HashSet}, sync::Arc, }; -use parking_lot::RwLock; use crate::substrate_node::lazy_loading::rpc_client::RPCClient; @@ -40,7 +40,10 @@ pub struct Backend { } impl Backend { - fn new(rpc_client: Option>>, fork_checkpoint: Option) -> Self { + fn new( + rpc_client: Option>>, + fork_checkpoint: Option, + ) -> Self { Self { rpc_client: rpc_client.clone(), states: Default::default(), @@ -214,44 +217,23 @@ impl backend::Backend for Backend { - if header.number().gt(checkpoint.number()) { - let parent = self - .state_at(*header.parent_hash(), TrieCacheContext::Trusted) - .ok(); - - ForkedLazyBackend:: { - rpc_client: self.rpc_client.clone(), - block_hash: Some(hash), - fork_block: Some(checkpoint.hash()), - db: parent.clone().map_or(Default::default(), |p| p.db), - removed_keys: parent - .map_or(Default::default(), |p| p.removed_keys), - before_fork: false, - } - } else { - ForkedLazyBackend:: { - rpc_client: self.rpc_client.clone(), - block_hash: Some(hash), - fork_block: Some(checkpoint.hash()), - db: Default::default(), - removed_keys: Default::default(), - before_fork: true, - } - } - } - None => { + let (backend, should_write) = self + .states + .read() + .get(&hash) + .cloned() + .map(|state| Ok((state, false))) + .unwrap_or_else(|| { + self.rpc() + .and_then(|rpc| rpc.header(Some(hash)).ok()) + .flatten() + .ok_or(sp_blockchain::Error::UnknownBlock(format!( + "Failed to fetch block header: {hash:?}" + ))) + .map(|header| { + let state = match &self.fork_checkpoint { + Some(checkpoint) => { + if header.number().gt(checkpoint.number()) { let parent = self .state_at(*header.parent_hash(), TrieCacheContext::Trusted) .ok(); @@ -259,19 +241,43 @@ impl backend::Backend for Backend { rpc_client: self.rpc_client.clone(), block_hash: Some(hash), - fork_block: None, + fork_block: Some(checkpoint.hash()), db: parent.clone().map_or(Default::default(), 
|p| p.db), removed_keys: parent .map_or(Default::default(), |p| p.removed_keys), before_fork: false, } + } else { + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: Some(checkpoint.hash()), + db: Default::default(), + removed_keys: Default::default(), + before_fork: true, + } + } + } + None => { + let parent = self + .state_at(*header.parent_hash(), TrieCacheContext::Trusted) + .ok(); + + ForkedLazyBackend:: { + rpc_client: self.rpc_client.clone(), + block_hash: Some(hash), + fork_block: None, + db: parent.clone().map_or(Default::default(), |p| p.db), + removed_keys: parent + .map_or(Default::default(), |p| p.removed_keys), + before_fork: false, } - }; + } + }; - (state, true) - }) - }, - )?; + (state, true) + }) + })?; if should_write { self.states.write().insert(hash, backend.clone()); @@ -415,56 +421,8 @@ impl backend::Backend for Backend sp_blockchain::Result<()> { - let best_hash = self.blockchain.info().best_hash; - - if best_hash == hash { - return Err(sp_blockchain::Error::Backend( - format!("Can't remove best block {hash:?}",), - )); - } - - let mut storage = self.blockchain.storage.write(); - - let Some(block) = storage.blocks.get(&hash) else { - return Err(sp_blockchain::Error::UnknownBlock(format!("{hash:?}"))); - }; - - let number = *block.header().number(); - let parent_hash = *block.header().parent_hash(); - - if !storage.leaves.contains(number, hash) { - return Err(sp_blockchain::Error::Backend(format!( - "Can't remove non-leaf block {hash:?}", - ))); - } - - if self.pinned_blocks.read().get(&hash).is_some_and(|count| *count > 0) { - return Err(sp_blockchain::Error::Backend(format!( - "Can't remove pinned block {hash:?}", - ))); - } - - let parent_becomes_leaf = if number.is_zero() { - false - } else { - !storage.blocks.iter().any(|(other_hash, stored)| { - *other_hash != hash && stored.header().parent_hash() == &parent_hash - }) - }; - - let mut states = self.states.write(); - - storage.blocks.remove(&hash); - if let Some(entry) = storage.hashes.get(&number) { - if *entry == hash { - storage.hashes.remove(&number); - } - } - states.remove(&hash); - - storage.leaves.remove(hash, number, parent_becomes_leaf.then_some(parent_hash)); - + fn remove_leaf_block(&self, _hash: Block::Hash) -> sp_blockchain::Result<()> { + // Not used Ok(()) } From 84dff980bb7301e2bde1570c17601df687ad3323 Mon Sep 17 00:00:00 2001 From: Diego Date: Sun, 16 Nov 2025 20:27:58 -0300 Subject: [PATCH 28/44] Improve imports and test module --- .../lazy_loading/backend/blockchain.rs | 11 +- .../lazy_loading/backend/mod.rs | 2 +- .../lazy_loading/backend/tests.rs | 413 +++++++++--------- 3 files changed, 216 insertions(+), 210 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs index 53a3d9adf8268..8951cb04c189b 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs @@ -1,11 +1,11 @@ use crate::substrate_node::lazy_loading::{LAZY_LOADING_LOG_TARGET, rpc_client::RPCClient}; +use parking_lot::RwLock; use polkadot_sdk::{ sc_client_api::{ backend::{self, NewBlockState}, - blockchain::{self, BlockStatus, HeaderBackend}, leaves::LeafSet, }, - sp_blockchain::{self, CachedHeaderMetadata, HeaderMetadata}, + sp_blockchain::{self, BlockStatus, CachedHeaderMetadata, HeaderBackend, HeaderMetadata}, sp_runtime::{ Justification, 
Justifications, generic::BlockId, @@ -14,7 +14,6 @@ use polkadot_sdk::{ }; use serde::de::DeserializeOwned; use std::{collections::HashMap, sync::Arc}; -use parking_lot::RwLock; #[derive(PartialEq, Eq, Clone)] pub(crate) enum StoredBlock { @@ -290,7 +289,7 @@ impl HeaderBackend for Blockchain blockchain::Info { + fn info(&self) -> sp_blockchain::Info { let storage = self.storage.read(); let finalized_state = if storage.blocks.len() <= 1 { None @@ -298,7 +297,7 @@ impl HeaderBackend for Blockchain HeaderMetadata for Blockchain blockchain::Backend for Blockchain { +impl sp_blockchain::Backend for Blockchain { fn body( &self, hash: Block::Hash, diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index dccea9b0431ec..4fcc998d0300b 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -12,7 +12,7 @@ mod tests; use parking_lot::RwLock; use polkadot_sdk::{ sc_client_api::{ - HeaderBackend, TrieCacheContext, UsageInfo, + TrieCacheContext, UsageInfo, backend::{self, AuxStore}, }, sp_blockchain, diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs index 6c0750ce4b296..1e3fc9980edff 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -46,11 +46,9 @@ mod mock_rpc { /// storage[(block_hash, key)] = value pub storage: Arc>>, /// storage_hash[(block_hash, key)] = hash - pub storage_hashes: - Arc>>, + pub storage_hashes: Arc>>, /// storage_keys_paged[(block_hash, (prefix,start))] = Vec - pub storage_keys_pages: - Arc), Vec>>>, + pub storage_keys_pages: Arc), Vec>>>, /// headers[hash] = header pub headers: Arc>>, /// blocks[hash] = SignedBlock @@ -188,222 +186,231 @@ mod mock_rpc { } } -type N = u32; -type TestBlockT = TestBlock; +#[cfg(test)] +mod tests { + use super::*; + use polkadot_sdk::sc_client_api::HeaderBackend; -fn make_header(number: N, parent: ::Hash) -> TestHeader { - TestHeader::new(number, Default::default(), Default::default(), parent, Default::default()) -} + type N = u32; + type TestBlockT = TestBlock; -fn make_block( - number: N, - parent: ::Hash, - xts: Vec, -) -> TestBlock { - let header = make_header(number, parent); - TestBlock::new(header, xts) -} + fn make_header(number: N, parent: ::Hash) -> TestHeader { + TestHeader::new(number, Default::default(), Default::default(), parent, Default::default()) + } -fn checkpoint(n: N) -> TestHeader { - make_header(n, Default::default()) -} + fn make_block( + number: N, + parent: ::Hash, + xts: Vec, + ) -> TestBlock { + let header = make_header(number, parent); + TestBlock::new(header, xts) + } -#[test] -fn before_fork_reads_remote_only() { - let rpc = std::sync::Arc::new(Rpc::new()); - // fork checkpoint at #100 - let cp = checkpoint(100); - let backend = Backend::::new(Some(rpc.clone()), Some(cp)); - - // state_at(Default::default()) => before_fork=true - let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - - let key = b":foo".to_vec(); - // prepare remote value at "block_hash = Default::default()" - let at = Default::default(); - rpc.put_storage(at, StorageKey(key.clone()), StorageData(b"bar".to_vec())); - - // read storage - let v1 = state.storage(&key).unwrap(); - assert_eq!(v1, 
Some(b"bar".to_vec())); - - // not cached in DB: second read still goes to RPC - let v2 = state.storage(&key).unwrap(); - assert_eq!(v2, Some(b"bar".to_vec())); - assert!(rpc.counters.storage_calls.load(Ordering::Relaxed) >= 2); -} + fn checkpoint(n: N) -> TestHeader { + make_header(n, Default::default()) + } -#[test] -fn after_fork_first_fetch_caches_subsequent_hits_local() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(10); - let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); - - // Build a block #11 > checkpoint (#10), with parent #10 - let parent = cp.hash(); - let b11 = make_block(11, parent, vec![]); - let h11 = b11.header.hash(); - - rpc.put_header(b11.header.clone()); - rpc.put_block(b11, None); - - // remote storage at fork block (checkpoint hash) - let fork_hash = cp.hash(); - let key = b":k".to_vec(); - rpc.put_storage(fork_hash, StorageKey(key.clone()), StorageData(b"v".to_vec())); - - // Grab state_at(#11): after_fork=false; local DB empty - let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); - - // First read fetches remote and caches - let v1 = state.storage(&key).unwrap(); - assert_eq!(v1, Some(b"v".to_vec())); - - // Mutate RPC to detect second call (remove remote value) - // If second read still tries RPC, it would return None; but it should come from cache. - // So we do not change the mock; instead, assert RPC call count increases only once. - let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); - let _ = state.storage(&key).unwrap(); - let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); - assert_eq!(calls_before, calls_after, "second hit should be served from cache"); -} + #[test] + fn before_fork_reads_remote_only() { + let rpc = std::sync::Arc::new(Rpc::new()); + // fork checkpoint at #100 + let cp = checkpoint(100); + let backend = Backend::::new(Some(rpc.clone()), Some(cp)); + + // state_at(Default::default()) => before_fork=true + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + let key = b":foo".to_vec(); + // prepare remote value at "block_hash = Default::default()" + let at = Default::default(); + rpc.put_storage(at, StorageKey(key.clone()), StorageData(b"bar".to_vec())); + + // read storage + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"bar".to_vec())); + + // not cached in DB: second read still goes to RPC + let v2 = state.storage(&key).unwrap(); + assert_eq!(v2, Some(b"bar".to_vec())); + assert!(rpc.counters.storage_calls.load(Ordering::Relaxed) >= 2); + } -#[test] -fn removed_keys_prevents_remote_fetch() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(5); - let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); - - // make block #6 - let b6 = make_block(6, cp.hash(), vec![]); - rpc.put_header(b6.header.clone()); - rpc.put_block(b6.clone(), None); - let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); - - // mark key as removed - let key = b":dead".to_vec(); - state.removed_keys.write().insert(key.clone(), ()); - - // Even if remote has a value, backend must not fetch it - rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"ghost".to_vec())); - let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); - let v = state.storage(&key).unwrap(); - let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); - - assert!(v.is_none()); - assert_eq!(calls_before, calls_after, "should not call RPC for removed keys"); -} 
+ #[test] + fn after_fork_first_fetch_caches_subsequent_hits_local() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(10); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // Build a block #11 > checkpoint (#10), with parent #10 + let parent = cp.hash(); + let b11 = make_block(11, parent, vec![]); + let h11 = b11.header.hash(); + + rpc.put_header(b11.header.clone()); + rpc.put_block(b11, None); + + // remote storage at fork block (checkpoint hash) + let fork_hash = cp.hash(); + let key = b":k".to_vec(); + rpc.put_storage(fork_hash, StorageKey(key.clone()), StorageData(b"v".to_vec())); + + // Grab state_at(#11): after_fork=false; local DB empty + let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); + + // First read fetches remote and caches + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"v".to_vec())); + + // Mutate RPC to detect second call (remove remote value) + // If second read still tries RPC, it would return None; but it should come from cache. + // So we do not change the mock; instead, assert RPC call count increases only once. + let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); + let _ = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); + assert_eq!(calls_before, calls_after, "second hit should be served from cache"); + } -#[test] -fn raw_iter_merges_local_then_remote() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(7); - let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); - - // block #8 - let b8 = make_block(8, cp.hash(), vec![]); - rpc.put_header(b8.header.clone()); - rpc.put_block(b8.clone(), None); - let state = backend.state_at(b8.header.hash(), TrieCacheContext::Trusted).unwrap(); - - // Preload local DB with key "a1" - state.update_storage(b"a1", &Some(b"v1".to_vec())); - - // Ensure storage_root is computed to make the key visible to raw_iter - let _ = state - .db - .write() - .storage_root(vec![(b"a1".as_ref(), Some(b"v1".as_ref()))].into_iter(), StateVersion::V1); - - // Remote has only "a2" under same prefix at fork block (not "a1") - rpc.put_storage_keys_page(cp.hash(), b"a".to_vec(), vec![StorageKey(b"a2".to_vec())]); - rpc.put_storage(cp.hash(), StorageKey(b"a2".to_vec()), StorageData(b"v2".to_vec())); - - let mut args = polkadot_sdk::sp_state_machine::IterArgs::default(); - args.prefix = Some(&b"a"[..]); - let mut it = state.raw_iter(args).unwrap(); - - // next_pair should return ("a1","v1") from local - let p1 = it.next_pair(&state).unwrap().unwrap(); - assert_eq!(p1.0, b"a1".to_vec()); - assert_eq!(p1.1, b"v1".to_vec()); - - // next_pair should now bring remote ("a2","v2") - let p2 = it.next_pair(&state).unwrap().unwrap(); - assert_eq!(p2.0, b"a2".to_vec()); - assert_eq!(p2.1, b"v2".to_vec()); - - // done - assert!(it.next_pair(&state).is_none()); - assert!(it.was_complete()); -} + #[test] + fn removed_keys_prevents_remote_fetch() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(5); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // make block #6 + let b6 = make_block(6, cp.hash(), vec![]); + rpc.put_header(b6.header.clone()); + rpc.put_block(b6.clone(), None); + let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); + + // mark key as removed + let key = b":dead".to_vec(); + state.removed_keys.write().insert(key.clone(), ()); + + // Even if remote has a value, backend must not fetch it + 
rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"ghost".to_vec())); + let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); + let v = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); + + assert!(v.is_none()); + assert_eq!(calls_before, calls_after, "should not call RPC for removed keys"); + } -#[test] -fn blockchain_header_and_number_are_cached() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(3); - let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); - let chain = backend.blockchain(); - - // prepare one block w/ extrinsics - let xts: Vec = vec![]; - let b4 = make_block(4, cp.hash(), xts); - let h4 = b4.header().hash(); - rpc.put_block(b4, None); - - // first header() fetches RPC and caches as Full - let h = chain.header(h4).unwrap().unwrap(); - assert_eq!(h.hash(), h4); - - // number() should now return from cache (no extra RPC needed) - let calls_before = rpc.counters.block_calls.load(Ordering::Relaxed); - let number = chain.number(h4).unwrap().unwrap(); - let calls_after = rpc.counters.block_calls.load(Ordering::Relaxed); - - assert_eq!(number, 4); - assert_eq!(calls_before, calls_after, "number() should be served from cache after header()"); -} + #[test] + fn raw_iter_merges_local_then_remote() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(7); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // block #8 + let b8 = make_block(8, cp.hash(), vec![]); + rpc.put_header(b8.header.clone()); + rpc.put_block(b8.clone(), None); + let state = backend.state_at(b8.header.hash(), TrieCacheContext::Trusted).unwrap(); + + // Preload local DB with key "a1" + state.update_storage(b"a1", &Some(b"v1".to_vec())); + + // Ensure storage_root is computed to make the key visible to raw_iter + let _ = state.db.write().storage_root( + vec![(b"a1".as_ref(), Some(b"v1".as_ref()))].into_iter(), + StateVersion::V1, + ); + + // Remote has only "a2" under same prefix at fork block (not "a1") + rpc.put_storage_keys_page(cp.hash(), b"a".to_vec(), vec![StorageKey(b"a2".to_vec())]); + rpc.put_storage(cp.hash(), StorageKey(b"a2".to_vec()), StorageData(b"v2".to_vec())); + + let mut args = polkadot_sdk::sp_state_machine::IterArgs::default(); + args.prefix = Some(&b"a"[..]); + let mut it = state.raw_iter(args).unwrap(); + + // next_pair should return ("a1","v1") from local + let p1 = it.next_pair(&state).unwrap().unwrap(); + assert_eq!(p1.0, b"a1".to_vec()); + assert_eq!(p1.1, b"v1".to_vec()); + + // next_pair should now bring remote ("a2","v2") + let p2 = it.next_pair(&state).unwrap().unwrap(); + assert_eq!(p2.0, b"a2".to_vec()); + assert_eq!(p2.1, b"v2".to_vec()); + + // done + assert!(it.next_pair(&state).is_none()); + assert!(it.was_complete()); + } + + #[test] + fn blockchain_header_and_number_are_cached() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(3); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + let chain = backend.blockchain(); + + // prepare one block w/ extrinsics + let xts: Vec = vec![]; + let b4 = make_block(4, cp.hash(), xts); + let h4 = b4.header().hash(); + rpc.put_block(b4, None); + + // first header() fetches RPC and caches as Full + let h = chain.header(h4).unwrap().unwrap(); + assert_eq!(h.hash(), h4); + + // number() should now return from cache (no extra RPC needed) + let calls_before = rpc.counters.block_calls.load(Ordering::Relaxed); + let number = chain.number(h4).unwrap().unwrap(); + 
let calls_after = rpc.counters.block_calls.load(Ordering::Relaxed); + + assert_eq!(number, 4); + assert_eq!( + calls_before, calls_after, + "number() should be served from cache after header()" + ); + } -#[test] -fn no_fork_mode_uses_local_db_only() { - let backend = Backend::::new(None, None); - let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + #[test] + fn no_fork_mode_uses_local_db_only() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - assert!(!state.before_fork); + assert!(!state.before_fork); - let key = b":test_key".to_vec(); - let v1 = state.storage(&key).unwrap(); - assert_eq!(v1, None); + let key = b":test_key".to_vec(); + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, None); - state.update_storage(&key, &Some(b"local_value".to_vec())); + state.update_storage(&key, &Some(b"local_value".to_vec())); - let v2 = state.storage(&key).unwrap(); - assert_eq!(v2, Some(b"local_value".to_vec())); -} + let v2 = state.storage(&key).unwrap(); + assert_eq!(v2, Some(b"local_value".to_vec())); + } -#[test] -fn no_fork_mode_state_at_default() { - let backend = Backend::::new(None, None); - let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + #[test] + fn no_fork_mode_state_at_default() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - assert!(!state.before_fork); - assert_eq!(state.fork_block, None); - assert!(state.rpc_client.is_none()); -} + assert!(!state.before_fork); + assert_eq!(state.fork_block, None); + assert!(state.rpc_client.is_none()); + } -#[test] -fn no_fork_mode_storage_operations() { - let backend = Backend::::new(None, None); - let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + #[test] + fn no_fork_mode_storage_operations() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - let key1 = b":key1".to_vec(); - let key2 = b":key2".to_vec(); - let key3 = b":key3".to_vec(); + let key1 = b":key1".to_vec(); + let key2 = b":key2".to_vec(); + let key3 = b":key3".to_vec(); - state.update_storage(&key1, &Some(b"value1".to_vec())); - state.update_storage(&key2, &Some(b"value2".to_vec())); + state.update_storage(&key1, &Some(b"value1".to_vec())); + state.update_storage(&key2, &Some(b"value2".to_vec())); - assert_eq!(state.storage(&key1).unwrap(), Some(b"value1".to_vec())); - assert_eq!(state.storage(&key2).unwrap(), Some(b"value2".to_vec())); - assert_eq!(state.storage(&key3).unwrap(), None); + assert_eq!(state.storage(&key1).unwrap(), Some(b"value1".to_vec())); + assert_eq!(state.storage(&key2).unwrap(), Some(b"value2".to_vec())); + assert_eq!(state.storage(&key3).unwrap(), None); + } } From 4a3e189056d779463e90ae94d3bb75d031dce3b8 Mon Sep 17 00:00:00 2001 From: Diego Date: Sun, 16 Nov 2025 21:15:14 -0300 Subject: [PATCH 29/44] Improve genesis block check --- .../lazy_loading/backend/mod.rs | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 4fcc998d0300b..6d37af4245313 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -314,10 +314,10 @@ 
impl backend::Backend for Backend backend::Backend for Backend backend::Backend for Backend backend::Backend for Backend Zero::zero() + // Get the genesis block number to use as lower bound + let genesis_number = storage + .blocks + .get(&storage.genesis_hash) + .map(|block| *block.header().number()) + .unwrap_or(Zero::zero()); + + // Decrement finalized_number until we find a valid block, but don't go below genesis + while storage.finalized_number > genesis_number && !storage.hashes.contains_key(&storage.finalized_number) { storage.finalized_number = storage.finalized_number.saturating_sub(One::one()); From 27844e8fd209898262950ab865ce0c3ea3849221 Mon Sep 17 00:00:00 2001 From: Diego Date: Sun, 16 Nov 2025 21:20:48 -0300 Subject: [PATCH 30/44] Return early if we want to revert 0 blocks --- .../src/substrate_node/lazy_loading/backend/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 6d37af4245313..7950c9cc6be06 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -293,7 +293,7 @@ impl backend::Backend for Backend sp_blockchain::Result<(NumberFor, HashSet)> { let mut storage = self.blockchain.storage.write(); - if storage.blocks.is_empty() { + if storage.blocks.is_empty() || n.is_zero() { return Ok((Zero::zero(), HashSet::new())); } From 54ce24170a406afcfc7873a845f8122ef5072d32 Mon Sep 17 00:00:00 2001 From: Diego Date: Sun, 16 Nov 2025 23:10:36 -0300 Subject: [PATCH 31/44] Rename reverted_finalized to reverted_up_to_finalized --- .../src/substrate_node/lazy_loading/backend/mod.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 7950c9cc6be06..964128fd487bb 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -311,7 +311,7 @@ impl backend::Backend for Backend::zero(); - let mut reverted_finalized = HashSet::new(); + let mut reverted_up_to_finalized = HashSet::new(); let mut current_hash = storage.best_hash; @@ -362,7 +362,7 @@ impl backend::Backend for Backend backend::Backend for Backend backend::Backend for Backend sp_blockchain::Result<()> { From 9d7039eecdd935715b47ed54ff53bb87054f31e1 Mon Sep 17 00:00:00 2001 From: Diego Date: Mon, 17 Nov 2025 16:01:15 -0300 Subject: [PATCH 32/44] Implement lazy loading for child storage keys --- .../backend/forked_lazy_backend.rs | 139 ++++++++++- .../lazy_loading/backend/tests.rs | 231 ++++++++++++++++++ .../substrate_node/lazy_loading/rpc_client.rs | 25 +- 3 files changed, 387 insertions(+), 8 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs index 61c25e383804c..864373fcb4fca 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -292,6 +292,23 @@ impl ForkedLazyBackend { } } + pub(crate) fn update_child_storage( + &self, + child_info: &ChildInfo, + key: &[u8], + value: &Option>, + ) { + if let Some(val) = value { + let mut 
entries: HashMap, StorageCollection> = Default::default(); + entries.insert( + Some(child_info.clone()), + vec![(key.to_vec(), Some(val.clone()))], + ); + + self.db.write().insert(entries, StateVersion::V1); + } + } + #[inline] pub(crate) fn rpc(&self) -> Option<&dyn RPCClient> { self.rpc_client.as_deref() @@ -409,7 +426,46 @@ impl sp_state_machine::Backend Result, Self::Error> { - Ok(self.db.read().child_storage(child_info, key).ok().flatten()) + let remote_fetch = |block: Option| -> Option> { + self.rpc() + .and_then(|rpc| { + rpc.child_storage(child_info, StorageKey(key.to_vec()), block).ok() + }) + .flatten() + .map(|v| v.0) + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return Ok(remote_fetch(self.block_hash)); + } else { + // No RPC client, try to read from local DB + let readable_db = self.db.read(); + return Ok(readable_db.child_storage(child_info, key).ok().flatten()); + } + } + + let readable_db = self.db.read(); + let maybe_storage = readable_db.child_storage(child_info, key); + + match maybe_storage { + Ok(Some(value)) => Ok(Some(value)), + Ok(None) => { + if self.removed_keys.read().contains_key(key) { + return Ok(None); + } + + if let Some(remote_value) = remote_fetch(self.fork_block) { + drop(readable_db); + self.update_child_storage(child_info, key, &Some(remote_value.clone())); + Ok(Some(remote_value)) + } else { + Ok(None) + } + } + Err(e) => Err(e), + } } fn child_storage_hash( @@ -417,7 +473,38 @@ impl sp_state_machine::Backend Result as sp_core::Hasher>::Out>, Self::Error> { - Ok(self.db.read().child_storage_hash(child_info, key).ok().flatten()) + let remote_fetch = |block: Option| -> Option { + self.rpc() + .and_then(|rpc| { + rpc.child_storage_hash(child_info, StorageKey(key.to_vec()), block).ok() + }) + .flatten() + }; + + // When before_fork, try RPC first, then fall back to local DB + if self.before_fork { + if self.rpc().is_some() { + return Ok(remote_fetch(self.block_hash)); + } else { + let readable_db = self.db.read(); + return Ok(readable_db.child_storage_hash(child_info, key).ok().flatten()); + } + } + + let readable_db = self.db.read(); + let maybe_hash = readable_db.child_storage_hash(child_info, key); + + match maybe_hash { + Ok(Some(hash)) => Ok(Some(hash)), + Ok(None) => { + if self.removed_keys.read().contains_key(key) { + return Ok(None); + } + + Ok(remote_fetch(self.fork_block)) + } + Err(e) => Err(e), + } } fn next_storage_key( @@ -431,20 +518,17 @@ impl sp_state_machine::Backend Some(next_key), - // If not found locally and key is not marked as removed, fetch remotely _ if !self.removed_keys.read().contains_key(key) => { if self.rpc().is_some() { remote_fetch(self.fork_block) @@ -473,7 +557,48 @@ impl sp_state_machine::Backend Result, Self::Error> { - Ok(self.db.read().next_child_storage_key(child_info, key).ok().flatten()) + let remote_fetch = |block: Option| { + let start_key = Some(StorageKey(key.to_vec())); + self.rpc() + .and_then(|rpc| { + rpc.child_storage_keys_paged(child_info, None, 2, start_key.clone(), block).ok() + }) + .and_then(|keys| keys.last().cloned()) + }; + + // When before_fork, try RPC first, then fall back to local DB + let maybe_next_key = if self.before_fork { + if self.rpc().is_some() { + remote_fetch(self.block_hash) + } else { + self.db.read().next_child_storage_key(child_info, key).ok().flatten() + } + } else { + let next_child_key = self.db.read().next_child_storage_key(child_info, key); + match next_child_key { + Ok(Some(next_key)) => 
Some(next_key), + _ if !self.removed_keys.read().contains_key(key) => { + if self.rpc().is_some() { + remote_fetch(self.fork_block) + } else { + None + } + } + // Otherwise, there's no next key + _ => None, + } + } + .filter(|next_key| next_key != key); + + tracing::trace!( + target: LAZY_LOADING_LOG_TARGET, + "next_child_storage_key: (child_info: {:?}, key: {:?}, next_key: {:?})", + child_info, + hex::encode(key), + maybe_next_key.clone().map(hex::encode) + ); + + Ok(maybe_next_key) } fn storage_root<'a>( diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs index 1e3fc9980edff..6fe2372365ed1 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -49,6 +49,14 @@ mod mock_rpc { pub storage_hashes: Arc>>, /// storage_keys_paged[(block_hash, (prefix,start))] = Vec pub storage_keys_pages: Arc), Vec>>>, + /// child_storage[(block_hash, child_storage_key, key)] = value + pub child_storage: Arc, StorageKey), StorageData>>>, + /// child_storage_hashes[(block_hash, child_storage_key, key)] = hash + pub child_storage_hashes: + Arc, StorageKey), Block::Hash>>>, + /// child_storage_keys_pages[(block_hash, child_storage_key, prefix)] = Vec + pub child_storage_keys_pages: + Arc, Vec), Vec>>>, /// headers[hash] = header pub headers: Arc>>, /// blocks[hash] = SignedBlock @@ -62,6 +70,9 @@ mod mock_rpc { storage: std::sync::Arc::new(RwLock::new(BTreeMap::new())), storage_hashes: std::sync::Arc::new(RwLock::new(BTreeMap::new())), storage_keys_pages: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + child_storage: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + child_storage_hashes: std::sync::Arc::new(RwLock::new(BTreeMap::new())), + child_storage_keys_pages: std::sync::Arc::new(RwLock::new(BTreeMap::new())), headers: std::sync::Arc::new(RwLock::new(BTreeMap::new())), blocks: std::sync::Arc::new(RwLock::new(BTreeMap::new())), } @@ -85,6 +96,36 @@ mod mock_rpc { let full = SignedBlock { block, justifications: just }; self.blocks.write().insert(full.block.header().hash(), full); } + + pub fn put_child_storage( + &self, + at: Block::Hash, + child_storage_key: Vec, + key: StorageKey, + val: StorageData, + ) { + self.child_storage.write().insert((at, child_storage_key, key), val); + } + + pub fn put_child_storage_hash( + &self, + at: Block::Hash, + child_storage_key: Vec, + key: StorageKey, + hash: Block::Hash, + ) { + self.child_storage_hashes.write().insert((at, child_storage_key, key), hash); + } + + pub fn put_child_storage_keys_page( + &self, + at: Block::Hash, + child_storage_key: Vec, + prefix: Vec, + keys: Vec, + ) { + self.child_storage_keys_pages.write().insert((at, child_storage_key, prefix), keys); + } } impl RPCClient for Rpc { @@ -183,6 +224,66 @@ mod mock_rpc { ) -> Result { todo!() } + + fn child_storage( + &self, + child_info: &polkadot_sdk::sp_storage::ChildInfo, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + let bh = at.unwrap_or_default(); + let child_storage_key = child_info.storage_key().to_vec(); + let map = self.child_storage.read(); + Ok(map.get(&(bh, child_storage_key, key)).cloned()) + } + + fn child_storage_hash( + &self, + child_info: &polkadot_sdk::sp_storage::ChildInfo, + key: StorageKey, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + let bh = at.unwrap_or_default(); + let child_storage_key = 
child_info.storage_key().to_vec(); + let map = self.child_storage_hashes.read(); + Ok(map.get(&(bh, child_storage_key, key)).copied()) + } + + fn child_storage_keys_paged( + &self, + child_info: &polkadot_sdk::sp_storage::ChildInfo, + key: Option, + count: u32, + start_key: Option, + at: Option, + ) -> Result, jsonrpsee::core::ClientError> { + use std::cmp::min; + + let bh = at.unwrap_or_default(); + let child_storage_key = child_info.storage_key().to_vec(); + let prefix = key.map(|k| k.0).unwrap_or_default(); + let start = start_key.map(|k| k.0); + + let map = self.child_storage_keys_pages.read(); + let mut all = + map.get(&(bh, child_storage_key, prefix.clone())).cloned().unwrap_or_default(); + + all.sort_by(|a, b| a.0.cmp(&b.0)); + + let mut filtered: Vec = + all.into_iter().filter(|k| k.0.starts_with(&prefix)).collect(); + + if let Some(s) = start { + if let Some(pos) = filtered.iter().position(|k| k.0 == s) { + filtered = filtered.into_iter().skip(pos + 1).collect(); + } else { + filtered.retain(|k| k.0 > s); + } + } + + let take = min(filtered.len(), count as usize); + Ok(filtered.into_iter().take(take).map(|k| k.0).collect()) + } } } @@ -413,4 +514,134 @@ mod tests { assert_eq!(state.storage(&key2).unwrap(), Some(b"value2".to_vec())); assert_eq!(state.storage(&key3).unwrap(), None); } + + #[test] + fn child_storage_before_fork_reads_remote() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(100); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp)); + + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + let child_info = ChildInfo::new_default(b"child1"); + let key = b":child_key".to_vec(); + let at = Default::default(); + + // Put child storage in mock RPC + rpc.put_child_storage( + at, + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"child_value".to_vec()), + ); + + // Read child storage - should fetch from RPC + let v = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v, Some(b"child_value".to_vec())); + } + + #[test] + fn child_storage_after_fork_caches() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(10); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b11 = make_block(11, parent, vec![]); + let h11 = b11.header.hash(); + + rpc.put_header(b11.header.clone()); + rpc.put_block(b11, None); + + let child_info = ChildInfo::new_default(b"child2"); + let key = b":child_key2".to_vec(); + let fork_hash = cp.hash(); + + rpc.put_child_storage( + fork_hash, + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"cached_value".to_vec()), + ); + + let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); + + // First read - should cache + let v1 = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v1, Some(b"cached_value".to_vec())); + + // Second read - should come from cache (we can verify by checking the value is still there) + let v2 = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v2, Some(b"cached_value".to_vec())); + } + + #[test] + fn child_storage_hash_reads_from_rpc() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(50); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b51 = make_block(51, parent, vec![]); + let 
h51 = b51.header.hash(); + + rpc.put_header(b51.header.clone()); + rpc.put_block(b51, None); + + let child_info = ChildInfo::new_default(b"child3"); + let key = b":hash_key".to_vec(); + let fork_hash = cp.hash(); + let expected_hash = ::Hash::default(); + + rpc.put_child_storage_hash( + fork_hash, + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + expected_hash, + ); + + let state = backend.state_at(h51, TrieCacheContext::Trusted).unwrap(); + + let hash = state.child_storage_hash(&child_info, &key).unwrap(); + assert_eq!(hash, Some(expected_hash)); + } + + #[test] + fn next_child_storage_key_uses_paged() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(20); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b21 = make_block(21, parent, vec![]); + let h21 = b21.header.hash(); + + rpc.put_header(b21.header.clone()); + rpc.put_block(b21, None); + + let child_info = ChildInfo::new_default(b"child4"); + let fork_hash = cp.hash(); + + // Put a page of keys + rpc.put_child_storage_keys_page( + fork_hash, + child_info.storage_key().to_vec(), + vec![], + vec![StorageKey(b"key1".to_vec()), StorageKey(b"key2".to_vec())], + ); + + let state = backend.state_at(h21, TrieCacheContext::Trusted).unwrap(); + + // Get next key after "key1" should be "key2" + let next = state.next_child_storage_key(&child_info, b"key1").unwrap(); + assert_eq!(next, Some(b"key2".to_vec())); + } } diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs index 3499ebe82b828..5dcaafc291652 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/rpc_client.rs @@ -4,7 +4,7 @@ use polkadot_sdk::{ sp_api::__private::HeaderT, sp_runtime::{generic::SignedBlock, traits::Block as BlockT}, sp_state_machine, - sp_storage::{StorageData, StorageKey}, + sp_storage::{ChildInfo, StorageData, StorageKey}, }; use serde::de::DeserializeOwned; @@ -47,4 +47,27 @@ pub trait RPCClient: Send + Sync + std::fmt::D start_key: Option, at: Option, ) -> Result, ClientError>; + + fn child_storage( + &self, + child_info: &ChildInfo, + key: StorageKey, + at: Option, + ) -> Result, ClientError>; + + fn child_storage_hash( + &self, + child_info: &ChildInfo, + key: StorageKey, + at: Option, + ) -> Result, ClientError>; + + fn child_storage_keys_paged( + &self, + child_info: &ChildInfo, + key: Option, + count: u32, + start_key: Option, + at: Option, + ) -> Result, ClientError>; } From c8934223019917e65d7b93ec5fdf9dd8904647a7 Mon Sep 17 00:00:00 2001 From: Diego Date: Mon, 17 Nov 2025 17:45:02 -0300 Subject: [PATCH 33/44] Remove custom RawIter and replaced it for a wrapper --- .../backend/block_import_operation.rs | 4 +- .../backend/forked_lazy_backend.rs | 252 +----------------- .../lazy_loading/backend/tests.rs | 54 +--- 3 files changed, 15 insertions(+), 295 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs index bf1d70ac7d837..0c2c9664efd7f 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs @@ -177,8 +177,8 @@ impl backend::BlockImportOperation pub(crate) fn 
check_genesis_storage(storage: &Storage) -> sp_blockchain::Result<()> { if storage .top - .iter() - .any(|(k, _)| polkadot_sdk::sp_core::storage::well_known_keys::is_child_storage_key(k)) + .keys() + .any(|k| polkadot_sdk::sp_core::storage::well_known_keys::is_child_storage_key(k)) { return Err(sp_blockchain::Error::InvalidState); } diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs index 864373fcb4fca..70b81aa9af624 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -15,28 +15,14 @@ use polkadot_sdk::{ sp_trie::{self, PrefixedMemoryDB}, }; use serde::de::DeserializeOwned; -use std::{collections::HashMap, marker::PhantomData, sync::Arc}; +use std::{collections::HashMap, sync::Arc}; /// DB-backed patricia trie state, transaction type is an overlay of changes to commit. pub type DbState = TrieBackend>>, HashingFor>; -/// A struct containing arguments for iterating over the storage. -#[derive(Default)] -pub struct RawIterArgs { - /// The prefix of the keys over which to iterate. - pub prefix: Option>, - - /// The prefix from which to start the iteration from. - /// - /// This is inclusive and the iteration will include the key which is specified here. - pub start_at: Option>, -} - -/// A raw iterator over the storage keys. +/// Simple wrapper around the InMemoryBackend's RawIter that delegates all operations. pub struct RawIter { - pub(crate) args: RawIterArgs, - complete: bool, - _phantom: PhantomData, + inner: > as sp_state_machine::Backend>>::RawIter, } impl sp_state_machine::StorageIterator> @@ -49,108 +35,8 @@ impl sp_state_machine::StorageIterator Option> { - use sp_state_machine::Backend; - - let remote_fetch = - |key: Option, start_key: Option, block: Option| { - backend - .rpc() - .and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()) - .and_then(|keys| keys.first().cloned()) - }; - - let prefix = self.args.prefix.clone().map(StorageKey); - let start_key = self.args.start_at.clone().map(StorageKey); - - let maybe_next_key = if backend.before_fork { - // If RPC client is available, fetch remotely - if backend.rpc().is_some() { - remote_fetch(prefix, start_key, backend.block_hash) - } else { - // No RPC client, use local DB - let mut iter_args = sp_state_machine::backend::IterArgs::default(); - iter_args.prefix = self.args.prefix.as_deref(); - iter_args.start_at = self.args.start_at.as_deref(); - iter_args.stop_on_incomplete_database = true; - - let readable_db = backend.db.read(); - readable_db - .raw_iter(iter_args) - .map(|mut iter| iter.next_key(&readable_db)) - .map(|op| op.and_then(|result| result.ok())) - .ok() - .flatten() - } - } else { - // First, try to get next key from local DB - let next_storage_key = if let Some(ref start) = self.args.start_at { - // If we have a start_at, use next_storage_key to get the next one after it - backend.db.read().next_storage_key(start).ok().flatten() - } else { - // No start_at, use raw_iter to get the first key with the prefix - let mut iter_args = sp_state_machine::backend::IterArgs::default(); - iter_args.prefix = self.args.prefix.as_deref(); - iter_args.stop_on_incomplete_database = true; - - let readable_db = backend.db.read(); - readable_db - .raw_iter(iter_args) - .map(|mut iter| iter.next_key(&readable_db)) - .map(|op| op.and_then(|result| 
result.ok())) - .ok() - .flatten() - }; - - // Filter by prefix if necessary - let next_storage_key = next_storage_key - .filter(|key| prefix.as_ref().map(|p| key.starts_with(&p.0)).unwrap_or(true)); - - let removed_key = start_key - .clone() - .or(prefix.clone()) - .map(|key| backend.removed_keys.read().contains_key(&key.0)) - .unwrap_or(false); - if next_storage_key.is_none() && !removed_key { - let maybe_next_key = if backend.rpc().is_some() { - remote_fetch(prefix, start_key, backend.fork_block) - } else { - None - }; - match maybe_next_key { - Some(key) if !backend.removed_keys.read().contains_key(&key) => Some(key), - _ => None, - } - } else { - next_storage_key - } - }; - - tracing::trace!( - target: LAZY_LOADING_LOG_TARGET, - "next_key: (prefix: {:?}, start_at: {:?}, next_key: {:?})", - self.args.prefix.clone().map(hex::encode), - self.args.start_at.clone().map(hex::encode), - maybe_next_key.clone().map(hex::encode) - ); - - if let Some(next_key) = maybe_next_key { - if self - .args - .prefix - .clone() - .map(|filter_key| next_key.starts_with(&filter_key)) - .unwrap_or(false) - { - self.args.start_at = Some(next_key.clone()); - Some(Ok(next_key)) - } else { - self.complete = true; - None - } - } else { - self.complete = true; - None - } + let db = backend.db.read(); + self.inner.next_key(&*db) } fn next_pair( @@ -158,117 +44,12 @@ impl sp_state_machine::StorageIterator Option> { - use sp_state_machine::Backend; - - let remote_fetch = - |key: Option, start_key: Option, block: Option| { - backend - .rpc() - .and_then(|rpc| rpc.storage_keys_paged(key, 5, start_key, block).ok()) - .and_then(|keys| keys.first().cloned()) - }; - - let prefix = self.args.prefix.clone().map(StorageKey); - let start_key = self.args.start_at.clone().map(StorageKey); - - let maybe_next_key = if backend.before_fork { - // If RPC client is available, fetch remotely - if backend.rpc().is_some() { - remote_fetch(prefix, start_key, backend.block_hash) - } else { - // No RPC client, use local DB - let mut iter_args = sp_state_machine::backend::IterArgs::default(); - iter_args.prefix = self.args.prefix.as_deref(); - iter_args.start_at = self.args.start_at.as_deref(); - iter_args.stop_on_incomplete_database = true; - - let readable_db = backend.db.read(); - readable_db - .raw_iter(iter_args) - .map(|mut iter| iter.next_key(&readable_db)) - .map(|op| op.and_then(|result| result.ok())) - .ok() - .flatten() - } - } else { - // First, try to get next key from local DB - let next_storage_key = if let Some(ref start) = self.args.start_at { - // If we have a start_at, use next_storage_key to get the next one after it - backend.db.read().next_storage_key(start).ok().flatten() - } else { - // No start_at, use raw_iter to get the first key with the prefix - let mut iter_args = sp_state_machine::backend::IterArgs::default(); - iter_args.prefix = self.args.prefix.as_deref(); - iter_args.stop_on_incomplete_database = true; - - let readable_db = backend.db.read(); - readable_db - .raw_iter(iter_args) - .map(|mut iter| iter.next_key(&readable_db)) - .map(|op| op.and_then(|result| result.ok())) - .ok() - .flatten() - }; - - // Filter by prefix if necessary - let next_storage_key = next_storage_key - .filter(|key| prefix.as_ref().map(|p| key.starts_with(&p.0)).unwrap_or(true)); - - let removed_key = start_key - .clone() - .or(prefix.clone()) - .map(|key| backend.removed_keys.read().contains_key(&key.0)) - .unwrap_or(false); - if next_storage_key.is_none() && !removed_key { - let maybe_next_key = if backend.rpc().is_some() { - 
remote_fetch(prefix, start_key, backend.fork_block) - } else { - None - }; - match maybe_next_key { - Some(key) if !backend.removed_keys.read().contains_key(&key) => Some(key), - _ => None, - } - } else { - next_storage_key - } - }; - - tracing::trace!( - target: LAZY_LOADING_LOG_TARGET, - "next_pair: (prefix: {:?}, start_at: {:?}, next_key: {:?})", - self.args.prefix.clone().map(hex::encode), - self.args.start_at.clone().map(hex::encode), - maybe_next_key.clone().map(hex::encode) - ); - - let maybe_value = maybe_next_key - .clone() - .and_then(|key| (*backend).storage(key.as_slice()).ok()) - .flatten(); - - if let Some(next_key) = maybe_next_key { - if self - .args - .prefix - .clone() - .map(|filter_key| next_key.starts_with(&filter_key)) - .unwrap_or(false) - { - self.args.start_at = Some(next_key.clone()); - maybe_value.map(|value| Ok((next_key, value))) - } else { - self.complete = true; - None - } - } else { - self.complete = true; - None - } + let db = backend.db.read(); + self.inner.next_pair(&*db) } fn was_complete(&self) -> bool { - self.complete + self.inner.was_complete() } } @@ -300,10 +81,7 @@ impl ForkedLazyBackend { ) { if let Some(val) = value { let mut entries: HashMap, StorageCollection> = Default::default(); - entries.insert( - Some(child_info.clone()), - vec![(key.to_vec(), Some(val.clone()))], - ); + entries.insert(Some(child_info.clone()), vec![(key.to_vec(), Some(val.clone()))]); self.db.write().insert(entries, StateVersion::V1); } @@ -428,9 +206,7 @@ impl sp_state_machine::Backend Result, Self::Error> { let remote_fetch = |block: Option| -> Option> { self.rpc() - .and_then(|rpc| { - rpc.child_storage(child_info, StorageKey(key.to_vec()), block).ok() - }) + .and_then(|rpc| rpc.child_storage(child_info, StorageKey(key.to_vec()), block).ok()) .flatten() .map(|v| v.0) }; @@ -625,12 +401,8 @@ impl sp_state_machine::Backend) -> Result { - let clone = RawIterArgs { - prefix: args.prefix.map(|v| v.to_vec()), - start_at: args.start_at.map(|v| v.to_vec()), - }; - - Ok(RawIter:: { args: clone, complete: false, _phantom: Default::default() }) + let inner = self.db.read().raw_iter(args)?; + Ok(RawIter { inner }) } fn register_overlay_stats(&self, stats: &sp_state_machine::StateMachineStats) { diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs index 6fe2372365ed1..ef84ceaa60906 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -7,7 +7,7 @@ use polkadot_sdk::{ OpaqueExtrinsic, traits::{BlakeTwo256, Header as HeaderT}, }, - sp_state_machine::{self, StorageIterator}, + sp_state_machine, sp_storage::{StorageData, StorageKey}, }; use std::{ @@ -81,14 +81,6 @@ mod mock_rpc { pub fn put_storage(&self, at: Block::Hash, key: StorageKey, val: StorageData) { self.storage.write().insert((at, key), val); } - pub fn put_storage_keys_page( - &self, - at: Block::Hash, - prefix: Vec, - keys: Vec, - ) { - self.storage_keys_pages.write().insert((at, prefix), keys); - } pub fn put_header(&self, h: Block::Header) { self.headers.write().insert(h.hash(), h); } @@ -398,50 +390,6 @@ mod tests { assert_eq!(calls_before, calls_after, "should not call RPC for removed keys"); } - #[test] - fn raw_iter_merges_local_then_remote() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(7); - let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); - - // block #8 - let b8 
= make_block(8, cp.hash(), vec![]); - rpc.put_header(b8.header.clone()); - rpc.put_block(b8.clone(), None); - let state = backend.state_at(b8.header.hash(), TrieCacheContext::Trusted).unwrap(); - - // Preload local DB with key "a1" - state.update_storage(b"a1", &Some(b"v1".to_vec())); - - // Ensure storage_root is computed to make the key visible to raw_iter - let _ = state.db.write().storage_root( - vec![(b"a1".as_ref(), Some(b"v1".as_ref()))].into_iter(), - StateVersion::V1, - ); - - // Remote has only "a2" under same prefix at fork block (not "a1") - rpc.put_storage_keys_page(cp.hash(), b"a".to_vec(), vec![StorageKey(b"a2".to_vec())]); - rpc.put_storage(cp.hash(), StorageKey(b"a2".to_vec()), StorageData(b"v2".to_vec())); - - let mut args = polkadot_sdk::sp_state_machine::IterArgs::default(); - args.prefix = Some(&b"a"[..]); - let mut it = state.raw_iter(args).unwrap(); - - // next_pair should return ("a1","v1") from local - let p1 = it.next_pair(&state).unwrap().unwrap(); - assert_eq!(p1.0, b"a1".to_vec()); - assert_eq!(p1.1, b"v1".to_vec()); - - // next_pair should now bring remote ("a2","v2") - let p2 = it.next_pair(&state).unwrap().unwrap(); - assert_eq!(p2.0, b"a2".to_vec()); - assert_eq!(p2.1, b"v2".to_vec()); - - // done - assert!(it.next_pair(&state).is_none()); - assert!(it.was_complete()); - } - #[test] fn blockchain_header_and_number_are_cached() { let rpc = std::sync::Arc::new(Rpc::new()); From 8e82949e7e98be04888da07f25e8e0009e3c794f Mon Sep 17 00:00:00 2001 From: Diego Date: Mon, 17 Nov 2025 23:15:27 -0300 Subject: [PATCH 34/44] Fix clippy --- .../lazy_loading/backend/tests.rs | 521 +++++++++--------- 1 file changed, 256 insertions(+), 265 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs index ef84ceaa60906..8f2815342fcae 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -2,7 +2,7 @@ use super::*; use mock_rpc::{Rpc, TestBlock, TestHeader}; use parking_lot::RwLock; use polkadot_sdk::{ - sc_client_api::{Backend as BackendT, StateBackend}, + sc_client_api::{Backend as BackendT, HeaderBackend, StateBackend}, sp_runtime::{ OpaqueExtrinsic, traits::{BlakeTwo256, Header as HeaderT}, @@ -279,317 +279,308 @@ mod mock_rpc { } } -#[cfg(test)] -mod tests { - use super::*; - use polkadot_sdk::sc_client_api::HeaderBackend; +type N = u32; +type TestBlockT = TestBlock; - type N = u32; - type TestBlockT = TestBlock; - - fn make_header(number: N, parent: ::Hash) -> TestHeader { - TestHeader::new(number, Default::default(), Default::default(), parent, Default::default()) - } +fn make_header(number: N, parent: ::Hash) -> TestHeader { + TestHeader::new(number, Default::default(), Default::default(), parent, Default::default()) +} - fn make_block( - number: N, - parent: ::Hash, - xts: Vec, - ) -> TestBlock { - let header = make_header(number, parent); - TestBlock::new(header, xts) - } +fn make_block( + number: N, + parent: ::Hash, + xts: Vec, +) -> TestBlock { + let header = make_header(number, parent); + TestBlock::new(header, xts) +} - fn checkpoint(n: N) -> TestHeader { - make_header(n, Default::default()) - } +fn checkpoint(n: N) -> TestHeader { + make_header(n, Default::default()) +} - #[test] - fn before_fork_reads_remote_only() { - let rpc = std::sync::Arc::new(Rpc::new()); - // fork checkpoint at #100 - let cp = checkpoint(100); - let backend = 
Backend::::new(Some(rpc.clone()), Some(cp)); - - // state_at(Default::default()) => before_fork=true - let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - - let key = b":foo".to_vec(); - // prepare remote value at "block_hash = Default::default()" - let at = Default::default(); - rpc.put_storage(at, StorageKey(key.clone()), StorageData(b"bar".to_vec())); - - // read storage - let v1 = state.storage(&key).unwrap(); - assert_eq!(v1, Some(b"bar".to_vec())); - - // not cached in DB: second read still goes to RPC - let v2 = state.storage(&key).unwrap(); - assert_eq!(v2, Some(b"bar".to_vec())); - assert!(rpc.counters.storage_calls.load(Ordering::Relaxed) >= 2); - } +#[test] +fn before_fork_reads_remote_only() { + let rpc = std::sync::Arc::new(Rpc::new()); + // fork checkpoint at #100 + let cp = checkpoint(100); + let backend = Backend::::new(Some(rpc.clone()), Some(cp)); + + // state_at(Default::default()) => before_fork=true + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + + let key = b":foo".to_vec(); + // prepare remote value at "block_hash = Default::default()" + let at = Default::default(); + rpc.put_storage(at, StorageKey(key.clone()), StorageData(b"bar".to_vec())); + + // read storage + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"bar".to_vec())); + + // not cached in DB: second read still goes to RPC + let v2 = state.storage(&key).unwrap(); + assert_eq!(v2, Some(b"bar".to_vec())); + assert!(rpc.counters.storage_calls.load(Ordering::Relaxed) >= 2); +} - #[test] - fn after_fork_first_fetch_caches_subsequent_hits_local() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(10); - let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); - - // Build a block #11 > checkpoint (#10), with parent #10 - let parent = cp.hash(); - let b11 = make_block(11, parent, vec![]); - let h11 = b11.header.hash(); - - rpc.put_header(b11.header.clone()); - rpc.put_block(b11, None); - - // remote storage at fork block (checkpoint hash) - let fork_hash = cp.hash(); - let key = b":k".to_vec(); - rpc.put_storage(fork_hash, StorageKey(key.clone()), StorageData(b"v".to_vec())); - - // Grab state_at(#11): after_fork=false; local DB empty - let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); - - // First read fetches remote and caches - let v1 = state.storage(&key).unwrap(); - assert_eq!(v1, Some(b"v".to_vec())); - - // Mutate RPC to detect second call (remove remote value) - // If second read still tries RPC, it would return None; but it should come from cache. - // So we do not change the mock; instead, assert RPC call count increases only once. 
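The two reworked tests above pin down the read ordering the forked backend is expected to follow around the fork point. As a minimal, self-contained sketch of that ordering (hypothetical stand-in types, not the crate's real ForkedLazyBackend or RPCClient): before the fork every read goes straight to the remote node at the pinned block hash; after the fork the local overlay is consulted first, and a remote hit at the fork block is written back so a second read of the same key never touches the RPC client.

    use std::collections::HashMap;

    /// Illustrative stand-in for the lazy backend's storage read path.
    /// Names here are hypothetical; the real backend reads through DbState and RPCClient.
    struct LazyRead {
        before_fork: bool,
        local: HashMap<Vec<u8>, Vec<u8>>,
        remote: HashMap<Vec<u8>, Vec<u8>>,
        remote_calls: usize,
    }

    impl LazyRead {
        fn remote_fetch(&mut self, key: &[u8]) -> Option<Vec<u8>> {
            self.remote_calls += 1;
            self.remote.get(key).cloned()
        }

        fn storage(&mut self, key: &[u8]) -> Option<Vec<u8>> {
            // Before the fork there is no meaningful local state: always go remote.
            if self.before_fork {
                return self.remote_fetch(key);
            }
            // After the fork: local overlay first, then fetch-and-cache from the fork block.
            if let Some(v) = self.local.get(key) {
                return Some(v.clone());
            }
            let fetched = self.remote_fetch(key)?;
            self.local.insert(key.to_vec(), fetched.clone());
            Some(fetched)
        }
    }

    fn main() {
        let mut state = LazyRead {
            before_fork: false,
            local: HashMap::new(),
            remote: HashMap::from([(b":k".to_vec(), b"v".to_vec())]),
            remote_calls: 0,
        };
        assert_eq!(state.storage(b":k"), Some(b"v".to_vec())); // first read: RPC + cache
        assert_eq!(state.storage(b":k"), Some(b"v".to_vec())); // second read: cache only
        assert_eq!(state.remote_calls, 1);
    }

This is also why the test asserts on the RPC call counter rather than on the value: cache correctness is observable purely through the number of remote fetches.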
- let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); - let _ = state.storage(&key).unwrap(); - let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); - assert_eq!(calls_before, calls_after, "second hit should be served from cache"); - } +#[test] +fn after_fork_first_fetch_caches_subsequent_hits_local() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(10); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // Build a block #11 > checkpoint (#10), with parent #10 + let parent = cp.hash(); + let b11 = make_block(11, parent, vec![]); + let h11 = b11.header.hash(); + + rpc.put_header(b11.header.clone()); + rpc.put_block(b11, None); + + // remote storage at fork block (checkpoint hash) + let fork_hash = cp.hash(); + let key = b":k".to_vec(); + rpc.put_storage(fork_hash, StorageKey(key.clone()), StorageData(b"v".to_vec())); + + // Grab state_at(#11): after_fork=false; local DB empty + let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); + + // First read fetches remote and caches + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, Some(b"v".to_vec())); + + // Mutate RPC to detect second call (remove remote value) + // If second read still tries RPC, it would return None; but it should come from cache. + // So we do not change the mock; instead, assert RPC call count increases only once. + let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); + let _ = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); + assert_eq!(calls_before, calls_after, "second hit should be served from cache"); +} - #[test] - fn removed_keys_prevents_remote_fetch() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(5); - let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); - - // make block #6 - let b6 = make_block(6, cp.hash(), vec![]); - rpc.put_header(b6.header.clone()); - rpc.put_block(b6.clone(), None); - let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); - - // mark key as removed - let key = b":dead".to_vec(); - state.removed_keys.write().insert(key.clone(), ()); - - // Even if remote has a value, backend must not fetch it - rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"ghost".to_vec())); - let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); - let v = state.storage(&key).unwrap(); - let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); - - assert!(v.is_none()); - assert_eq!(calls_before, calls_after, "should not call RPC for removed keys"); - } +#[test] +fn removed_keys_prevents_remote_fetch() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(5); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // make block #6 + let b6 = make_block(6, cp.hash(), vec![]); + rpc.put_header(b6.header.clone()); + rpc.put_block(b6.clone(), None); + let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); + + // mark key as removed + let key = b":dead".to_vec(); + state.removed_keys.write().insert(key.clone(), ()); + + // Even if remote has a value, backend must not fetch it + rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"ghost".to_vec())); + let calls_before = rpc.counters.storage_calls.load(Ordering::Relaxed); + let v = state.storage(&key).unwrap(); + let calls_after = rpc.counters.storage_calls.load(Ordering::Relaxed); + + assert!(v.is_none()); + 
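The removed-keys guard exercised by this test is what keeps the RPC fallback from resurrecting state that was deleted after the fork. A rough sketch of the idea with placeholder types (the real backend shares the set across state handles behind Arc<RwLock<...>>, and a later patch in this series swaps the HashMap<Vec<u8>, ()> for a HashSet<Vec<u8>>): a delete records a tombstone in addition to clearing the local entry, and the read path checks the tombstone set before it is allowed to fall back to the remote node.

    use std::collections::{HashMap, HashSet};

    /// Hypothetical tombstone-aware read/delete pair, simplified from the backend.
    struct Tombstoned {
        local: HashMap<Vec<u8>, Vec<u8>>,
        removed: HashSet<Vec<u8>>,
        remote: HashMap<Vec<u8>, Vec<u8>>,
    }

    impl Tombstoned {
        fn delete(&mut self, key: &[u8]) {
            self.local.remove(key);
            // Remember the deletion so a later miss is not treated as "never cached".
            self.removed.insert(key.to_vec());
        }

        fn storage(&self, key: &[u8]) -> Option<Vec<u8>> {
            if let Some(v) = self.local.get(key) {
                return Some(v.clone());
            }
            if self.removed.contains(key) {
                return None; // deleted locally: do not ask the remote node
            }
            self.remote.get(key).cloned() // plain miss: fall back to the fork block
        }
    }

    fn main() {
        let mut s = Tombstoned {
            local: HashMap::new(),
            removed: HashSet::new(),
            remote: HashMap::from([(b":dead".to_vec(), b"ghost".to_vec())]),
        };
        assert_eq!(s.storage(b":dead"), Some(b"ghost".to_vec())); // visible before delete
        s.delete(b":dead");
        assert_eq!(s.storage(b":dead"), None); // tombstone wins over the remote value
    }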
assert_eq!(calls_before, calls_after, "should not call RPC for removed keys"); +} - #[test] - fn blockchain_header_and_number_are_cached() { - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(3); - let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); - let chain = backend.blockchain(); - - // prepare one block w/ extrinsics - let xts: Vec = vec![]; - let b4 = make_block(4, cp.hash(), xts); - let h4 = b4.header().hash(); - rpc.put_block(b4, None); - - // first header() fetches RPC and caches as Full - let h = chain.header(h4).unwrap().unwrap(); - assert_eq!(h.hash(), h4); - - // number() should now return from cache (no extra RPC needed) - let calls_before = rpc.counters.block_calls.load(Ordering::Relaxed); - let number = chain.number(h4).unwrap().unwrap(); - let calls_after = rpc.counters.block_calls.load(Ordering::Relaxed); - - assert_eq!(number, 4); - assert_eq!( - calls_before, calls_after, - "number() should be served from cache after header()" - ); - } +#[test] +fn blockchain_header_and_number_are_cached() { + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(3); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + let chain = backend.blockchain(); + + // prepare one block w/ extrinsics + let xts: Vec = vec![]; + let b4 = make_block(4, cp.hash(), xts); + let h4 = b4.header().hash(); + rpc.put_block(b4, None); + + // first header() fetches RPC and caches as Full + let h = chain.header(h4).unwrap().unwrap(); + assert_eq!(h.hash(), h4); + + // number() should now return from cache (no extra RPC needed) + let calls_before = rpc.counters.block_calls.load(Ordering::Relaxed); + let number = chain.number(h4).unwrap().unwrap(); + let calls_after = rpc.counters.block_calls.load(Ordering::Relaxed); + + assert_eq!(number, 4); + assert_eq!(calls_before, calls_after, "number() should be served from cache after header()"); +} - #[test] - fn no_fork_mode_uses_local_db_only() { - let backend = Backend::::new(None, None); - let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); +#[test] +fn no_fork_mode_uses_local_db_only() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - assert!(!state.before_fork); + assert!(!state.before_fork); - let key = b":test_key".to_vec(); - let v1 = state.storage(&key).unwrap(); - assert_eq!(v1, None); + let key = b":test_key".to_vec(); + let v1 = state.storage(&key).unwrap(); + assert_eq!(v1, None); - state.update_storage(&key, &Some(b"local_value".to_vec())); + state.update_storage(&key, &Some(b"local_value".to_vec())); - let v2 = state.storage(&key).unwrap(); - assert_eq!(v2, Some(b"local_value".to_vec())); - } + let v2 = state.storage(&key).unwrap(); + assert_eq!(v2, Some(b"local_value".to_vec())); +} - #[test] - fn no_fork_mode_state_at_default() { - let backend = Backend::::new(None, None); - let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); +#[test] +fn no_fork_mode_state_at_default() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - assert!(!state.before_fork); - assert_eq!(state.fork_block, None); - assert!(state.rpc_client.is_none()); - } + assert!(!state.before_fork); + assert_eq!(state.fork_block, None); + assert!(state.rpc_client.is_none()); +} - #[test] - fn no_fork_mode_storage_operations() { - let backend = Backend::::new(None, None); - let state = 
backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); +#[test] +fn no_fork_mode_storage_operations() { + let backend = Backend::::new(None, None); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - let key1 = b":key1".to_vec(); - let key2 = b":key2".to_vec(); - let key3 = b":key3".to_vec(); + let key1 = b":key1".to_vec(); + let key2 = b":key2".to_vec(); + let key3 = b":key3".to_vec(); - state.update_storage(&key1, &Some(b"value1".to_vec())); - state.update_storage(&key2, &Some(b"value2".to_vec())); + state.update_storage(&key1, &Some(b"value1".to_vec())); + state.update_storage(&key2, &Some(b"value2".to_vec())); - assert_eq!(state.storage(&key1).unwrap(), Some(b"value1".to_vec())); - assert_eq!(state.storage(&key2).unwrap(), Some(b"value2".to_vec())); - assert_eq!(state.storage(&key3).unwrap(), None); - } + assert_eq!(state.storage(&key1).unwrap(), Some(b"value1".to_vec())); + assert_eq!(state.storage(&key2).unwrap(), Some(b"value2".to_vec())); + assert_eq!(state.storage(&key3).unwrap(), None); +} - #[test] - fn child_storage_before_fork_reads_remote() { - use polkadot_sdk::sp_storage::ChildInfo; +#[test] +fn child_storage_before_fork_reads_remote() { + use polkadot_sdk::sp_storage::ChildInfo; - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(100); - let backend = super::Backend::::new(Some(rpc.clone()), Some(cp)); + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(100); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp)); - let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); + let state = backend.state_at(Default::default(), TrieCacheContext::Trusted).unwrap(); - let child_info = ChildInfo::new_default(b"child1"); - let key = b":child_key".to_vec(); - let at = Default::default(); + let child_info = ChildInfo::new_default(b"child1"); + let key = b":child_key".to_vec(); + let at = Default::default(); - // Put child storage in mock RPC - rpc.put_child_storage( - at, - child_info.storage_key().to_vec(), - StorageKey(key.clone()), - StorageData(b"child_value".to_vec()), - ); + // Put child storage in mock RPC + rpc.put_child_storage( + at, + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"child_value".to_vec()), + ); - // Read child storage - should fetch from RPC - let v = state.child_storage(&child_info, &key).unwrap(); - assert_eq!(v, Some(b"child_value".to_vec())); - } + // Read child storage - should fetch from RPC + let v = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v, Some(b"child_value".to_vec())); +} - #[test] - fn child_storage_after_fork_caches() { - use polkadot_sdk::sp_storage::ChildInfo; +#[test] +fn child_storage_after_fork_caches() { + use polkadot_sdk::sp_storage::ChildInfo; - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(10); - let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(10); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); - let parent = cp.hash(); - let b11 = make_block(11, parent, vec![]); - let h11 = b11.header.hash(); + let parent = cp.hash(); + let b11 = make_block(11, parent, vec![]); + let h11 = b11.header.hash(); - rpc.put_header(b11.header.clone()); - rpc.put_block(b11, None); + rpc.put_header(b11.header.clone()); + rpc.put_block(b11, None); - let child_info = ChildInfo::new_default(b"child2"); - let key = b":child_key2".to_vec(); - let 
fork_hash = cp.hash(); + let child_info = ChildInfo::new_default(b"child2"); + let key = b":child_key2".to_vec(); + let fork_hash = cp.hash(); - rpc.put_child_storage( - fork_hash, - child_info.storage_key().to_vec(), - StorageKey(key.clone()), - StorageData(b"cached_value".to_vec()), - ); + rpc.put_child_storage( + fork_hash, + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"cached_value".to_vec()), + ); - let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); + let state = backend.state_at(h11, TrieCacheContext::Trusted).unwrap(); - // First read - should cache - let v1 = state.child_storage(&child_info, &key).unwrap(); - assert_eq!(v1, Some(b"cached_value".to_vec())); + // First read - should cache + let v1 = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v1, Some(b"cached_value".to_vec())); - // Second read - should come from cache (we can verify by checking the value is still there) - let v2 = state.child_storage(&child_info, &key).unwrap(); - assert_eq!(v2, Some(b"cached_value".to_vec())); - } + // Second read - should come from cache (we can verify by checking the value is still there) + let v2 = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v2, Some(b"cached_value".to_vec())); +} - #[test] - fn child_storage_hash_reads_from_rpc() { - use polkadot_sdk::sp_storage::ChildInfo; +#[test] +fn child_storage_hash_reads_from_rpc() { + use polkadot_sdk::sp_storage::ChildInfo; - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(50); - let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(50); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); - let parent = cp.hash(); - let b51 = make_block(51, parent, vec![]); - let h51 = b51.header.hash(); + let parent = cp.hash(); + let b51 = make_block(51, parent, vec![]); + let h51 = b51.header.hash(); - rpc.put_header(b51.header.clone()); - rpc.put_block(b51, None); + rpc.put_header(b51.header.clone()); + rpc.put_block(b51, None); - let child_info = ChildInfo::new_default(b"child3"); - let key = b":hash_key".to_vec(); - let fork_hash = cp.hash(); - let expected_hash = ::Hash::default(); + let child_info = ChildInfo::new_default(b"child3"); + let key = b":hash_key".to_vec(); + let fork_hash = cp.hash(); + let expected_hash = ::Hash::default(); - rpc.put_child_storage_hash( - fork_hash, - child_info.storage_key().to_vec(), - StorageKey(key.clone()), - expected_hash, - ); + rpc.put_child_storage_hash( + fork_hash, + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + expected_hash, + ); - let state = backend.state_at(h51, TrieCacheContext::Trusted).unwrap(); + let state = backend.state_at(h51, TrieCacheContext::Trusted).unwrap(); - let hash = state.child_storage_hash(&child_info, &key).unwrap(); - assert_eq!(hash, Some(expected_hash)); - } + let hash = state.child_storage_hash(&child_info, &key).unwrap(); + assert_eq!(hash, Some(expected_hash)); +} - #[test] - fn next_child_storage_key_uses_paged() { - use polkadot_sdk::sp_storage::ChildInfo; +#[test] +fn next_child_storage_key_uses_paged() { + use polkadot_sdk::sp_storage::ChildInfo; - let rpc = std::sync::Arc::new(Rpc::new()); - let cp = checkpoint(20); - let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(20); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); - let parent = 
cp.hash(); - let b21 = make_block(21, parent, vec![]); - let h21 = b21.header.hash(); + let parent = cp.hash(); + let b21 = make_block(21, parent, vec![]); + let h21 = b21.header.hash(); - rpc.put_header(b21.header.clone()); - rpc.put_block(b21, None); + rpc.put_header(b21.header.clone()); + rpc.put_block(b21, None); - let child_info = ChildInfo::new_default(b"child4"); - let fork_hash = cp.hash(); + let child_info = ChildInfo::new_default(b"child4"); + let fork_hash = cp.hash(); - // Put a page of keys - rpc.put_child_storage_keys_page( - fork_hash, - child_info.storage_key().to_vec(), - vec![], - vec![StorageKey(b"key1".to_vec()), StorageKey(b"key2".to_vec())], - ); + // Put a page of keys + rpc.put_child_storage_keys_page( + fork_hash, + child_info.storage_key().to_vec(), + vec![], + vec![StorageKey(b"key1".to_vec()), StorageKey(b"key2".to_vec())], + ); - let state = backend.state_at(h21, TrieCacheContext::Trusted).unwrap(); + let state = backend.state_at(h21, TrieCacheContext::Trusted).unwrap(); - // Get next key after "key1" should be "key2" - let next = state.next_child_storage_key(&child_info, b"key1").unwrap(); - assert_eq!(next, Some(b"key2".to_vec())); - } + // Get next key after "key1" should be "key2" + let next = state.next_child_storage_key(&child_info, b"key1").unwrap(); + assert_eq!(next, Some(b"key2".to_vec())); } From 4e2de04cdc586a4b759faf336e7712ec7aac7560 Mon Sep 17 00:00:00 2001 From: Diego Date: Mon, 17 Nov 2025 23:23:06 -0300 Subject: [PATCH 35/44] Fix removed keys tracking for child storage --- .../src/substrate_node/lazy_loading/backend/mod.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 964128fd487bb..b14b57a0f85cc 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -130,6 +130,17 @@ impl backend::Backend for Backend Date: Tue, 18 Nov 2025 11:39:44 -0300 Subject: [PATCH 36/44] Refactor import --- .../lazy_loading/backend/block_import_operation.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs index 0c2c9664efd7f..477d00fc33ff1 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/block_import_operation.rs @@ -6,7 +6,9 @@ use polkadot_sdk::{ Justification, Justifications, StateVersion, Storage, traits::{Block as BlockT, HashingFor}, }, - sp_state_machine::{self, BackendTransaction, ChildStorageCollection, StorageCollection}, + sp_state_machine::{ + self, Backend, BackendTransaction, ChildStorageCollection, StorageCollection, + }, }; use serde::de::DeserializeOwned; @@ -33,7 +35,6 @@ impl BlockImportOperation { commit: bool, state_version: StateVersion, ) -> sp_blockchain::Result { - use sp_state_machine::Backend; check_genesis_storage(&storage)?; let child_delta = storage.children_default.values().map(|child_content| { From 70e6f3f3b3eef51e4d808f3da25b8c991b836b31 Mon Sep 17 00:00:00 2001 From: Diego Date: Tue, 18 Nov 2025 12:03:24 -0300 Subject: [PATCH 37/44] Remove unnecesary check --- .../src/substrate_node/lazy_loading/backend/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index b14b57a0f85cc..0d38ec9123fb9 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -314,7 +314,7 @@ impl backend::Backend for Backend revertible { target = revertible; From d9c24dac21b23c5c32367777376c4db92fd4d2d5 Mon Sep 17 00:00:00 2001 From: Diego Date: Tue, 18 Nov 2025 12:49:40 -0300 Subject: [PATCH 38/44] Use hashset for removed keys --- .../backend/forked_lazy_backend.rs | 22 +++++++++++-------- .../lazy_loading/backend/mod.rs | 12 +++++----- .../lazy_loading/backend/tests.rs | 2 +- 3 files changed, 20 insertions(+), 16 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs index 70b81aa9af624..a4e208cbfb31e 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -1,5 +1,6 @@ use crate::substrate_node::lazy_loading::{LAZY_LOADING_LOG_TARGET, rpc_client::RPCClient}; use alloy_primitives::hex; +use parking_lot::RwLock; use polkadot_sdk::{ sc_client_api::StorageKey, sp_core, @@ -15,7 +16,10 @@ use polkadot_sdk::{ sp_trie::{self, PrefixedMemoryDB}, }; use serde::de::DeserializeOwned; -use std::{collections::HashMap, sync::Arc}; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; /// DB-backed patricia trie state, transaction type is an overlay of changes to commit. pub type DbState = TrieBackend>>, HashingFor>; @@ -58,8 +62,8 @@ pub struct ForkedLazyBackend { pub(crate) rpc_client: Option>>, pub(crate) block_hash: Option, pub(crate) fork_block: Option, - pub(crate) db: Arc>>>, - pub(crate) removed_keys: Arc, ()>>>, + pub(crate) db: Arc>>>, + pub(crate) removed_keys: Arc>>>, pub(crate) before_fork: bool, } @@ -123,7 +127,7 @@ impl sp_state_machine::Backend Some(data), - _ if !self.removed_keys.read().contains_key(key) => { + _ if !self.removed_keys.read().contains(key) => { let result = if self.rpc().is_some() { remote_fetch(self.fork_block) } else { None }; @@ -167,7 +171,7 @@ impl sp_state_machine::Backend Ok(Some(hash)), - _ if !self.removed_keys.read().contains_key(key) => { + _ if !self.removed_keys.read().contains(key) => { if self.rpc().is_some() { remote_fetch(self.fork_block) } else { @@ -228,7 +232,7 @@ impl sp_state_machine::Backend Ok(Some(value)), Ok(None) => { - if self.removed_keys.read().contains_key(key) { + if self.removed_keys.read().contains(key) { return Ok(None); } @@ -273,7 +277,7 @@ impl sp_state_machine::Backend Ok(Some(hash)), Ok(None) => { - if self.removed_keys.read().contains_key(key) { + if self.removed_keys.read().contains(key) { return Ok(None); } @@ -305,7 +309,7 @@ impl sp_state_machine::Backend Some(next_key), - _ if !self.removed_keys.read().contains_key(key) => { + _ if !self.removed_keys.read().contains(key) => { if self.rpc().is_some() { remote_fetch(self.fork_block) } else { @@ -353,7 +357,7 @@ impl sp_state_machine::Backend Some(next_key), - _ if !self.removed_keys.read().contains_key(key) => { + _ if !self.removed_keys.read().contains(key) => { if self.rpc().is_some() { remote_fetch(self.fork_block) } else { diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs 
b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 0d38ec9123fb9..7373b12a76680 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -122,26 +122,26 @@ impl backend::Backend for Backend Date: Tue, 18 Nov 2025 13:34:54 -0300 Subject: [PATCH 39/44] Implement status and justifications using lazy loading --- .../lazy_loading/backend/blockchain.rs | 31 +++++++++-- .../lazy_loading/backend/tests.rs | 53 +++++++++++++++++++ 2 files changed, 80 insertions(+), 4 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs index 8951cb04c189b..fd11c8b27e413 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/blockchain.rs @@ -310,9 +310,20 @@ impl HeaderBackend for Blockchain sp_blockchain::Result { - match self.storage.read().blocks.contains_key(&hash) { - true => Ok(BlockStatus::InChain), - false => Ok(BlockStatus::Unknown), + // Check local storage first + if self.storage.read().blocks.contains_key(&hash) { + return Ok(BlockStatus::InChain); + } + + // If not in local storage, check RPC + if let Some(rpc) = self.rpc() { + match rpc.header(Some(hash)) { + Ok(Some(_)) => Ok(BlockStatus::InChain), + Ok(None) => Ok(BlockStatus::Unknown), + Err(_) => Ok(BlockStatus::Unknown), + } + } else { + Ok(BlockStatus::Unknown) } } @@ -380,7 +391,19 @@ impl sp_blockchain::Backend for Blockch } fn justifications(&self, hash: Block::Hash) -> sp_blockchain::Result> { - Ok(self.storage.read().blocks.get(&hash).and_then(|b| b.justifications().cloned())) + // Check local storage first + if let Some(justifications) = + self.storage.read().blocks.get(&hash).and_then(|b| b.justifications().cloned()) + { + return Ok(Some(justifications)); + } + + // If not in local storage, fetch from RPC + let justifications = self.rpc().and_then(|rpc| { + rpc.block(Some(hash)).ok().flatten().and_then(|full| full.justifications) + }); + + Ok(justifications) } fn last_finalized(&self) -> sp_blockchain::Result { diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs index 1c63c65d75107..fc97e59d095be 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -584,3 +584,56 @@ fn next_child_storage_key_uses_paged() { let next = state.next_child_storage_key(&child_info, b"key1").unwrap(); assert_eq!(next, Some(b"key2".to_vec())); } + +#[test] +fn blockchain_status_queries_rpc_when_not_local() { + use polkadot_sdk::sp_blockchain::{BlockStatus, HeaderBackend}; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(20); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b21 = make_block(21, parent, vec![]); + let h21 = b21.header.hash(); + + // Block is not in local storage yet, but exists in RPC + rpc.put_header(b21.header.clone()); + + // status() should query RPC and return InChain + let status = backend.blockchain().status(h21).unwrap(); + assert_eq!(status, BlockStatus::InChain); + + // Now query for a block that doesn't exist anywhere + let unknown_hash = make_block(999, parent, vec![]).header.hash(); + let status 
= backend.blockchain().status(unknown_hash).unwrap(); + assert_eq!(status, BlockStatus::Unknown); +} + +#[test] +fn blockchain_justifications_queries_rpc_when_not_local() { + use polkadot_sdk::sp_blockchain::Backend as BlockchainBackend; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(20); + let backend = super::Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + let parent = cp.hash(); + let b21 = make_block(21, parent, vec![]); + let h21 = b21.header.hash(); + + // Create justifications + let justifications = polkadot_sdk::sp_runtime::Justifications::from((*b"TEST", vec![1, 2, 3, 4])); + + // Block is not in local storage yet, but exists in RPC with justifications + rpc.put_block(b21, Some(justifications.clone())); + + // justifications() should query RPC and return the justifications + let result = backend.blockchain().justifications(h21).unwrap(); + assert_eq!(result, Some(justifications)); + + // Now query for a block that doesn't exist anywhere + let unknown_hash = make_block(999, parent, vec![]).header.hash(); + let result = backend.blockchain().justifications(unknown_hash).unwrap(); + assert_eq!(result, None); +} From 2a828fb4e0fc9d4e16de4359666aaf4321918d24 Mon Sep 17 00:00:00 2001 From: Diego Date: Tue, 18 Nov 2025 14:19:11 -0300 Subject: [PATCH 40/44] Ignore extra leaves process --- .../lazy_loading/backend/mod.rs | 24 +------------------ 1 file changed, 1 insertion(+), 23 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 7373b12a76680..0743cc1307047 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -386,30 +386,8 @@ impl backend::Backend for Backend = - storage.leaves.revert(best_hash_after, best_number_after).collect(); - for (hash, number) in extra_leaves { - if let Some(count) = pinned.get(&hash) { - if *count > 0 { - return Err(sp_blockchain::Error::Backend(format!( - "Can't revert pinned block {hash:?}", - ))); - } - } - - storage.blocks.remove(&hash); - if let Some(entry) = storage.hashes.get(&number) { - if *entry == hash { - storage.hashes.remove(&number); - } - } - states.remove(&hash); - - if number <= original_finalized_number { - reverted_up_to_finalized.insert(hash); - } - } + let _ = storage.leaves.revert(best_hash_after, best_number_after); storage.hashes.insert(best_number_after, best_hash_after); From 2e85f4da23880da6820f6f5981fc5a9c52956100 Mon Sep 17 00:00:00 2001 From: Diego Date: Tue, 18 Nov 2025 14:44:08 -0300 Subject: [PATCH 41/44] Add comment explaining removed_keys usage --- .../lazy_loading/backend/forked_lazy_backend.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs index a4e208cbfb31e..f0305bb607e7c 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -63,7 +63,11 @@ pub struct ForkedLazyBackend { pub(crate) block_hash: Option, pub(crate) fork_block: Option, pub(crate) db: Arc>>>, + + /// Keys explicitly deleted after fork. Prevents RPC fallback from returning stale values + /// for deleted keys (distinguishes "not cached locally" from "deleted"). 
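+    /// For child-trie entries the key recorded here is the child trie's storage key
+    /// concatenated with the child key (make_composite_child_key, added in a later
+    /// patch of this series), so a deletion under one child trie cannot shadow the
+    /// same key in another child trie or in the top-level storage.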
pub(crate) removed_keys: Arc>>>, + pub(crate) before_fork: bool, } From 4a6a85082126e584ce12abb76718d2f8018b20cd Mon Sep 17 00:00:00 2001 From: Diego Date: Wed, 19 Nov 2025 13:54:07 -0300 Subject: [PATCH 42/44] Use composite keys for child storage keys --- .../backend/forked_lazy_backend.rs | 24 ++++-- .../lazy_loading/backend/mod.rs | 17 +++- .../lazy_loading/backend/tests.rs | 85 +++++++++++++++++++ 3 files changed, 116 insertions(+), 10 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs index f0305bb607e7c..70391010a9ff3 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -1,3 +1,4 @@ +use super::make_composite_child_key; use crate::substrate_node::lazy_loading::{LAZY_LOADING_LOG_TARGET, rpc_client::RPCClient}; use alloy_primitives::hex; use parking_lot::RwLock; @@ -236,7 +237,9 @@ impl sp_state_machine::Backend Ok(Some(value)), Ok(None) => { - if self.removed_keys.read().contains(key) { + let composite_key = + make_composite_child_key(child_info.storage_key(), key); + if self.removed_keys.read().contains(&composite_key) { return Ok(None); } @@ -281,7 +284,9 @@ impl sp_state_machine::Backend Ok(Some(hash)), Ok(None) => { - if self.removed_keys.read().contains(key) { + let composite_key = + make_composite_child_key(child_info.storage_key(), key); + if self.removed_keys.read().contains(&composite_key) { return Ok(None); } @@ -361,15 +366,20 @@ impl sp_state_machine::Backend Some(next_key), - _ if !self.removed_keys.read().contains(key) => { - if self.rpc().is_some() { - remote_fetch(self.fork_block) + // Otherwise, check removed_keys and try remote fetch if not removed + _ => { + let composite_key = + make_composite_child_key(child_info.storage_key(), key); + if !self.removed_keys.read().contains(&composite_key) { + if self.rpc().is_some() { + remote_fetch(self.fork_block) + } else { + None + } } else { None } } - // Otherwise, there's no next key - _ => None, } } .filter(|next_key| next_key != key); diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index 0743cc1307047..b804d59cd360b 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -131,12 +131,13 @@ impl backend::Backend for Backend Vec { + let mut composite = Vec::with_capacity(child_storage_key.len() + key.len()); + composite.extend_from_slice(child_storage_key); + composite.extend_from_slice(key); + composite +} \ No newline at end of file diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs index fc97e59d095be..42cde53c2ce8f 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -637,3 +637,88 @@ fn blockchain_justifications_queries_rpc_when_not_local() { let result = backend.blockchain().justifications(unknown_hash).unwrap(); assert_eq!(result, None); } + +#[test] +fn child_storage_removed_keys_uses_composite_key() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(5); + 
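For reference, the composite-key construction this test relies on is plain byte concatenation. The helper below matches the make_composite_child_key added to mod.rs in this patch, and the short demo (literal child names stand in for child_info.storage_key(), so treat them as illustrative) shows why a tombstone recorded for one child trie does not leak into another child trie or into top-level storage.

    use std::collections::HashSet;

    /// Same construction as the patch's make_composite_child_key helper.
    fn make_composite_child_key(child_storage_key: &[u8], key: &[u8]) -> Vec<u8> {
        let mut composite = Vec::with_capacity(child_storage_key.len() + key.len());
        composite.extend_from_slice(child_storage_key);
        composite.extend_from_slice(key);
        composite
    }

    fn main() {
        let mut removed: HashSet<Vec<u8>> = HashSet::new();

        // Delete "same_key" only in child trie "child1".
        removed.insert(make_composite_child_key(b"child1", b"same_key"));

        // The tombstone is scoped: "child2" and the top trie are unaffected.
        assert!(removed.contains(&make_composite_child_key(b"child1", b"same_key")));
        assert!(!removed.contains(&make_composite_child_key(b"child2", b"same_key")));
        assert!(!removed.contains(&b"same_key".to_vec()));
    }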
let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // make block #6 + let b6 = make_block(6, cp.hash(), vec![]); + rpc.put_header(b6.header.clone()); + rpc.put_block(b6.clone(), None); + let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); + + let child_info1 = ChildInfo::new_default(b"child1"); + let child_info2 = ChildInfo::new_default(b"child2"); + let key = b"same_key".to_vec(); + + // Put values in RPC for both child storages with the same key + rpc.put_child_storage( + cp.hash(), + child_info1.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"value1".to_vec()), + ); + rpc.put_child_storage( + cp.hash(), + child_info2.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"value2".to_vec()), + ); + + // Mark the key as removed only for child1 using composite key + let composite_key1 = super::make_composite_child_key(child_info1.storage_key(), &key); + state.removed_keys.write().insert(composite_key1); + + // child1 should return None (key is removed) + let v1 = state.child_storage(&child_info1, &key).unwrap(); + assert_eq!(v1, None, "child1 key should be removed"); + + // child2 should still fetch from RPC (different composite key) + let v2 = state.child_storage(&child_info2, &key).unwrap(); + assert_eq!(v2, Some(b"value2".to_vec()), "child2 key should still be accessible"); +} + +#[test] +fn child_storage_removed_keys_no_collision_with_main_storage() { + use polkadot_sdk::sp_storage::ChildInfo; + + let rpc = std::sync::Arc::new(Rpc::new()); + let cp = checkpoint(5); + let backend = Backend::::new(Some(rpc.clone()), Some(cp.clone())); + + // make block #6 + let b6 = make_block(6, cp.hash(), vec![]); + rpc.put_header(b6.header.clone()); + rpc.put_block(b6.clone(), None); + let state = backend.state_at(b6.header.hash(), TrieCacheContext::Trusted).unwrap(); + + let child_info = ChildInfo::new_default(b"child1"); + let key = b"test_key".to_vec(); + + // Put value in main storage + rpc.put_storage(cp.hash(), StorageKey(key.clone()), StorageData(b"main_value".to_vec())); + + // Put value in child storage with the same key + rpc.put_child_storage( + cp.hash(), + child_info.storage_key().to_vec(), + StorageKey(key.clone()), + StorageData(b"child_value".to_vec()), + ); + + // Mark the key as removed in main storage (just the raw key) + state.removed_keys.write().insert(key.clone()); + + // Main storage should return None + let v_main = state.storage(&key).unwrap(); + assert_eq!(v_main, None, "main storage key should be removed"); + + // Child storage should still work (uses composite key) + let v_child = state.child_storage(&child_info, &key).unwrap(); + assert_eq!(v_child, Some(b"child_value".to_vec()), "child storage key should not be affected"); +} From dde6f2eb1998194a0939930d3c3a4a130ae2acfd Mon Sep 17 00:00:00 2001 From: Diego Date: Wed, 19 Nov 2025 14:00:57 -0300 Subject: [PATCH 43/44] Fix clippy and fmt --- .../lazy_loading/backend/forked_lazy_backend.rs | 15 ++++----------- .../substrate_node/lazy_loading/backend/mod.rs | 2 +- .../substrate_node/lazy_loading/backend/tests.rs | 5 +++-- 3 files changed, 8 insertions(+), 14 deletions(-) diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs index 70391010a9ff3..58597a0e120bd 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs +++ 
b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/forked_lazy_backend.rs @@ -237,8 +237,7 @@ impl sp_state_machine::Backend Ok(Some(value)), Ok(None) => { - let composite_key = - make_composite_child_key(child_info.storage_key(), key); + let composite_key = make_composite_child_key(child_info.storage_key(), key); if self.removed_keys.read().contains(&composite_key) { return Ok(None); } @@ -284,8 +283,7 @@ impl sp_state_machine::Backend Ok(Some(hash)), Ok(None) => { - let composite_key = - make_composite_child_key(child_info.storage_key(), key); + let composite_key = make_composite_child_key(child_info.storage_key(), key); if self.removed_keys.read().contains(&composite_key) { return Ok(None); } @@ -368,14 +366,9 @@ impl sp_state_machine::Backend Some(next_key), // Otherwise, check removed_keys and try remote fetch if not removed _ => { - let composite_key = - make_composite_child_key(child_info.storage_key(), key); + let composite_key = make_composite_child_key(child_info.storage_key(), key); if !self.removed_keys.read().contains(&composite_key) { - if self.rpc().is_some() { - remote_fetch(self.fork_block) - } else { - None - } + if self.rpc().is_some() { remote_fetch(self.fork_block) } else { None } } else { None } diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs index b804d59cd360b..9df04cae38788 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/mod.rs @@ -466,4 +466,4 @@ pub fn make_composite_child_key(child_storage_key: &[u8], key: &[u8]) -> Vec composite.extend_from_slice(child_storage_key); composite.extend_from_slice(key); composite -} \ No newline at end of file +} diff --git a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs index 42cde53c2ce8f..92597a6dfafb3 100644 --- a/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs +++ b/crates/anvil-polkadot/src/substrate_node/lazy_loading/backend/tests.rs @@ -598,7 +598,7 @@ fn blockchain_status_queries_rpc_when_not_local() { let h21 = b21.header.hash(); // Block is not in local storage yet, but exists in RPC - rpc.put_header(b21.header.clone()); + rpc.put_header(b21.header); // status() should query RPC and return InChain let status = backend.blockchain().status(h21).unwrap(); @@ -623,7 +623,8 @@ fn blockchain_justifications_queries_rpc_when_not_local() { let h21 = b21.header.hash(); // Create justifications - let justifications = polkadot_sdk::sp_runtime::Justifications::from((*b"TEST", vec![1, 2, 3, 4])); + let justifications = + polkadot_sdk::sp_runtime::Justifications::from((*b"TEST", vec![1, 2, 3, 4])); // Block is not in local storage yet, but exists in RPC with justifications rpc.put_block(b21, Some(justifications.clone())); From a4e4ff58224d5109a0ea8f1ee98d83e8344e6aaa Mon Sep 17 00:00:00 2001 From: Diego Date: Thu, 20 Nov 2025 20:26:17 -0300 Subject: [PATCH 44/44] Add test to check smart contract state isolation --- .../tests/it/contract_isolation.rs | 107 ++++++++++++++++++ crates/anvil-polkadot/tests/it/main.rs | 1 + 2 files changed, 108 insertions(+) create mode 100644 crates/anvil-polkadot/tests/it/contract_isolation.rs diff --git a/crates/anvil-polkadot/tests/it/contract_isolation.rs b/crates/anvil-polkadot/tests/it/contract_isolation.rs new file mode 100644 index 0000000000000..9b218a578cf80 --- 
/dev/null +++ b/crates/anvil-polkadot/tests/it/contract_isolation.rs @@ -0,0 +1,107 @@ +use std::time::Duration; + +use crate::{ + abi::SimpleStorage, + utils::{TestNode, get_contract_code, unwrap_response}, +}; +use alloy_primitives::{Address, U256}; +use alloy_rpc_types::{TransactionInput, TransactionRequest}; +use alloy_sol_types::SolCall; +use anvil_core::eth::EthRequest; +use anvil_polkadot::{ + api_server::revive_conversions::ReviveAddress, config::{AnvilNodeConfig, SubstrateNodeConfig}, +}; +use polkadot_sdk::pallet_revive::evm::Account; + +/// Tests that multiple contract instances maintain independent state +#[tokio::test(flavor = "multi_thread")] +async fn test_multiple_contract_instances_independent_storage() { + let anvil_node_config = AnvilNodeConfig::test_config(); + let substrate_node_config = SubstrateNodeConfig::new(&anvil_node_config); + let mut node = TestNode::new(anvil_node_config.clone(), substrate_node_config).await.unwrap(); + + let alith = Account::from(subxt_signer::eth::dev::alith()); + let alith_address = ReviveAddress::new(alith.address()); + let contract_code = get_contract_code("SimpleStorage"); + + // Deploy 3 instances of SimpleStorage contract (nonces 0, 1, 2) + let contract1_tx = node.deploy_contract(&contract_code.init, alith.address(), None).await; + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let contract2_tx = node.deploy_contract(&contract_code.init, alith.address(), None).await; + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let contract3_tx = node.deploy_contract(&contract_code.init, alith.address(), None).await; + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let receipt1 = node.get_transaction_receipt(contract1_tx).await; + let contract1_address = receipt1.contract_address.unwrap(); + let receipt2 = node.get_transaction_receipt(contract2_tx).await; + let contract2_address = receipt2.contract_address.unwrap(); + let receipt3 = node.get_transaction_receipt(contract3_tx).await; + let contract3_address = receipt3.contract_address.unwrap(); + + // Verify all contracts are deployed successfully + assert_eq!(receipt1.status, Some(polkadot_sdk::pallet_revive::U256::from(1))); + assert_eq!(receipt2.status, Some(polkadot_sdk::pallet_revive::U256::from(1))); + assert_eq!(receipt3.status, Some(polkadot_sdk::pallet_revive::U256::from(1))); + + // Set different values for each contract (Contract 1: 100, Contract 2: 200, Contract 3: 300) + let set_value1 = SimpleStorage::setValueCall::new((U256::from(100),)).abi_encode(); + let call_tx1 = TransactionRequest::default() + .from(Address::from(alith_address)) + .to(Address::from(ReviveAddress::new(contract1_address))) + .input(TransactionInput::both(set_value1.into())); + node.send_transaction(call_tx1, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let set_value2 = SimpleStorage::setValueCall::new((U256::from(200),)).abi_encode(); + let call_tx2 = TransactionRequest::default() + .from(Address::from(alith_address)) + .to(Address::from(ReviveAddress::new(contract2_address))) + .input(TransactionInput::both(set_value2.into())); + node.send_transaction(call_tx2, 
None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + let set_value3 = SimpleStorage::setValueCall::new((U256::from(300),)).abi_encode(); + let call_tx3 = TransactionRequest::default() + .from(Address::from(alith_address)) + .to(Address::from(ReviveAddress::new(contract3_address))) + .input(TransactionInput::both(set_value3.into())); + node.send_transaction(call_tx3, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + // Verify each contract maintains its own independent storage + let value1 = node.get_storage_at(U256::from(0), contract1_address).await; + let value2 = node.get_storage_at(U256::from(0), contract2_address).await; + let value3 = node.get_storage_at(U256::from(0), contract3_address).await; + + assert_eq!(value1, 100, "Contract 1 should have value 100"); + assert_eq!(value2, 200, "Contract 2 should have value 200"); + assert_eq!(value3, 300, "Contract 3 should have value 300"); + + // Update contract 2's value to 999 and verify others are unaffected + let update_value2 = SimpleStorage::setValueCall::new((U256::from(999),)).abi_encode(); + let update_tx2 = TransactionRequest::default() + .from(Address::from(alith_address)) + .to(Address::from(ReviveAddress::new(contract2_address))) + .input(TransactionInput::both(update_value2.into())); + node.send_transaction(update_tx2, None).await.unwrap(); + unwrap_response::<()>(node.eth_rpc(EthRequest::Mine(None, None)).await.unwrap()).unwrap(); + tokio::time::sleep(Duration::from_millis(500)).await; + + // Verify only contract 2 changed + let value1_after = node.get_storage_at(U256::from(0), contract1_address).await; + let value2_after = node.get_storage_at(U256::from(0), contract2_address).await; + let value3_after = node.get_storage_at(U256::from(0), contract3_address).await; + + assert_eq!(value1_after, 100, "Contract 1 value should remain 100"); + assert_eq!(value2_after, 999, "Contract 2 value should be updated to 999"); + assert_eq!(value3_after, 300, "Contract 3 value should remain 300"); +} diff --git a/crates/anvil-polkadot/tests/it/main.rs b/crates/anvil-polkadot/tests/it/main.rs index cb7c29087c063..7e0b7634b86a3 100644 --- a/crates/anvil-polkadot/tests/it/main.rs +++ b/crates/anvil-polkadot/tests/it/main.rs @@ -1,4 +1,5 @@ mod abi; +mod contract_isolation; mod genesis; mod impersonation; mod mining;
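
The core of PATCH 42/43 above is that child-storage entries in removed_keys are recorded as child-storage-key ++ item-key rather than as the raw item key, so deleting a key in one child trie no longer masks the same raw key in another child trie or in top-level storage. Below is a minimal standalone sketch of that property, mirroring make_composite_child_key from the patch; the BTreeSet is only an illustrative stand-in, since the concrete set type behind removed_keys is not visible in these hunks.

use std::collections::BTreeSet;

/// Mirrors `make_composite_child_key` from PATCH 42: child storage key ++ item key.
fn make_composite_child_key(child_storage_key: &[u8], key: &[u8]) -> Vec<u8> {
    let mut composite = Vec::with_capacity(child_storage_key.len() + key.len());
    composite.extend_from_slice(child_storage_key);
    composite.extend_from_slice(key);
    composite
}

#[test]
fn composite_keys_keep_child_tries_and_top_level_storage_apart() {
    // Illustrative stand-in for the backend's removed-keys set.
    let mut removed: BTreeSet<Vec<u8>> = BTreeSet::new();
    let key = b"same_key".to_vec();

    // Deleting `same_key` under child1 records only child1's composite entry.
    removed.insert(make_composite_child_key(b"child1", &key));

    // child1 is masked; child2 and top-level storage (the raw key) are not.
    assert!(removed.contains(&make_composite_child_key(b"child1", &key)));
    assert!(!removed.contains(&make_composite_child_key(b"child2", &key)));
    assert!(!removed.contains(&key));
}

The helper concatenates the two parts without a length prefix, so distinctness relies on the child storage keys used in practice; the tests added in PATCH 42 exercise the distinct-name case (child1 vs child2) and the child-versus-top-level case.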
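
The forked_lazy_backend.rs hunks apply the same idea to the read path: a locally stored value wins, a composite entry in removed_keys short-circuits to None, and only then is the fork consulted over RPC. The following is a toy model of that ordering, with in-memory maps standing in for the local database and the RPCClient (not the crate's actual types).

use std::collections::{BTreeMap, BTreeSet};

/// Toy model of the child-storage read order: local value, then the
/// composite removed-key check, then the fork. The maps stand in for the
/// local database and the RPC client.
struct ForkedChildRead {
    local: BTreeMap<(Vec<u8>, Vec<u8>), Vec<u8>>,
    removed_keys: BTreeSet<Vec<u8>>,
    remote: Option<BTreeMap<(Vec<u8>, Vec<u8>), Vec<u8>>>,
}

impl ForkedChildRead {
    fn child_storage(&self, child_key: &[u8], key: &[u8]) -> Option<Vec<u8>> {
        // 1. A locally written value always wins.
        if let Some(value) = self.local.get(&(child_key.to_vec(), key.to_vec())) {
            return Some(value.clone());
        }
        // 2. A composite (child_key ++ key) entry marks the key as removed
        //    locally, so the fork must not be consulted for it.
        let composite: Vec<u8> = [child_key, key].concat();
        if self.removed_keys.contains(&composite) {
            return None;
        }
        // 3. Otherwise fall back to the forked chain, if one is configured.
        self.remote
            .as_ref()
            .and_then(|remote| remote.get(&(child_key.to_vec(), key.to_vec())).cloned())
    }
}

#[test]
fn removed_composite_key_masks_only_its_own_child_trie() {
    let mut remote = BTreeMap::new();
    remote.insert((b"child1".to_vec(), b"k".to_vec()), b"v1".to_vec());
    remote.insert((b"child2".to_vec(), b"k".to_vec()), b"v2".to_vec());

    let mut backend = ForkedChildRead {
        local: BTreeMap::new(),
        removed_keys: BTreeSet::new(),
        remote: Some(remote),
    };

    // Removing `k` under child1 masks only child1's forked value.
    backend.removed_keys.insert([b"child1".as_slice(), b"k".as_slice()].concat());
    assert_eq!(backend.child_storage(b"child1", b"k"), None);
    assert_eq!(backend.child_storage(b"child2", b"k"), Some(b"v2".to_vec()));
}

This mirrors child_storage_removed_keys_uses_composite_key in the patch: after the composite entry is inserted, child1 resolves to None while child2 still falls through to the fork.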