diff --git a/.github/workflows/binaries-build.yml b/.github/workflows/binaries-build.yml index c22eb6a063..e15456ad2d 100644 --- a/.github/workflows/binaries-build.yml +++ b/.github/workflows/binaries-build.yml @@ -59,13 +59,13 @@ jobs: 'aarch64-unknown-linux-gnu' run: | sudo apt update - sudo apt install -y clang llvm libudev-dev protobuf-compiler libssl-dev + sudo apt install -y clang llvm libudev-dev protobuf-compiler libssl-dev pkg-config - name: Setup build deps (aarch64-unknown-linux-gnu) if: matrix.build == 'aarch64-unknown-linux-gnu' run: | sudo apt update - sudo apt install -y g++-aarch64-linux-gnu libc6-dev-arm64-cross + sudo apt install -y g++-aarch64-linux-gnu libc6-dev-arm64-cross pkg-config-aarch64-linux-gnu export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc export CC_aarch64_unknown_linux_gnu=aarch64-linux-gnu-gcc export CXX_aarch64_unknown_linux_gnu=aarch64-linux-gnu-g++ @@ -99,15 +99,6 @@ jobs: export CXX_aarch64_unknown_linux_gnu=/usr/bin/aarch64-linux-gnu-g++ cargo build --release --target ${{ matrix.build }} - - name: Pre-Build (x86_64-pc-windows-msvc) - if: matrix.build == 'x86_64-pc-windows-msvc' - shell: bash - run: | - cargo fetch --target ${{ matrix.build }} - export reg_folder=$(ls C:/Users/runneradmin/.cargo/registry/src | grep github.com) - export pdb_folder=$(ls C:/Users/runneradmin/.cargo/registry/src/$reg_folder | grep parity-db) - sed -ir 's/madvise_random(_id: TableId, _map: \&mut memmap2::MmapMut)/madvise_random(_map: \&mut memmap2::MmapMut)/g' C:/Users/runneradmin/.cargo/registry/src/$reg_folder/$pdb_folder/src/file.rs - - name: Build (x86_64-pc-windows-msvc) if: matrix.build == 'x86_64-pc-windows-msvc' shell: bash diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index fad398920e..8dfeca5f68 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -45,6 +45,15 @@ jobs: uses: ./.github/workflows/starknet-js-tests.yml needs: madara_commands + starknet-foundry-tests: + name: Run Starknet Foundry compatibility tests + runs-on: ubuntu-latest + needs: madara_commands + env: + SNCAST_VERSION: "0.8.3" + steps: + - uses: keep-starknet-strange/starknet-foundry-compatibility-tests@main + # https://github.com/keep-starknet-strange/madara/issues/1097 # benchmark: # name: Run benchmarks diff --git a/.github/workflows/starknet-js-tests.yml b/.github/workflows/starknet-js-tests.yml index d6d17bd86e..b237187e82 100644 --- a/.github/workflows/starknet-js-tests.yml +++ b/.github/workflows/starknet-js-tests.yml @@ -20,7 +20,7 @@ jobs: fail-on-cache-miss: true - name: Setup dev chain run: | - ./target/release/madara setup --chain=dev --from-remote + ./target/release/madara setup --chain=dev --from-local=configs - name: Run starknet-js test run: |- ./target/release/madara --dev --execution native & diff --git a/.github/workflows/starknet-rpc-tests.yml b/.github/workflows/starknet-rpc-tests.yml index c7bd2068a8..bd2c67161e 100644 --- a/.github/workflows/starknet-rpc-tests.yml +++ b/.github/workflows/starknet-rpc-tests.yml @@ -29,7 +29,7 @@ jobs: sudo apt-get install -y clang llvm libudev-dev protobuf-compiler - name: Setup dev chain run: | - ./target/release/madara setup --chain=dev --from-remote + ./target/release/madara setup --chain=dev --from-local=configs - name: Run rpc native test run: |- ./target/release/madara --dev --sealing=manual --execution=Native & diff --git a/.gitmodules b/.gitmodules index 90c06c2e15..32a1c93e3b 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,15 +1,9 
@@ [submodule "madara-infra"] path = madara-infra url = https://github.com/keep-starknet-strange/madara-infra -[submodule "madara-app"] - path = madara-app - url = https://github.com/keep-starknet-strange/madara-app [submodule "madara-docs"] path = madara-docs url = https://github.com/keep-starknet-strange/madara-docs -[submodule "madara-dev-explorer"] - path = madara-dev-explorer - url = https://github.com/keep-starknet-strange/madara-dev-explorer [submodule "madara-tsukuyomi"] path = madara-tsukuyomi url = https://github.com/keep-starknet-strange/madara-tsukuyomi diff --git a/CHANGELOG.md b/CHANGELOG.md index c9ab985c63..e76e049b6d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,28 @@ ## Next release +- chore: remove crates that have been copy-pasted from plkdtSDK +- feat(rpc): return deployed contract address and actual fee in transaction + receipt +- fix: Wait for 1 minute for transaction to be processed in + get_transaction_receipt rpc +- ci: Fix starknet foundry sncast not found +- fix: Ensure transaction checks are compatible with starknet-rs +- ci: Run Starknet Foundry tests against Madara RPC +- fix: add name, symbol and decimals to fee token storage +- fix: dependencies for dockerfile and binaries +- docs: add translation of madara beast article to spanish +- chore: update starknet-js version in faucet-setup docs +- dev(compilation): add incremental compilation +- feat(rpc): add support for bulk estimate fee +- feat: add argent multicall contract to genesis +- chore(data-availability): update avail-subxt to version 0.4.0 +- fix(ci): setup should fetch files from local config +- chore: deprecate `madara-app` and `madara-dev-explorer` modules +- chore(data-availability-avail): implement fire and forget, and add ws + reconnection logic +- chore: update `polkadot-sdk` to `release-polkadot-v1.3.0` + ## v0.5.0 - chore: release v0.5.0 @@ -50,7 +72,6 @@ madara node - feat(cache-option): add an option to enable aggressive caching in command-line parameters -- fix: Ensure transaction checks are compatible with starknet-rs ## v0.4.0 diff --git a/Cargo.lock b/Cargo.lock index ecb296a458..878d0f3729 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -110,7 +110,7 @@ dependencies = [ "cipher 0.3.0", "ctr 0.8.0", "ghash 0.4.4", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -124,7 +124,7 @@ dependencies = [ "cipher 0.4.4", "ctr 0.9.2", "ghash 0.5.0", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -149,25 +149,26 @@ dependencies = [ [[package]] name = "ahash" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if", - "getrandom 0.2.10", + "getrandom 0.2.11", "once_cell", "version_check", + "zerocopy", ] [[package]] @@ -196,6 +197,28 @@ dependencies = [ "smol_str", ] +[[package]] +name = "alsa" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2562ad8dcf0f789f65c6fdaad8a8a9708ed6b488e649da28c01656ad66b8b47" +dependencies = [ + "alsa-sys", + 
"bitflags 1.3.2", + "libc", + "nix", +] + +[[package]] +name = "alsa-sys" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db8fee663d06c4e303404ef5f40488a53e062f89ba8bfed81f42325aafad1527" +dependencies = [ + "libc", + "pkg-config", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -283,12 +306,49 @@ dependencies = [ "num-traits 0.2.17", ] +[[package]] +name = "aquamarine" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df752953c49ce90719c7bf1fc587bc8227aed04732ea0c0f85e5397d7fdbd1a1" +dependencies = [ + "include_dir", + "itertools 0.10.5", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "arc-swap" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" +[[package]] +name = "ark-bls12-377" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb00293ba84f51ce3bd026bd0de55899c4e68f0a39a5728cebae3a73ffdc0a4f" +dependencies = [ + "ark-ec", + "ark-ff 0.4.2", + "ark-std 0.4.0", +] + +[[package]] +name = "ark-bls12-381" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c775f0d12169cba7aae4caeb547bb6a50781c7449a8aa53793827c9ec4abf488" +dependencies = [ + "ark-ec", + "ark-ff 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", +] + [[package]] name = "ark-ec" version = "0.4.2" @@ -306,6 +366,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ark-ed-on-bls12-381-bandersnatch" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9cde0f2aa063a2a5c28d39b47761aa102bda7c13c84fc118a61b87c7b2f785c" +dependencies = [ + "ark-bls12-381", + "ark-ec", + "ark-ff 0.4.2", + "ark-std 0.4.0", +] + [[package]] name = "ark-ff" version = "0.3.0" @@ -402,6 +474,20 @@ dependencies = [ "hashbrown 0.13.2", ] +[[package]] +name = "ark-scale" +version = "0.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bd73bb6ddb72630987d37fa963e99196896c0d0ea81b7c894567e74a2f83af" +dependencies = [ + "ark-ec", + "ark-ff 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "parity-scale-codec", + "scale-info", +] + [[package]] name = "ark-secp256k1" version = "0.4.0" @@ -413,6 +499,21 @@ dependencies = [ "ark-std 0.4.0", ] +[[package]] +name = "ark-secret-scalar" +version = "0.0.2" +source = "git+https://github.com/w3f/ring-vrf?rev=3ddc20?rev=3ddc20#3ddc2051066c4b3f0eadd0ba5700df12500d9754" +dependencies = [ + "ark-ec", + "ark-ff 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "ark-transcript", + "digest 0.10.7", + "rand_core 0.6.4", + "zeroize", +] + [[package]] name = "ark-serialize" version = "0.3.0" @@ -466,6 +567,19 @@ dependencies = [ "rand 0.8.5", ] +[[package]] +name = "ark-transcript" +version = "0.0.2" +source = "git+https://github.com/w3f/ring-vrf?rev=3ddc20?rev=3ddc20#3ddc2051066c4b3f0eadd0ba5700df12500d9754" +dependencies = [ + "ark-ff 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "digest 0.10.7", + "rand_core 0.6.4", + "sha3", +] + [[package]] name = "array-bytes" version = "4.2.0" @@ -474,9 +588,9 @@ checksum = "f52f63c5c1316a16a4b35eaac8b76a98248961a533f061684cb2a7cb0eafb6c6" [[package]] name = "array-bytes" -version = "6.1.0" +version = "6.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d9b1c5a481ec30a5abd8dfbd94ab5cf1bb4e9a66be7f1b3b322f2f1170c200fd" +checksum = "de17a919934ad8c5cc99a1a74de4e2dab95d6121a8f27f94755ff525b630382c" [[package]] name = "array-init" @@ -622,9 +736,9 @@ dependencies = [ "log", "parking", "polling", - "rustix 0.37.26", + "rustix 0.37.27", "slab", - "socket2 0.4.9", + "socket2 0.4.10", "waker-fn", ] @@ -639,26 +753,15 @@ dependencies = [ [[package]] name = "async-lock" -version = "3.0.0" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45e900cdcd39bb94a14487d3f7ef92ca222162e6c7c3fe7cb3550ea75fb486ed" +checksum = "deb2ab2aa8a746e221ab826c73f48bc6ba41be6763f0855cb249eb6d154cf1d7" dependencies = [ - "event-listener 3.0.1", + "event-listener 3.1.0", "event-listener-strategy", "pin-project-lite 0.2.13", ] -[[package]] -name = "async-recursion" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.38", -] - [[package]] name = "async-trait" version = "0.1.74" @@ -667,7 +770,7 @@ checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -731,8 +834,8 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "avail-subxt" -version = "0.3.0" -source = "git+https://github.com/availproject/avail?rev=0958c6ed499497b70a33ab072dcbe86c762f3976#0958c6ed499497b70a33ab072dcbe86c762f3976" +version = "0.4.0" +source = "git+https://github.com/availproject/avail?tag=v1.8.0.0#9c5f37b92303991dc9cd39534d9298e2298114fd" dependencies = [ "anyhow", "curve25519-dalek 2.1.3", @@ -746,7 +849,7 @@ dependencies = [ "schnorrkel", "serde", "serde-hex", - "sp-core 21.0.0", + "sp-core 21.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "structopt", "subxt", "tokio", @@ -767,6 +870,27 @@ dependencies = [ "rustc-demangle", ] +[[package]] +name = "bandersnatch_vrfs" +version = "0.0.4" +source = "git+https://github.com/w3f/ring-vrf?rev=3ddc20?rev=3ddc20#3ddc2051066c4b3f0eadd0ba5700df12500d9754" +dependencies = [ + "ark-bls12-381", + "ark-ec", + "ark-ed-on-bls12-381-bandersnatch", + "ark-ff 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "dleq_vrf", + "fflonk", + "merlin 3.0.0", + "rand_chacha 0.3.1", + "rand_core 0.6.4", + "ring 0.1.0", + "sha2 0.10.8", + "zeroize", +] + [[package]] name = "base-x" version = "0.2.11" @@ -799,9 +923,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.4" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" +checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" [[package]] name = "base64ct" @@ -836,15 +960,6 @@ dependencies = [ "serde", ] -[[package]] -name = "binary-merkle-tree" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "hash-db", - "log", -] - [[package]] name = "bincode" version = "1.3.3" @@ -880,7 +995,27 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.38", + "syn 2.0.39", +] + +[[package]] +name = "bindgen" +version = "0.69.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9ffcebc3849946a7170a05992aac39da343a90676ab392c51a4280981d6379c2" +dependencies = [ + "bitflags 2.4.1", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "peeking_take_while", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.39", ] [[package]] @@ -922,6 +1057,18 @@ dependencies = [ "wyz", ] +[[package]] +name = "blake2" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94cb07b0da6a73955f8fb85d24c466778e70cda767a568229b104f0264089330" +dependencies = [ + "byte-tools", + "crypto-mac 0.7.0", + "digest 0.8.1", + "opaque-debug 0.2.3", +] + [[package]] name = "blake2" version = "0.10.6" @@ -1024,7 +1171,7 @@ checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" [[package]] name = "blockifier" version = "0.1.0-rc2" -source = "git+https://github.com/keep-starknet-strange/blockifier?branch=no_std-support-7578442#37d3e3b64123b6c31558a883ee5e5f68ffb582f4" +source = "git+https://github.com/keep-starknet-strange/blockifier?branch=no_std-support-7578442#59ab602d50735f01a024b4542f066e6b1a154fcb" dependencies = [ "ark-ff 0.4.2", "ark-secp256k1", @@ -1051,9 +1198,9 @@ dependencies = [ "serde", "serde_json", "sha3", - "sp-arithmetic 6.0.0", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "spin 0.9.8", - "starknet-crypto 0.5.1", + "starknet-crypto 0.5.2", "starknet_api 0.4.1 (git+https://github.com/keep-starknet-strange/starknet-api?branch=no_std-support-dc83f05)", "strum 0.24.1", "strum_macros 0.24.3", @@ -1090,9 +1237,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c79ad7fb2dd38f3dabd76b09c6a5a20c038fc0213ef1e9afd30eb777f120f019" +checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c" dependencies = [ "memchr", "serde", @@ -1167,6 +1314,16 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "c2-chacha" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d27dae93fe7b1e0424dc57179ac396908c26b035a87234809f5c4dfd1b47dc80" +dependencies = [ + "cipher 0.2.5", + "ppv-lite86", +] + [[package]] name = "cached" version = "0.44.0" @@ -1222,11 +1379,11 @@ dependencies = [ [[package]] name = "cairo-lang-casm" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc7f7cb89bc3f52c2c738f3e87c8f8773bd3456cae1d322d100d4b0da584f3c" +checksum = "c0cca7891c0df31a87740acbcda3f3c04e6516e283b67842386873f3a181fd91" dependencies = [ - "cairo-lang-utils 2.2.0", + "cairo-lang-utils 2.3.1", "indoc", "num-bigint", "num-traits 0.2.17", @@ -1276,27 +1433,25 @@ dependencies = [ [[package]] name = "cairo-lang-compiler" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4f2c54b065f7fd97bf8d5df76cbcbbd01d8a8c319d281796ee20ecc48e16ca8" +checksum = "f8c4bd031bf62046af88e75b86f419ad7e2317c3b7ee26cbad367f2ff2f2bfa4" dependencies = [ "anyhow", - "cairo-lang-defs 2.2.0", - "cairo-lang-diagnostics 2.2.0", - "cairo-lang-filesystem 2.2.0", - "cairo-lang-lowering 2.2.0", - "cairo-lang-parser 2.2.0", - "cairo-lang-plugins 2.2.0", - "cairo-lang-project 2.2.0", - "cairo-lang-semantic 2.2.0", - "cairo-lang-sierra 2.2.0", - "cairo-lang-sierra-generator 2.2.0", - "cairo-lang-syntax 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-defs 2.3.1", + "cairo-lang-diagnostics 2.3.1", + 
"cairo-lang-filesystem 2.3.1", + "cairo-lang-lowering 2.3.1", + "cairo-lang-parser 2.3.1", + "cairo-lang-plugins 2.3.1", + "cairo-lang-project 2.3.1", + "cairo-lang-semantic 2.3.1", + "cairo-lang-sierra 2.3.1", + "cairo-lang-sierra-generator 2.3.1", + "cairo-lang-syntax 2.3.1", + "cairo-lang-utils 2.3.1", "itertools 0.11.0", - "log", "salsa", - "smol_str", "thiserror", ] @@ -1310,11 +1465,11 @@ dependencies = [ [[package]] name = "cairo-lang-debug" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "873ba77d4c3f780c727c7d6c738cded22b3f6d4023e30546dfe14f97a087887e" +checksum = "954529b40c914ff089bd06b4cdfa3b51f39fb8769a6f9af92ba745e4a1300bd4" dependencies = [ - "cairo-lang-utils 2.2.0", + "cairo-lang-utils 2.3.1", ] [[package]] @@ -1336,17 +1491,16 @@ dependencies = [ [[package]] name = "cairo-lang-defs" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5031fff038c27ed43769b73a6f5d41aeaea34df9af862e024c23fbb4f076249" +checksum = "2a2ab80b21943392da07b2ee54f1f7e15ac783ea1567ed27bd4682774713f7ee" dependencies = [ - "cairo-lang-debug 2.2.0", - "cairo-lang-diagnostics 2.2.0", - "cairo-lang-filesystem 2.2.0", - "cairo-lang-parser 2.2.0", - "cairo-lang-syntax 2.2.0", - "cairo-lang-utils 2.2.0", - "indexmap 2.0.2", + "cairo-lang-debug 2.3.1", + "cairo-lang-diagnostics 2.3.1", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-parser 2.3.1", + "cairo-lang-syntax 2.3.1", + "cairo-lang-utils 2.3.1", "itertools 0.11.0", "salsa", "smol_str", @@ -1366,15 +1520,14 @@ dependencies = [ [[package]] name = "cairo-lang-diagnostics" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b6cb1492e5784e1076320a5018ce7584f391b2f3b414bc0a8ab7c289fa118ce" +checksum = "07052c58dc014904bfecc6fb253a0461bbdcdd3ac41f1385ac9fba5ef9a0da61" dependencies = [ - "cairo-lang-debug 2.2.0", - "cairo-lang-filesystem 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-debug 2.3.1", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-utils 2.3.1", "itertools 0.11.0", - "salsa", ] [[package]] @@ -1390,14 +1543,12 @@ dependencies = [ [[package]] name = "cairo-lang-eq-solver" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c35dddbc63b2a4870891cc74498726aa32bfaa518596352f9bb101411cc4c584" +checksum = "eac351e6a4af689df90119d95d8fa9441b8ad1b2eef6f4868ed7a1c1808f786c" dependencies = [ - "cairo-lang-utils 2.2.0", + "cairo-lang-utils 2.3.1", "good_lp", - "indexmap 2.0.2", - "itertools 0.11.0", ] [[package]] @@ -1415,12 +1566,12 @@ dependencies = [ [[package]] name = "cairo-lang-filesystem" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32ce0b8e66a6085ae157d43b5c162d60166f0027d6f125c50ee74e4dc7916ff6" +checksum = "77f253875f0503f13d2a15e303db4f77a932a84600787a496938d0daf687945d" dependencies = [ - "cairo-lang-debug 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-debug 2.3.1", + "cairo-lang-utils 2.3.1", "path-clean", "salsa", "serde", @@ -1454,28 +1605,27 @@ dependencies = [ [[package]] name = "cairo-lang-lowering" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29cc679f501725e03ee703559ed27d084c6f4031bd51ff86378cf845a85ee207" -dependencies = [ - "cairo-lang-debug 2.2.0", - "cairo-lang-defs 2.2.0", - "cairo-lang-diagnostics 2.2.0", - "cairo-lang-filesystem 2.2.0", - 
"cairo-lang-parser 2.2.0", - "cairo-lang-proc-macros 2.2.0", - "cairo-lang-semantic 2.2.0", - "cairo-lang-syntax 2.2.0", - "cairo-lang-utils 2.2.0", +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aa602a50c7d216beb4c261036b024b24f90ce6724d623f1b23f56076584473c" +dependencies = [ + "cairo-lang-debug 2.3.1", + "cairo-lang-defs 2.3.1", + "cairo-lang-diagnostics 2.3.1", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-parser 2.3.1", + "cairo-lang-proc-macros 2.3.1", + "cairo-lang-semantic 2.3.1", + "cairo-lang-syntax 2.3.1", + "cairo-lang-utils 2.3.1", "id-arena", - "indexmap 2.0.2", + "indexmap 2.1.0", "itertools 0.11.0", "log", "num-bigint", "num-traits 0.2.17", "once_cell", "salsa", - "smol_str", ] [[package]] @@ -1500,18 +1650,17 @@ dependencies = [ [[package]] name = "cairo-lang-parser" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdcadb046659134466bc7e11961ea8a56969dae8a54d8f985955ce0b95185c7f" +checksum = "b25e847ef219635b837cbfd8eed797a7aa6a4b01e1775065cff67b1d5bfda1fe" dependencies = [ - "cairo-lang-diagnostics 2.2.0", - "cairo-lang-filesystem 2.2.0", - "cairo-lang-syntax 2.2.0", - "cairo-lang-syntax-codegen 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-diagnostics 2.3.1", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-syntax 2.3.1", + "cairo-lang-syntax-codegen 2.3.1", + "cairo-lang-utils 2.3.1", "colored", "itertools 0.11.0", - "log", "num-bigint", "num-traits 0.2.17", "salsa", @@ -1541,20 +1690,19 @@ dependencies = [ [[package]] name = "cairo-lang-plugins" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4632790cd4ea11d4849934456a400eae7ed419f6d721f24a6b637df67b7e902f" +checksum = "4c109f0b788a95bb86cff0e3917e1ce7d75210020fc53904d2a5e3ba54728adb" dependencies = [ - "cairo-lang-defs 2.2.0", - "cairo-lang-diagnostics 2.2.0", - "cairo-lang-filesystem 2.2.0", - "cairo-lang-parser 2.2.0", - "cairo-lang-syntax 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-defs 2.3.1", + "cairo-lang-diagnostics 2.3.1", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-parser 2.3.1", + "cairo-lang-syntax 2.3.1", + "cairo-lang-utils 2.3.1", "indent", "indoc", "itertools 0.11.0", - "num-bigint", "salsa", "smol_str", ] @@ -1566,18 +1714,18 @@ source = "git+https://github.com/keep-starknet-strange/cairo.git?branch=no_std-s dependencies = [ "cairo-lang-debug 2.1.0", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "cairo-lang-proc-macros" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "170838817fc33ddb65e0a9480526df0b226b148a0fca0a5cd7071be4c6683157" +checksum = "d2bbbfe1934e11fe3cce4f23cdccd22341ed63af5d76e593234288dd4ba06f56" dependencies = [ - "cairo-lang-debug 2.2.0", + "cairo-lang-debug 2.3.1", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -1595,12 +1743,12 @@ dependencies = [ [[package]] name = "cairo-lang-project" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4162ee976c61fdeb3b621f4a76fd256e46a5c0890f750a3a9d2c9560a3bc1daf" +checksum = "e2ba814a9dd17b1341204d8e7bb67775aadebc5138a475bdf176dff0f11999cb" dependencies = [ - "cairo-lang-filesystem 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-utils 2.3.1", "serde", "smol_str", "thiserror", @@ -1632,22 +1780,20 @@ dependencies = [ [[package]] name = "cairo-lang-semantic" 
-version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13e544fa9a222bf2d007df2b5fc9b21c2a20ab7e17d6fefbcbc193de209451cd" -dependencies = [ - "cairo-lang-debug 2.2.0", - "cairo-lang-defs 2.2.0", - "cairo-lang-diagnostics 2.2.0", - "cairo-lang-filesystem 2.2.0", - "cairo-lang-parser 2.2.0", - "cairo-lang-plugins 2.2.0", - "cairo-lang-proc-macros 2.2.0", - "cairo-lang-syntax 2.2.0", - "cairo-lang-utils 2.2.0", +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19678648e0efec3f837c0d75b6071bc2afe5c4bc611e177381478c023b72a74c" +dependencies = [ + "cairo-lang-debug 2.3.1", + "cairo-lang-defs 2.3.1", + "cairo-lang-diagnostics 2.3.1", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-parser 2.3.1", + "cairo-lang-proc-macros 2.3.1", + "cairo-lang-syntax 2.3.1", + "cairo-lang-utils 2.3.1", "id-arena", "itertools 0.11.0", - "log", "num-bigint", "num-traits 0.2.17", "once_cell", @@ -1679,11 +1825,12 @@ dependencies = [ [[package]] name = "cairo-lang-sierra" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5e136b79e95a14ef38a2be91a67ceb85317407d336a5b0d418c33b23c78596a" +checksum = "79b7d09f0b7461701a9ba5d7a2260551b5026cd8f6efc6ca9ca270f6c0a6fd23" dependencies = [ - "cairo-lang-utils 2.2.0", + "anyhow", + "cairo-lang-utils 2.3.1", "const-fnv1a-hash", "convert_case 0.6.0", "derivative", @@ -1715,14 +1862,14 @@ dependencies = [ [[package]] name = "cairo-lang-sierra-ap-change" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "511ca7708faa7ba8d14ae26e1d60ead2d02028c8f664baf5ecb0fd6a0d1e20f6" +checksum = "3e66740bcfadb365d488ff9c334f68cb4cb6a6cb9666ae12109fc6eee7371116" dependencies = [ - "cairo-lang-eq-solver 2.2.0", - "cairo-lang-sierra 2.2.0", - "cairo-lang-sierra-type-size 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-eq-solver 2.3.1", + "cairo-lang-sierra 2.3.1", + "cairo-lang-sierra-type-size 2.3.1", + "cairo-lang-utils 2.3.1", "itertools 0.11.0", "thiserror", ] @@ -1742,14 +1889,14 @@ dependencies = [ [[package]] name = "cairo-lang-sierra-gas" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "351a25bc010b910919c01d5c57e937b0c3d330fc30d92702c0cb4061819df8df" +checksum = "ac27c07af11fcdc9546a9c55c1463cb871fb5b7af1daa3cdf31cfb0872da3d88" dependencies = [ - "cairo-lang-eq-solver 2.2.0", - "cairo-lang-sierra 2.2.0", - "cairo-lang-sierra-type-size 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-eq-solver 2.3.1", + "cairo-lang-sierra 2.3.1", + "cairo-lang-sierra-type-size 2.3.1", + "cairo-lang-utils 2.3.1", "itertools 0.11.0", "thiserror", ] @@ -1782,24 +1929,20 @@ dependencies = [ [[package]] name = "cairo-lang-sierra-generator" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "114091bb971c06fd072aca816af1c3f62566cd8a4b1453c786155161a36c7bce" -dependencies = [ - "cairo-lang-debug 2.2.0", - "cairo-lang-defs 2.2.0", - "cairo-lang-diagnostics 2.2.0", - "cairo-lang-filesystem 2.2.0", - "cairo-lang-lowering 2.2.0", - "cairo-lang-parser 2.2.0", - "cairo-lang-plugins 2.2.0", - "cairo-lang-proc-macros 2.2.0", - "cairo-lang-semantic 2.2.0", - "cairo-lang-sierra 2.2.0", - "cairo-lang-syntax 2.2.0", - "cairo-lang-utils 2.2.0", - "id-arena", - "indexmap 2.0.2", +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"456cd75547a127b8f4088216a419d317c753c6b9188e944846bf3a5193c14797" +dependencies = [ + "cairo-lang-debug 2.3.1", + "cairo-lang-defs 2.3.1", + "cairo-lang-diagnostics 2.3.1", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-lowering 2.3.1", + "cairo-lang-parser 2.3.1", + "cairo-lang-semantic 2.3.1", + "cairo-lang-sierra 2.3.1", + "cairo-lang-syntax 2.3.1", + "cairo-lang-utils 2.3.1", "itertools 0.11.0", "num-bigint", "once_cell", @@ -1830,21 +1973,20 @@ dependencies = [ [[package]] name = "cairo-lang-sierra-to-casm" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa1c799de62972dfd7112d563000695be94305b6f7d9bedd29f347799bf03e1c" +checksum = "da74c7c4a2df66b961a982396e0f5221d6594266aed48c76d8c22d5b0d96af5d" dependencies = [ "assert_matches", "cairo-felt 0.8.7", - "cairo-lang-casm 2.2.0", - "cairo-lang-sierra 2.2.0", - "cairo-lang-sierra-ap-change 2.2.0", - "cairo-lang-sierra-gas 2.2.0", - "cairo-lang-sierra-type-size 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-casm 2.3.1", + "cairo-lang-sierra 2.3.1", + "cairo-lang-sierra-ap-change 2.3.1", + "cairo-lang-sierra-gas 2.3.1", + "cairo-lang-sierra-type-size 2.3.1", + "cairo-lang-utils 2.3.1", "indoc", "itertools 0.11.0", - "log", "num-bigint", "num-traits 0.2.17", "thiserror", @@ -1861,12 +2003,12 @@ dependencies = [ [[package]] name = "cairo-lang-sierra-type-size" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fe73d9d58aaf9088f6ba802bcf43ce9ca4bd198190cf5bf91caa7d408dd11a" +checksum = "a7fdf2dbda71f1ed4e4020914e7494ad32db84dbc75cc8dbf05c06caef678fc8" dependencies = [ - "cairo-lang-sierra 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-sierra 2.3.1", + "cairo-lang-utils 2.3.1", ] [[package]] @@ -1912,34 +2054,29 @@ dependencies = [ [[package]] name = "cairo-lang-starknet" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75df624e71e33a31a924e799dd2a9a8284204b41d8db9c51803317bd9edff81f" +checksum = "9217e979f11980609d13d3a5adea8438ec9345709ddfebca975cc9cd1f85201e" dependencies = [ "anyhow", "cairo-felt 0.8.7", - "cairo-lang-casm 2.2.0", - "cairo-lang-compiler 2.2.0", - "cairo-lang-defs 2.2.0", - "cairo-lang-diagnostics 2.2.0", - "cairo-lang-filesystem 2.2.0", - "cairo-lang-lowering 2.2.0", - "cairo-lang-parser 2.2.0", - "cairo-lang-plugins 2.2.0", - "cairo-lang-semantic 2.2.0", - "cairo-lang-sierra 2.2.0", - "cairo-lang-sierra-ap-change 2.2.0", - "cairo-lang-sierra-gas 2.2.0", - "cairo-lang-sierra-generator 2.2.0", - "cairo-lang-sierra-to-casm 2.2.0", - "cairo-lang-syntax 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-casm 2.3.1", + "cairo-lang-compiler 2.3.1", + "cairo-lang-defs 2.3.1", + "cairo-lang-diagnostics 2.3.1", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-lowering 2.3.1", + "cairo-lang-semantic 2.3.1", + "cairo-lang-sierra 2.3.1", + "cairo-lang-sierra-generator 2.3.1", + "cairo-lang-sierra-to-casm 2.3.1", + "cairo-lang-syntax 2.3.1", + "cairo-lang-utils 2.3.1", + "const_format", "convert_case 0.6.0", - "genco", "indent", "indoc", "itertools 0.11.0", - "log", "num-bigint", "num-integer", "num-traits 0.2.17", @@ -1969,18 +2106,17 @@ dependencies = [ [[package]] name = "cairo-lang-syntax" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b1af0ae21f9e539f97cfdf56f5ce0934dae5d87f568fd778c3d624a102f8dbb" +checksum = 
"2d461d88e09ba7055822eb42d6b2c2ea38a4eaa5b9e4196d8f63db48c563fb56" dependencies = [ - "cairo-lang-debug 2.2.0", - "cairo-lang-filesystem 2.2.0", - "cairo-lang-utils 2.2.0", + "cairo-lang-debug 2.3.1", + "cairo-lang-filesystem 2.3.1", + "cairo-lang-utils 2.3.1", "num-bigint", "num-traits 0.2.17", "salsa", "smol_str", - "thiserror", "unescaper", ] @@ -1995,9 +2131,9 @@ dependencies = [ [[package]] name = "cairo-lang-syntax-codegen" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "822ffabf24f6a5506262edcece315260a82d9dfba3abe6548791a6d654563ad0" +checksum = "9615745282c0c0d3a255c2ac2665a18ae1d163c54285014d85dacda2d5e53637" dependencies = [ "genco", "xshell", @@ -2021,14 +2157,13 @@ dependencies = [ [[package]] name = "cairo-lang-utils" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f974b6e859f0b09c0f13ec8188c96e9e8bbb5da04214f911dbb5bcda67cb812b" +checksum = "15edcd2fba78af9b753614885464c9b5bf6041b7decba46587c2b0babc4197ac" dependencies = [ - "indexmap 2.0.2", + "indexmap 2.1.0", "itertools 0.11.0", "num-bigint", - "num-integer", "num-traits 0.2.17", "parity-scale-codec", "schemars", @@ -2090,7 +2225,7 @@ dependencies = [ "serde_json", "sha2 0.10.8", "sha3", - "starknet-crypto 0.5.1", + "starknet-crypto 0.5.2", "thiserror-no-std", ] @@ -2158,7 +2293,7 @@ checksum = "5aca1a8fbc20b50ac9673ff014abfb2b5f4085ee1a850d408f14a159c5853ac7" dependencies = [ "aead 0.3.2", "cipher 0.2.5", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -2181,7 +2316,7 @@ source = "git+https://github.com/eigerco/celestia-node-rs?rev=bd6394b66b11065c54 dependencies = [ "celestia-types", "http", - "jsonrpsee 0.20.2", + "jsonrpsee 0.20.3", "serde", "thiserror", ] @@ -2191,15 +2326,15 @@ name = "celestia-types" version = "0.1.0" source = "git+https://github.com/eigerco/celestia-node-rs?rev=bd6394b66b11065c543ab3f19fd66000a72b6236#bd6394b66b11065c543ab3f19fd66000a72b6236" dependencies = [ - "base64 0.21.4", + "base64 0.21.5", "bech32", "bytes", "celestia-proto", "cid 0.10.1", "const_format", "enum_dispatch", - "libp2p-identity 0.2.7", - "multiaddr 0.18.0", + "libp2p-identity 0.2.8", + "multiaddr 0.18.1", "nmt-rs", "ruint", "serde", @@ -2210,6 +2345,12 @@ dependencies = [ "thiserror", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cexpr" version = "0.6.0" @@ -2225,7 +2366,7 @@ version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03915af431787e6ffdcc74c645077518c6b6e01f80b761e0fbbfa288536311b3" dependencies = [ - "smallvec 1.11.1", + "smallvec 1.11.2", ] [[package]] @@ -2240,27 +2381,36 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" +[[package]] +name = "chacha" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf3c081b5fba1e5615640aae998e0fbd10c24cbd897ee39ed754a77601a4862" +dependencies = [ + "byteorder", + "keystream", +] + [[package]] name = "chacha20" -version = "0.8.2" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c80e5460aa66fe3b91d40bcbdab953a597b60053e34d684ac6903f863b680a6" +checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" dependencies 
= [ "cfg-if", - "cipher 0.3.0", + "cipher 0.4.4", "cpufeatures", - "zeroize", ] [[package]] name = "chacha20poly1305" -version = "0.9.1" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a18446b09be63d457bbec447509e85f662f32952b035ce892290396bc0b0cff5" +checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35" dependencies = [ - "aead 0.4.3", + "aead 0.5.2", "chacha20", - "cipher 0.3.0", + "cipher 0.4.4", "poly1305", "zeroize", ] @@ -2282,13 +2432,13 @@ dependencies = [ [[package]] name = "cid" -version = "0.8.6" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ed9c8b2d17acb8110c46f1da5bf4a696d745e1474a16db0cd2b49cd0249bf2" +checksum = "b9b68e3193982cd54187d71afdb2a271ad4cf8af157858e9cb911b91321de143" dependencies = [ "core2", "multibase", - "multihash 0.16.3", + "multihash 0.17.0", "serde", "unsigned-varint", ] @@ -2332,15 +2482,7 @@ checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" dependencies = [ "crypto-common", "inout", -] - -[[package]] -name = "ckb-merkle-mountain-range" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ccb671c5921be8a84686e6212ca184cb1d7c51cadcdbfcbd1cc3f042f5dfb8" -dependencies = [ - "cfg-if", + "zeroize", ] [[package]] @@ -2371,9 +2513,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.7" +version = "4.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac495e00dcec98c83465d5ad66c5c4fabd652fd6686e7c6269b117e729a6f17b" +checksum = "2275f18819641850fa26c89acc84d465c1bf91ce57bc2748b28c420473352f64" dependencies = [ "clap_builder", "clap_derive", @@ -2381,9 +2523,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.4.7" +version = "4.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77ed9a32a62e6ca27175d00d29d05ca32e396ea1eb5fb01d8256b669cec7663" +checksum = "07cdf1b148b25c1e1f7a42225e30a0d99a615cd4637eae7365548dd4529b95bc" dependencies = [ "anstream", "anstyle", @@ -2400,7 +2542,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -2409,6 +2551,12 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" +[[package]] +name = "claxon" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bfbf56724aa9eca8afa4fcfadeb479e722935bb2a0900c2d37e0cc477af0688" + [[package]] name = "codespan-reporting" version = "0.11.1" @@ -2457,7 +2605,7 @@ version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5286a0843c21f8367f7be734f89df9b822e0321d8bcce8d6e735aadff7d74979" dependencies = [ - "base64 0.21.4", + "base64 0.21.5", "bech32", "bs58 0.5.0", "digest 0.10.7", @@ -2489,25 +2637,56 @@ dependencies = [ ] [[package]] -name = "comfy-table" -version = "6.2.0" +name = "combine" +version = "4.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e959d788268e3bf9d35ace83e81b124190378e4c91c9067524675e33394b8ba" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" dependencies = [ - "strum 0.24.1", - "strum_macros 0.24.3", - "unicode-width", + "bytes", + "memchr", ] [[package]] -name = "concurrent-queue" -version = "2.3.0" +name = "comfy-table" +version = "7.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400" +checksum = "7c64043d6c7b7a4c58e39e7efccfdea7b93d885a795d0c054a69dbbf4dd52686" dependencies = [ - "crossbeam-utils", -] - + "strum 0.25.0", + "strum_macros 0.25.3", + "unicode-width", +] + +[[package]] +name = "common" +version = "0.1.0" +source = "git+https://github.com/burdges/ring-proof?branch=patch-1#05a756076cb20f981a52afea3a620168de49f95f" +dependencies = [ + "ark-ec", + "ark-ff 0.4.2", + "ark-poly", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "fflonk", + "merlin 3.0.0", + "rand_chacha 0.3.1", +] + +[[package]] +name = "common-path" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2382f75942f4b3be3690fe4f86365e9c853c1587d6ee58212cebf6e2a9ccd101" + +[[package]] +name = "concurrent-queue" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "console" version = "0.15.7" @@ -2529,13 +2708,14 @@ checksum = "32b13ea120a812beba79e34316b3942a857c86ec1593cb34f27bb28272ce2cca" [[package]] name = "const-hex" -version = "1.9.1" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c37be52ef5e3b394db27a2341010685ad5103c72ac15ce2e9420a7e8f93f342c" +checksum = "a5104de16b218eddf8e34ffe2f86f74bfa4e61e95a1b89732fccf6325efd0557" dependencies = [ "cfg-if", "cpufeatures", "hex", + "proptest", "serde", ] @@ -2545,6 +2725,26 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" +[[package]] +name = "const-random" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aaf16c9c2c612020bcfd042e170f6e32de9b9d75adb5277cdbbd2e2c8c8299a" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom 0.2.11", + "once_cell", + "tiny-keccak", +] + [[package]] name = "const_format" version = "0.2.32" @@ -2577,6 +2777,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" +[[package]] +name = "constcat" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd7e35aee659887cbfb97aaf227ac12cad1a9d7c71e55ff3376839ed4e282d08" + [[package]] name = "convert_case" version = "0.4.0" @@ -2617,6 +2823,51 @@ dependencies = [ "memchr", ] +[[package]] +name = "coreaudio-rs" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "321077172d79c662f64f5071a03120748d5bb652f5231570141be24cfcd2bace" +dependencies = [ + "bitflags 1.3.2", + "core-foundation-sys", + "coreaudio-sys", +] + +[[package]] +name = "coreaudio-sys" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3120ebb80a9de008e638ad833d4127d50ea3d3a960ea23ea69bc66d9358a028" +dependencies = [ + "bindgen 0.69.1", +] + +[[package]] +name = "cpal" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6d959d90e938c5493000514b446987c07aed46c668faaa7d34d6c7a67b1a578c" +dependencies = [ + "alsa", + "core-foundation-sys", + "coreaudio-rs", + "dasp_sample", + "jni 0.19.0", + "js-sys", + "libc", + "mach2", + "ndk", + "ndk-context", + "oboe", + "once_cell", + "parking_lot 0.12.1", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows 0.46.0", +] + [[package]] name = "cpp_demangle" version = "0.3.5" @@ -2628,9 +2879,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" dependencies = [ "libc", ] @@ -2660,7 +2911,7 @@ dependencies = [ "hashbrown 0.13.2", "log", "regalloc2", - "smallvec 1.11.1", + "smallvec 1.11.2", "target-lexicon", ] @@ -2696,7 +2947,7 @@ checksum = "64a25d9d0a0ae3079c463c34115ec59507b4707175454f0eee0891e83e30e82d" dependencies = [ "cranelift-codegen", "log", - "smallvec 1.11.1", + "smallvec 1.11.2", "target-lexicon", ] @@ -2728,7 +2979,7 @@ dependencies = [ "cranelift-frontend", "itertools 0.10.5", "log", - "smallvec 1.11.1", + "smallvec 1.11.2", "wasmparser", "wasmtime-types", ] @@ -2744,9 +2995,9 @@ dependencies = [ [[package]] name = "crc-catalog" -version = "2.2.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" @@ -2804,19 +3055,19 @@ checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ "generic-array 0.14.7", "rand_core 0.6.4", - "subtle", + "subtle 2.4.1", "zeroize", ] [[package]] name = "crypto-bigint" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "740fe28e594155f10cfc383984cbefd529d7396050557148f79cb0f621204124" +checksum = "28f85c3514d2a6e64160359b45a3918c3b4178bcbf4ae5d03ab2d02e521c479a" dependencies = [ "generic-array 0.14.7", "rand_core 0.6.4", - "subtle", + "subtle 2.4.1", "zeroize", ] @@ -2831,6 +3082,16 @@ dependencies = [ "typenum", ] +[[package]] +name = "crypto-mac" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4434400df11d95d556bac068ddfedd482915eb18fe8bea89bc80b6e4b1c179e5" +dependencies = [ + "generic-array 0.12.4", + "subtle 1.0.0", +] + [[package]] name = "crypto-mac" version = "0.8.0" @@ -2838,7 +3099,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ "generic-array 0.14.7", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -2848,7 +3109,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" dependencies = [ "generic-array 0.14.7", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -2878,7 +3139,7 @@ dependencies = [ "byteorder", "digest 0.8.1", "rand_core 0.5.1", - "subtle", + "subtle 2.4.1", "zeroize", ] @@ -2891,7 +3152,7 @@ dependencies = [ "byteorder", "digest 0.9.0", "rand_core 0.5.1", - "subtle", + "subtle 2.4.1", "zeroize", ] @@ -2906,21 +3167,21 @@ dependencies = [ "curve25519-dalek-derive", "digest 0.10.7", "fiat-crypto", - "platforms 3.1.2", + "platforms", "rustc_version 0.4.0", - "subtle", + 
"subtle 2.4.1", "zeroize", ] [[package]] name = "curve25519-dalek-derive" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -2938,9 +3199,9 @@ dependencies = [ [[package]] name = "cxx" -version = "1.0.109" +version = "1.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c390c123d671cc547244943ecad81bdaab756c6ea332d9ca9c1f48d952a24895" +checksum = "7129e341034ecb940c9072817cd9007974ea696844fc4dd582dc1653a7fbe2e8" dependencies = [ "cc", "cxxbridge-flags", @@ -2950,9 +3211,9 @@ dependencies = [ [[package]] name = "cxx-build" -version = "1.0.109" +version = "1.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00d3d3ac9ffb900304edf51ca719187c779f4001bb544f26c4511d621de905cf" +checksum = "a2a24f3f5f8eed71936f21e570436f024f5c2e25628f7496aa7ccd03b90109d5" dependencies = [ "cc", "codespan-reporting", @@ -2960,24 +3221,24 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "cxxbridge-flags" -version = "1.0.109" +version = "1.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94415827ecfea0f0c74c8cad7d1a86ddb3f05354d6a6ddeda0adee5e875d2939" +checksum = "06fdd177fc61050d63f67f5bd6351fac6ab5526694ea8e359cd9cd3b75857f44" [[package]] name = "cxxbridge-macro" -version = "1.0.109" +version = "1.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33dbbe9f5621c9247f97ec14213b04f350bff4b6cebefe834c60055db266ecf" +checksum = "587663dd5fb3d10932c8aecfe7c844db1bcf0aee93eeab08fac13dc1212c2e7f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -3025,7 +3286,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -3047,9 +3308,15 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.38", + "syn 2.0.39", ] +[[package]] +name = "dasp_sample" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c87e182de0887fd5361989c677c4e8f5000cd9491d6d563161a8f3a5519fc7f" + [[package]] name = "data-encoding" version = "2.4.0" @@ -3240,16 +3507,16 @@ dependencies = [ "block-buffer 0.10.4", "const-oid", "crypto-common", - "subtle", + "subtle 2.4.1", ] [[package]] name = "directories" -version = "4.0.1" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51c5d4ddabd36886dd3e1438cb358cdcb0d7c499cb99cb4ac2e38e18b5cb210" +checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" dependencies = [ - "dirs-sys 0.3.7", + "dirs-sys", ] [[package]] @@ -3268,7 +3535,7 @@ version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" dependencies = [ - "dirs-sys 0.4.1", + "dirs-sys", ] [[package]] @@ -3281,17 +3548,6 @@ dependencies = [ "dirs-sys-next", ] -[[package]] -name = "dirs-sys" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" -dependencies = [ - "libc", 
- "redox_users", - "winapi", -] - [[package]] name = "dirs-sys" version = "0.4.1" @@ -3323,20 +3579,58 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] -name = "downcast" -version = "0.11.0" +name = "dleq_vrf" +version = "0.0.2" +source = "git+https://github.com/w3f/ring-vrf?rev=3ddc20?rev=3ddc20#3ddc2051066c4b3f0eadd0ba5700df12500d9754" +dependencies = [ + "ark-ec", + "ark-ff 0.4.2", + "ark-scale", + "ark-secret-scalar", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "ark-transcript", + "arrayvec 0.7.4", + "rand_core 0.6.4", + "zeroize", +] + +[[package]] +name = "docify" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" +checksum = "4235e9b248e2ba4b92007fe9c646f3adf0ffde16dc74713eacc92b8bc58d8d2f" +dependencies = [ + "docify_macros", +] [[package]] -name = "downcast-rs" -version = "1.2.0" +name = "docify_macros" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47020e12d7c7505670d1363dd53d6c23724f71a90a3ae32ff8eba40de8404626" +dependencies = [ + "common-path", + "derive-syn-parse", + "once_cell", + "proc-macro2", + "quote", + "regex", + "syn 2.0.39", + "termcolor", + "toml 0.7.8", + "walkdir", +] + +[[package]] +name = "downcast" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650" +checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" [[package]] name = "dtoa" @@ -3373,9 +3667,9 @@ dependencies = [ [[package]] name = "dyn-clone" -version = "1.0.14" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d2f3407d9a573d666de4b5bdf10569d73ca9478087346697dcbae6244bfbcd" +checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" [[package]] name = "ecdsa" @@ -3499,7 +3793,7 @@ dependencies = [ "pkcs8 0.9.0", "rand_core 0.6.4", "sec1 0.3.0", - "subtle", + "subtle 2.4.1", "zeroize", ] @@ -3510,7 +3804,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d97ca172ae9dc9f9b779a6e3a65d308f2af74e5b8c921299075bdb4a0370e914" dependencies = [ "base16ct 0.2.0", - "crypto-bigint 0.5.3", + "crypto-bigint 0.5.4", "digest 0.10.7", "ff 0.13.0", "generic-array 0.14.7", @@ -3518,7 +3812,7 @@ dependencies = [ "pkcs8 0.10.2", "rand_core 0.6.4", "sec1 0.7.3", - "subtle", + "subtle 2.4.1", "zeroize", ] @@ -3552,7 +3846,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe81b5c06ecfdbc71dd845216f225f53b62a10cb8a16c946836a3467f701d05b" dependencies = [ - "base64 0.21.4", + "base64 0.21.5", "bytes", "hex", "k256", @@ -3585,27 +3879,14 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "env_logger" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" -dependencies = [ - "atty", - "humantime", - "log", - "regex", - "termcolor", -] - -[[package]] -name = "env_logger" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +checksum = 
"95b3f3e67048839cb0d0781f445682a35113da7121f7c949db0e2be96a4fbece" dependencies = [ "humantime", "is-terminal", @@ -3628,9 +3909,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "7c18ee0ed65a5f1f81cac6b1d213b69c35fa47d4252ad41f1486dbd8226fe36e" dependencies = [ "libc", "windows-sys 0.48.0", @@ -3772,7 +4053,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "syn 2.0.38", + "syn 2.0.39", "toml 0.7.8", "walkdir", ] @@ -3790,7 +4071,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -3808,7 +4089,7 @@ dependencies = [ "ethabi", "generic-array 0.14.7", "k256", - "num_enum 0.7.0", + "num_enum 0.7.1", "once_cell", "open-fastrlp", "rand 0.8.5", @@ -3816,7 +4097,7 @@ dependencies = [ "serde", "serde_json", "strum 0.25.0", - "syn 2.0.38", + "syn 2.0.39", "tempfile", "thiserror", "tiny-keccak", @@ -3873,7 +4154,7 @@ checksum = "6838fa110e57d572336178b7c79e94ff88ef976306852d8cb87d9e5b1fc7c0b5" dependencies = [ "async-trait", "auto_impl", - "base64 0.21.4", + "base64 0.21.5", "bytes", "const-hex", "enr", @@ -3961,9 +4242,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "event-listener" -version = "3.0.1" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cec0252c2afff729ee6f00e903d479fba81784c8e2bd77447673471fdfaea1" +checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" dependencies = [ "concurrent-queue", "parking", @@ -3976,7 +4257,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d96b852f1345da36d551b9473fa1e2b1eb5c5195585c6c018118bc92a8d91160" dependencies = [ - "event-listener 3.0.1", + "event-listener 3.1.0", "pin-project-lite 0.2.13", ] @@ -3991,15 +4272,15 @@ dependencies = [ [[package]] name = "expander" -version = "1.0.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f360349150728553f92e4c997a16af8915f418d3a0f21b440d34c5632f16ed84" +checksum = "5f86a749cf851891866c10515ef6c299b5c69661465e9c3bbe7e07a2b77fb0f7" dependencies = [ - "blake2", + "blake2 0.10.6", "fs-err", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.39", ] [[package]] @@ -4066,7 +4347,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ "rand_core 0.6.4", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -4076,14 +4357,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ "rand_core 0.6.4", - "subtle", + "subtle 2.4.1", +] + +[[package]] +name = "fflonk" +version = "0.1.0" +source = "git+https://github.com/w3f/fflonk#1beb0585e1c8488956fac7f05da061f9b41e8948" +dependencies = [ + "ark-ec", + "ark-ff 0.4.2", + "ark-poly", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "merlin 3.0.0", ] [[package]] name = "fiat-crypto" -version = "0.2.1" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0870c84016d4b481be5c9f323c24f65e31e901ae618f0e80f4308fb00de1d2d" +checksum = "53a56f0780318174bad1c127063fd0c5fdfb35398e3cd79ffaab931a6c79df80" 
[[package]] name = "file-per-thread-logger" @@ -4091,7 +4385,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84f2e425d9790201ba4af4630191feac6dcc98765b118d4d18e91d23c2353866" dependencies = [ - "env_logger 0.10.0", + "env_logger", "log", ] @@ -4195,7 +4489,7 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "fork-tree" version = "3.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "parity-scale-codec", ] @@ -4218,7 +4512,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frame-benchmarking" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-support", "frame-support-procedural", @@ -4230,25 +4524,25 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto 7.0.0", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-runtime-interface 7.0.0", - "sp-std 5.0.0", - "sp-storage 7.0.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime-interface 17.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-storage 13.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "static_assertions", ] [[package]] name = "frame-benchmarking-cli" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "Inflector", - "array-bytes 4.2.0", + "array-bytes 6.2.0", "chrono", - "clap 4.4.7", + "clap 4.4.8", "comfy-table", "frame-benchmarking", "frame-support", @@ -4273,16 +4567,17 @@ dependencies = [ "serde_json", "sp-api", "sp-blockchain", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-database", - "sp-externalities 0.13.0", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", - "sp-std 5.0.0", - "sp-storage 7.0.0", - "sp-trie 7.0.0", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-storage 13.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-trie 22.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-wasm-interface 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", "thousands", ] @@ -4290,18 +4585,19 @@ dependencies = [ [[package]] name = "frame-executive" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-support", "frame-system", "frame-try-runtime", + "log", "parity-scale-codec", "scale-info", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", - "sp-tracing 6.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-tracing 10.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] @@ -4316,21 +4612,33 @@ dependencies = [ "serde", ] +[[package]] +name = "frame-metadata" +version = "16.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cf1549fba25a6fcac22785b61698317d958e96cac72a59102ea45b9ae64692" +dependencies = [ + "cfg-if", + "parity-scale-codec", + "scale-info", + "serde", +] + [[package]] name = "frame-remote-externalities" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "async-recursion", "futures", "indicatif", "jsonrpsee 0.16.3", "log", "parity-scale-codec", "serde", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "spinners", "substrate-rpc-client", "tokio", @@ -4340,79 +4648,88 @@ dependencies = [ [[package]] name = "frame-support" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ + "aquamarine", "bitflags 1.3.2", + "docify", "environmental", - "frame-metadata", + "frame-metadata 16.0.0", "frame-support-procedural", "impl-trait-for-tuples", "k256", "log", - "once_cell", + "macro_magic", "parity-scale-codec", "paste", "scale-info", "serde", - "smallvec 1.11.1", + "serde_json", + "smallvec 1.11.2", "sp-api", - "sp-arithmetic 6.0.0", - "sp-core 7.0.0", + 
"sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-core-hashing-proc-macro", - "sp-debug-derive 5.0.0", + "sp-debug-derive 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-genesis-builder", "sp-inherents", - "sp-io 7.0.0", - "sp-runtime 7.0.0", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-metadata-ir", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-staking", - "sp-state-machine 0.13.0", - "sp-std 5.0.0", - "sp-tracing 6.0.0", - "sp-weights 4.0.0", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-tracing 10.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-weights 20.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "static_assertions", "tt-call", ] [[package]] name = "frame-support-procedural" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "Inflector", "cfg-expr", "derive-syn-parse", + "expander", "frame-support-procedural-tools", "itertools 0.10.5", + "macro_magic", "proc-macro-warning", "proc-macro2", "quote", - "syn 2.0.38", + "sp-core-hashing 9.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "syn 2.0.39", ] [[package]] name = "frame-support-procedural-tools" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-support-procedural-tools-derive", - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "frame-support-procedural-tools-derive" version = "3.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "frame-system" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "cfg-if", "frame-support", @@ -4420,33 +4737,33 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-version", - "sp-weights 4.0.0", + "sp-weights 20.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "frame-system-benchmarking" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-benchmarking", "frame-support", "frame-system", "parity-scale-codec", "scale-info", - "sp-core 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "frame-system-rpc-runtime-api" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "parity-scale-codec", "sp-api", @@ -4455,20 +4772,23 @@ dependencies = [ [[package]] name = "frame-try-runtime" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-support", "parity-scale-codec", "sp-api", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "fs-err" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0845fa252299212f0389d64ba26f34fa32cfe41588355f21ed507c59a0f64541" +checksum = "fb5fd9bcbe8b1087cbd395b51498c01bc997cef73e778a80b77a811af5e2d29f" +dependencies = [ + "autocfg", +] [[package]] name = "fs2" @@ -4480,16 +4800,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "fs4" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eeb4ed9e12f43b7fa0baae3f9cdda28352770132ef2e09a23760c29cae8bd47" -dependencies = [ - "rustix 0.38.20", - "windows-sys 0.48.0", -] - [[package]] name = "funty" version = "2.0.0" @@ -4578,7 +4888,7 @@ checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -4643,24 +4953,24 @@ dependencies = [ [[package]] name = "genco" -version = "0.17.7" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4fd234893ffe9cf5b81224ebb1d21bbe2eeb94d95bac3ea25c97cba7293304d" +checksum = "98d7af598790738fee616426e669360fa361273b1b9c9b7f30c92fa627605cad" dependencies = [ "genco-macros", "relative-path", - "smallvec 1.11.1", + "smallvec 1.11.2", ] [[package]] name = "genco-macros" -version = "0.17.7" +version = "0.17.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e1c8cd3de2f32ee05ba2adaa90f8d0c354ffa0adeb2d186978d7ae70e5025e9" +checksum = "d4cf186fea4af17825116f72932fe52cce9a13bae39ff63b4dc0cfdb3fb4bde1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -4706,9 +5016,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" dependencies = [ "cfg-if", "js-sys", @@ -4836,7 +5146,7 @@ checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ "ff 0.12.1", "rand_core 0.6.4", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -4847,14 +5157,14 @@ checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff 0.13.0", "rand_core 0.6.4", - "subtle", + "subtle 2.4.1", ] [[package]] name = "h2" -version = "0.3.21" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" dependencies = [ "bytes", "fnv", @@ -4862,7 +5172,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 1.9.3", + "indexmap 2.1.0", "slab", "tokio", "tokio-util", @@ -4871,9 +5181,9 @@ dependencies = [ [[package]] name = "handlebars" -version = "4.4.0" +version = "4.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c39b3bc2a8f715298032cf5087e58573809374b08160aa7d750582bdb82d2683" +checksum = "faa67bab9ff362228eb3d00bd024a4965d8231bbb7921167f0cfa66c6626b225" dependencies = [ "log", "pest", @@ -4904,7 +5214,7 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.6", + "ahash 0.7.7", ] [[package]] @@ -4913,7 +5223,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", ] [[package]] @@ -4922,7 +5232,7 @@ version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", "allocator-api2", "serde", ] @@ -4936,6 +5246,15 @@ dependencies = [ "fxhash", ] +[[package]] +name = "hashlink" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +dependencies = [ + "hashbrown 0.14.2", +] + [[package]] name = "heck" version = "0.3.3" @@ -5047,11 +5366,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "hound" +version = "3.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62adaabb884c94955b19907d60019f4e145d091c75345379e70d1ee696f7854f" + [[package]] name = "http" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" dependencies = [ "bytes", "fnv", @@ 
-5110,7 +5435,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite 0.2.13", - "socket2 0.4.9", + "socket2 0.4.10", "tokio", "tower-service", "tracing", @@ -5119,34 +5444,19 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.23.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" -dependencies = [ - "http", - "hyper", - "log", - "rustls 0.20.9", - "rustls-native-certs", - "tokio", - "tokio-rustls 0.23.4", -] - -[[package]] -name = "hyper-rustls" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http", "hyper", "log", - "rustls 0.21.7", + "rustls 0.21.8", "rustls-native-certs", "tokio", - "tokio-rustls 0.24.1", - "webpki-roots 0.23.1", + "tokio-rustls", + "webpki-roots 0.25.2", ] [[package]] @@ -5244,7 +5554,7 @@ dependencies = [ "rtnetlink", "system-configuration", "tokio", - "windows", + "windows 0.51.1", ] [[package]] @@ -5291,6 +5601,25 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "include_dir" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e" +dependencies = [ + "include_dir_macros", +] + +[[package]] +name = "include_dir_macros" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" +dependencies = [ + "proc-macro2", + "quote", +] + [[package]] name = "indent" version = "0.1.1" @@ -5325,9 +5654,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", "hashbrown 0.14.2", @@ -5422,7 +5751,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2 0.5.4", + "socket2 0.5.5", "widestring", "windows-sys 0.48.0", "winreg", @@ -5430,9 +5759,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" [[package]] name = "is-terminal" @@ -5441,7 +5770,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ "hermit-abi 0.3.3", - "rustix 0.38.20", + "rustix 0.38.24", "windows-sys 0.48.0", ] @@ -5470,8 +5799,42 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] -name = "jobserver" -version = "0.1.27" +name = "jni" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" +dependencies = [ + "cesu8", + "combine", + "jni-sys", + "log", 
+ "thiserror", + "walkdir", +] + +[[package]] +name = "jni" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "039022cdf4d7b1cf548d31f60ae783138e5fd42013f6271049d7df7afadef96c" +dependencies = [ + "cesu8", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "jobserver" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" dependencies = [ @@ -5480,9 +5843,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.64" +version = "0.3.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8" dependencies = [ "wasm-bindgen", ] @@ -5506,15 +5869,15 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de902baa44bf34a58b1a4906f8b840d7d60dcec5f41fe08b4dbc14cf9efa821c" +checksum = "affdc52f7596ccb2d7645231fc6163bb314630c989b64998f3699a28b4d5d4dc" dependencies = [ - "jsonrpsee-core 0.20.2", - "jsonrpsee-http-client 0.20.2", - "jsonrpsee-proc-macros 0.20.2", - "jsonrpsee-types 0.20.2", - "jsonrpsee-ws-client 0.20.2", + "jsonrpsee-core 0.20.3", + "jsonrpsee-http-client 0.20.3", + "jsonrpsee-proc-macros 0.20.3", + "jsonrpsee-types 0.20.3", + "jsonrpsee-ws-client 0.20.3", "tracing", ] @@ -5537,7 +5900,7 @@ dependencies = [ "soketto", "thiserror", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls", "tokio-util", "tracing", "webpki-roots 0.25.2", @@ -5545,19 +5908,19 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58d9851f8f5653e0433a898e9032bde4910b35d625bd9dcf33ef6e36e7c3d456" +checksum = "b5b005c793122d03217da09af68ba9383363caa950b90d3436106df8cabce935" dependencies = [ "futures-util", "http", - "jsonrpsee-core 0.20.2", + "jsonrpsee-core 0.20.3", "pin-project", "rustls-native-certs", "soketto", "thiserror", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls", "tokio-util", "tracing", "url", @@ -5594,9 +5957,9 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51f45d37af23707750136379f6799e76ebfcf2d425ec4e36d0deb7921da5e65c" +checksum = "da2327ba8df2fdbd5e897e2b5ed25ce7f299d345b9736b6828814c3dbd1fd47b" dependencies = [ "anyhow", "async-lock 2.8.0", @@ -5605,7 +5968,7 @@ dependencies = [ "futures-timer", "futures-util", "hyper", - "jsonrpsee-types 0.20.2", + "jsonrpsee-types 0.20.3", "rustc-hash", "serde", "serde_json", @@ -5622,7 +5985,7 @@ checksum = "7e5f9fabdd5d79344728521bb65e3106b49ec405a78b66fbff073b72b389fa43" dependencies = [ "async-trait", "hyper", - "hyper-rustls 0.24.1", + "hyper-rustls", "jsonrpsee-core 0.16.3", "jsonrpsee-types 0.16.3", "rustc-hash", @@ -5635,15 +5998,15 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"02308562f2e8162a32f8d6c3dc19c29c858d5d478047c886a5c3c25b5f7fa868" +checksum = "5f80c17f62c7653ce767e3d7288b793dfec920f97067ceb189ebdd3570f2bc20" dependencies = [ "async-trait", "hyper", - "hyper-rustls 0.24.1", - "jsonrpsee-core 0.20.2", - "jsonrpsee-types 0.20.2", + "hyper-rustls", + "jsonrpsee-core 0.20.3", + "jsonrpsee-types 0.20.3", "serde", "serde_json", "thiserror", @@ -5660,7 +6023,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44e8ab85614a08792b9bff6c8feee23be78c98d0182d4c622c05256ab553892a" dependencies = [ "heck 0.4.1", - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", "syn 1.0.109", @@ -5668,12 +6031,12 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26b3675a943d083d0bf6e367ec755dccec56c41888afa13b191c1c4ff87c652" +checksum = "29110019693a4fa2dbda04876499d098fa16d70eba06b1e6e2b3f1b251419515" dependencies = [ "heck 0.4.1", - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", "syn 1.0.109", @@ -5717,9 +6080,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05eaff23af19f10ba6fbb76519bed6da4d3b9bbaef13d39b7c2b6c14e532d27e" +checksum = "5be0be325642e850ed0bdff426674d2e66b2b7117c9be23a7caef68a2902b7d9" dependencies = [ "anyhow", "beef", @@ -5754,14 +6117,14 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd34d3ab8c09f02fd4c432f256bc8b143b616b222b03050f941ee53f0e8d7b24" +checksum = "bca9cb3933ccae417eb6b08c3448eb1cb46e39834e5b503e395e5e5bd08546c0" dependencies = [ "http", - "jsonrpsee-client-transport 0.20.2", - "jsonrpsee-core 0.20.2", - "jsonrpsee-types 0.20.2", + "jsonrpsee-client-transport 0.20.3", + "jsonrpsee-core 0.20.3", + "jsonrpsee-types 0.20.3", "url", ] @@ -5771,7 +6134,7 @@ version = "8.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" dependencies = [ - "base64 0.21.4", + "base64 0.21.5", "pem", "ring 0.16.20", "serde", @@ -5802,13 +6165,19 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "keystream" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c33070833c9ee02266356de0c43f723152bd38bd96ddf52c82b3af10c9138b28" + [[package]] name = "kvdb" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7d770dcb02bf6835887c3a979b5107a04ff4bbde97a5f0928d27404a155add9" dependencies = [ - "smallvec 1.11.1", + "smallvec 1.11.2", ] [[package]] @@ -5832,7 +6201,7 @@ dependencies = [ "parking_lot 0.12.1", "regex", "rocksdb", - "smallvec 1.11.1", + "smallvec 1.11.2", ] [[package]] @@ -5882,11 +6251,22 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" +[[package]] +name = "lewton" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "777b48df9aaab155475a83a7df3070395ea1ac6902f5cd062b8f2b028075c030" +dependencies = [ + "byteorder", + "ogg", + "tinyvec", +] + [[package]] name = "libc" -version = "0.2.149" +version = "0.2.150" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" [[package]] name = "libloading" @@ -5913,7 +6293,7 @@ dependencies = [ "bytes", "futures", "futures-timer", - "getrandom 0.2.10", + "getrandom 0.2.11", "instant", "libp2p-allow-block-list", "libp2p-connection-limits", @@ -5984,7 +6364,7 @@ dependencies = [ "quick-protobuf", "rand 0.8.5", "rw-stream-sink", - "smallvec 1.11.1", + "smallvec 1.11.2", "thiserror", "unsigned-varint", "void", @@ -6000,7 +6380,7 @@ dependencies = [ "libp2p-core", "log", "parking_lot 0.12.1", - "smallvec 1.11.1", + "smallvec 1.11.2", "trust-dns-resolver", ] @@ -6021,7 +6401,7 @@ dependencies = [ "lru 0.10.1", "quick-protobuf", "quick-protobuf-codec", - "smallvec 1.11.1", + "smallvec 1.11.2", "thiserror", "void", ] @@ -6046,17 +6426,17 @@ dependencies = [ [[package]] name = "libp2p-identity" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdd6317441f361babc74c2989c6484eb0726045399b6648de039e1805ea96972" +checksum = "999ec70441b2fb35355076726a6bc466c932e9bdc66f6a11c6c0aa17c7ab9be0" dependencies = [ "bs58 0.5.0", "hkdf", - "log", "multihash 0.19.1", "quick-protobuf", "sha2 0.10.8", "thiserror", + "tracing", ] [[package]] @@ -6080,7 +6460,7 @@ dependencies = [ "quick-protobuf", "rand 0.8.5", "sha2 0.10.8", - "smallvec 1.11.1", + "smallvec 1.11.2", "thiserror", "uint", "unsigned-varint", @@ -6101,8 +6481,8 @@ dependencies = [ "libp2p-swarm", "log", "rand 0.8.5", - "smallvec 1.11.1", - "socket2 0.4.9", + "smallvec 1.11.2", + "socket2 0.4.10", "tokio", "trust-dns-proto", "void", @@ -6197,7 +6577,7 @@ dependencies = [ "libp2p-identity 0.1.3", "libp2p-swarm", "rand 0.8.5", - "smallvec 1.11.1", + "smallvec 1.11.2", ] [[package]] @@ -6216,7 +6596,7 @@ dependencies = [ "libp2p-swarm-derive", "log", "rand 0.8.5", - "smallvec 1.11.1", + "smallvec 1.11.2", "tokio", "void", ] @@ -6244,7 +6624,7 @@ dependencies = [ "libc", "libp2p-core", "log", - "socket2 0.4.9", + "socket2 0.4.10", "tokio", ] @@ -6344,13 +6724,24 @@ dependencies = [ "yamux", ] +[[package]] +name = "libredox" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +dependencies = [ + "bitflags 2.4.1", + "libc", + "redox_syscall 0.4.1", +] + [[package]] name = "librocksdb-sys" version = "0.11.0+8.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3386f101bcb4bd252d8e9d2fb41ec3b0862a15a62b478c355b2982efa469e3e" dependencies = [ - "bindgen", + "bindgen 0.65.1", "bzip2-sys", "cc", "glob", @@ -6386,7 +6777,7 @@ checksum = "5be9b9bb642d8522a44d533eab56c16c738301965504753b03ad1de3425d5451" dependencies = [ "crunchy", "digest 0.9.0", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -6465,9 +6856,21 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" [[package]] name = "linux-raw-sys" -version = "0.4.10" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" + +[[package]] +name = "lioness" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" +checksum = 
"4ae926706ba42c425c9457121178330d75e273df2e82e28b758faf3de3a9acb9" +dependencies = [ + "arrayref", + "blake2 0.8.1", + "chacha", + "keystream", +] [[package]] name = "lock_api" @@ -6494,15 +6897,6 @@ dependencies = [ "hashbrown 0.12.3", ] -[[package]] -name = "lru" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6e8aaa3f231bb4bd57b84b2d5dc3ae7f350265df8aa96492e0bc394a1571909" -dependencies = [ - "hashbrown 0.12.3", -] - [[package]] name = "lru" version = "0.10.1" @@ -6550,6 +6944,63 @@ dependencies = [ "libc", ] +[[package]] +name = "mach2" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d0d1830bcd151a6fc4aea1369af235b36c1528fe976b8ff678683c9995eade8" +dependencies = [ + "libc", +] + +[[package]] +name = "macro_magic" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e03844fc635e92f3a0067e25fa4bf3e3dbf3f2927bf3aa01bb7bc8f1c428949d" +dependencies = [ + "macro_magic_core", + "macro_magic_macros", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "macro_magic_core" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "468155613a44cfd825f1fb0ffa532b018253920d404e6fca1e8d43155198a46d" +dependencies = [ + "const-random", + "derive-syn-parse", + "macro_magic_core_macros", + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "macro_magic_core_macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ea73aa640dc01d62a590d48c0c3521ed739d53b27f919b25c3551e233481654" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "macro_magic_macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef9d79ae96aaba821963320eb2b6e34d17df1e5a83d8a1985c29cc5be59577b3" +dependencies = [ + "macro_magic_core", + "quote", + "syn 2.0.39", +] + [[package]] name = "madara" version = "0.5.0" @@ -6557,7 +7008,7 @@ dependencies = [ "async-trait", "bincode 1.3.3", "blockifier", - "clap 4.4.7", + "clap 4.4.8", "frame-benchmarking", "frame-benchmarking-cli", "frame-system", @@ -6567,7 +7018,6 @@ dependencies = [ "lazy_static", "log", "madara-runtime", - "mc-block-proposer", "mc-commitment-state-diff", "mc-data-availability", "mc-db", @@ -6575,7 +7025,6 @@ dependencies = [ "mc-mapping-sync", "mc-rpc", "mc-storage", - "mc-transaction-pool", "md5", "mockito", "mp-block", @@ -6585,6 +7034,7 @@ dependencies = [ "pallet-starknet", "parity-scale-codec", "reqwest", + "sc-basic-authorship", "sc-cli", "sc-client-api", "sc-consensus", @@ -6595,9 +7045,11 @@ dependencies = [ "sc-keystore", "sc-network", "sc-network-sync", + "sc-offchain", "sc-rpc-api", "sc-service", "sc-telemetry", + "sc-transaction-pool", "sc-transaction-pool-api", "serde", "serde_json", @@ -6606,14 +7058,15 @@ dependencies = [ "sp-blockchain", "sp-consensus-aura", "sp-consensus-grandpa", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", "sp-keyring", "sp-offchain", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-statement-store", "sp-timestamp", - "sp-trie 7.0.0", + "sp-trie 22.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet-core", "starknet_api 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "substrate-build-script-utils", @@ -6650,12 +7103,12 @@ dependencies = [ "sp-api", "sp-block-builder", "sp-consensus-aura", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", "sp-offchain", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-session", - "sp-std 5.0.0", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-transaction-pool", "sp-version", "starknet-ff", @@ -6715,31 +7168,6 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" -[[package]] -name = "mc-block-proposer" -version = "0.5.0" -dependencies = [ - "futures", - "futures-timer", - "log", - "parity-scale-codec", - "parking_lot 0.12.1", - "sc-block-builder", - "sc-client-api", - "sc-proposer-metrics", - "sc-telemetry", - "sc-transaction-pool", - "sc-transaction-pool-api", - "sp-api", - "sp-blockchain", - "sp-consensus", - "sp-core 7.0.0", - "sp-inherents", - "sp-runtime 7.0.0", - "substrate-prometheus-endpoint", - "substrate-test-runtime-client", -] - [[package]] name = "mc-commitment-state-diff" version = "0.5.0" @@ -6755,7 +7183,7 @@ dependencies = [ "sc-client-api", "sp-api", "sp-blockchain", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet_api 0.4.1 (git+https://github.com/keep-starknet-strange/starknet-api?branch=no_std-support-dc83f05)", "thiserror", ] @@ -6769,10 +7197,10 @@ dependencies = [ "avail-subxt", "celestia-rpc", "celestia-types", - "clap 4.4.7", + "clap 4.4.8", "ethers", "futures", - "jsonrpsee 0.20.2", + "jsonrpsee 0.20.3", "log", "mc-db", "mp-storage", @@ -6782,9 +7210,9 @@ dependencies = [ "serde_json", "sp-api", "sp-blockchain", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-keyring", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet_api 0.4.1 (git+https://github.com/keep-starknet-strange/starknet-api?branch=no_std-support-dc83f05)", "subxt", "thiserror", @@ -6804,9 +7232,9 @@ dependencies = [ "parity-db", "parity-scale-codec", "sc-client-db", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-database", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "uuid 1.5.0", ] @@ -6815,7 +7243,7 @@ name = "mc-deoxys" version = "0.1.0" dependencies = [ "blockifier", - "env_logger 0.9.3", + "env_logger", "futures-channel", "hex", "log", @@ -6827,9 +7255,10 @@ dependencies = [ "mp-transactions", "pallet-starknet", "reqwest", + "rodio", "sc-consensus-manual-seal", "serde_json", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet-core", "starknet-ff", "starknet-gateway", @@ -6856,8 +7285,8 @@ dependencies = [ "sc-client-api", "sp-api", "sp-blockchain", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] @@ -6873,7 +7302,6 @@ dependencies = [ "mc-db", "mc-rpc-core", "mc-storage", - "mc-transaction-pool", "mp-block", "mp-felt", "mp-hashers", @@ -6883,17 +7311,19 @@ dependencies = [ "rstest", "sc-client-api", "sc-network-sync", + "sc-transaction-pool", "sc-transaction-pool-api", "serde_json", "sp-api", - "sp-arithmetic 6.0.0", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-blockchain", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet-core", "starknet-ff", "starknet_api 0.4.1 (git+https://github.com/keep-starknet-strange/starknet-api?branch=no_std-support-dc83f05)", "thiserror", + "tokio", ] [[package]] @@ -6920,8 +7350,8 @@ dependencies = [ "serde_with", "sp-api", "sp-blockchain", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet-core", "starknet_api 0.4.1 (git+https://github.com/keep-starknet-strange/starknet-api?branch=no_std-support-dc83f05)", "thiserror", @@ -6941,46 +7371,14 @@ dependencies = [ "sc-client-api", "sp-api", "sp-blockchain", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-storage 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-storage 13.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet-core", "starknet_api 0.4.1 (git+https://github.com/keep-starknet-strange/starknet-api?branch=no_std-support-dc83f05)", ] -[[package]] -name = "mc-transaction-pool" -version = "4.0.0-dev" -dependencies = [ - "async-trait", - "futures", - "futures-timer", - "linked-hash-map", - "log", - "num-traits 0.2.17", - "parity-scale-codec", - "parking_lot 0.12.1", - "sc-block-builder", - "sc-client-api", - "sc-transaction-pool", - "sc-transaction-pool-api", - "sc-utils", - "serde", - "sp-api", - "sp-blockchain", - "sp-consensus", - "sp-core 7.0.0", - "sp-runtime 7.0.0", - "sp-tracing 6.0.0", - "sp-transaction-pool", - "substrate-prometheus-endpoint", - "substrate-test-runtime", - "substrate-test-runtime-client", - "substrate-test-runtime-transaction-pool", - "thiserror", -] - [[package]] name = "md-5" version = "0.10.6" @@ -7009,7 +7407,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2cffa4ad52c6f791f4f8b15f0c05f9824b2ced1160e88cc393d64fff9a8ac64" dependencies = [ - "rustix 0.38.20", + "rustix 0.38.24", ] [[package]] @@ -7057,12 +7455,6 @@ dependencies = [ "hash-db", ] -[[package]] -name = "memory_units" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8452105ba047068f40ff7093dd1d9da90898e63dd61736462e9cdda6a90ad3c3" - [[package]] name = "merlin" version = "2.0.1" @@ -7075,6 +7467,18 @@ dependencies = [ "zeroize", ] 
+[[package]] +name = "merlin" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58c38e2799fc0978b65dfff8023ec7843e2330bb462f19198840b34b6582397d" +dependencies = [ + "byteorder", + "keccak", + "rand_core 0.6.4", + "zeroize", +] + [[package]] name = "mime" version = "0.3.17" @@ -7108,15 +7512,40 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" dependencies = [ "libc", "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.48.0", ] +[[package]] +name = "mixnet" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daa3eb39495d8e2e2947a1d862852c90cc6a4a8845f8b41c8829cb9fcc047f4a" +dependencies = [ + "arrayref", + "arrayvec 0.7.4", + "bitflags 1.3.2", + "blake2 0.10.6", + "c2-chacha", + "curve25519-dalek 4.1.1", + "either", + "hashlink", + "lioness", + "log", + "parking_lot 0.12.1", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rand_distr", + "subtle 2.4.1", + "thiserror", + "zeroize", +] + [[package]] name = "mockall" version = "0.11.4" @@ -7173,7 +7602,7 @@ dependencies = [ "mp-transactions", "parity-scale-codec", "serde", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet-core", "starknet_api 0.4.1 (git+https://github.com/keep-starknet-strange/starknet-api?branch=no_std-support-dc83f05)", ] @@ -7212,7 +7641,7 @@ dependencies = [ "assert_matches", "mp-block", "parity-scale-codec", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] @@ -7223,7 +7652,7 @@ dependencies = [ "hashbrown 0.14.2", "mp-state", "phf", - "sp-arithmetic 6.0.0", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet_api 0.4.1 (git+https://github.com/keep-starknet-strange/starknet-api?branch=no_std-support-dc83f05)", ] @@ -7236,7 +7665,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet-ff", "starknet_api 0.4.1 (git+https://github.com/keep-starknet-strange/starknet-api?branch=no_std-support-dc83f05)", "thiserror-no-std", @@ -7260,7 +7689,7 @@ version = "0.5.0" dependencies = [ "async-trait", "parity-scale-codec", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", "thiserror-no-std", ] @@ -7280,7 +7709,7 @@ dependencies = [ "lazy_static", "parity-scale-codec", "serde", - "sp-io 7.0.0", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] @@ -7335,14 +7764,14 @@ dependencies = [ [[package]] name = "multiaddr" -version = "0.18.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92a651988b3ed3ad1bc8c87d016bb92f6f395b84ed1db9b926b32b1fc5a2c8b5" +checksum = "8b852bc02a2da5feed68cd14fa50d0774b92790a5bdbfa932a813926c8472070" dependencies = [ "arrayref", "byteorder", "data-encoding", - "libp2p-identity 0.2.7", + "libp2p-identity 0.2.8", "multibase", "multihash 0.19.1", "percent-encoding", @@ -7365,9 +7794,9 @@ dependencies = [ [[package]] name 
= "multihash" -version = "0.16.3" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c346cf9999c631f002d8f977c4eaeaa0e6386f16007202308d0b3757522c2cc" +checksum = "835d6ff01d610179fbce3de1694d007e500bf33a7f29689838941d6bf783ae40" dependencies = [ "blake2b_simd", "blake2s_simd", @@ -7380,19 +7809,6 @@ dependencies = [ "unsigned-varint", ] -[[package]] -name = "multihash" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835d6ff01d610179fbce3de1694d007e500bf33a7f29689838941d6bf783ae40" -dependencies = [ - "core2", - "digest 0.10.7", - "multihash-derive", - "sha2 0.10.8", - "unsigned-varint", -] - [[package]] name = "multihash" version = "0.18.1" @@ -7420,7 +7836,7 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d6d4752e6230d8ef7adf7bd5d8c4b1f6561c1014c5ba9a37445ccefe18aa1db" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro-error", "proc-macro2", "quote", @@ -7444,7 +7860,7 @@ dependencies = [ "futures", "log", "pin-project", - "smallvec 1.11.1", + "smallvec 1.11.2", "unsigned-varint", ] @@ -7515,6 +7931,35 @@ dependencies = [ "rawpointer", ] +[[package]] +name = "ndk" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "451422b7e4718271c8b5b3aadf5adedba43dc76312454b387e98fae0fc951aa0" +dependencies = [ + "bitflags 1.3.2", + "jni-sys", + "ndk-sys", + "num_enum 0.5.11", + "raw-window-handle", + "thiserror", +] + +[[package]] +name = "ndk-context" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" + +[[package]] +name = "ndk-sys" +version = "0.4.1+23.1.7779620" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cf2aae958bd232cac5069850591667ad422d263686d75b52a065f9badeee5a3" +dependencies = [ + "jni-sys", +] + [[package]] name = "netlink-packet-core" version = "0.4.2" @@ -7778,11 +8223,11 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70bf6736f74634d299d00086f02986875b3c2d924781a6a2cb6c201e73da0ceb" +checksum = "683751d591e6d81200c39fb0d1032608b77724f34114db54f571ff1317b337c0" dependencies = [ - "num_enum_derive 0.7.0", + "num_enum_derive 0.7.1", ] [[package]] @@ -7791,7 +8236,7 @@ version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", "syn 1.0.109", @@ -7799,14 +8244,14 @@ dependencies = [ [[package]] name = "num_enum_derive" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ea360eafe1022f7cc56cd7b869ed57330fb2453d0c7831d99b74c65d2f5597" +checksum = "6c11e44798ad209ccdd91fc192f0526a369a01234f7373e1b141c96d7cee4f0e" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 2.0.0", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -7836,6 +8281,38 @@ dependencies = [ "memchr", ] +[[package]] +name = "oboe" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8868cc237ee02e2d9618539a23a8d228b9bb3fc2e7a5b11eed3831de77c395d0" +dependencies = [ + "jni 0.20.0", + "ndk", + "ndk-context", + 
"num-derive", + "num-traits 0.2.17", + "oboe-sys", +] + +[[package]] +name = "oboe-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f44155e7fb718d3cfddcf70690b2b51ac4412f347cd9e4fbe511abe9cd7b5f2" +dependencies = [ + "cc", +] + +[[package]] +name = "ogg" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6951b4e8bf21c8193da321bcce9c9dd2e13c858fe078bf9054a288b419ae5d6e" +dependencies = [ + "byteorder", +] + [[package]] name = "oid-registry" version = "0.4.0" @@ -7905,9 +8382,9 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.57" +version = "0.10.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" +checksum = "7a257ad03cd8fb16ad4172fedf8094451e1af1c4b70097636ef2eac9a5f0cc33" dependencies = [ "bitflags 2.4.1", "cfg-if", @@ -7926,7 +8403,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -7937,9 +8414,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.93" +version = "0.9.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" +checksum = "40a4130519a360279579c2053038317e40eff64d13fd3f004f9e1b72b8a6aaf9" dependencies = [ "cc", "libc", @@ -7978,119 +8455,38 @@ dependencies = [ [[package]] name = "pallet-aura" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-support", "frame-system", + "log", "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-application-crypto 7.0.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-consensus-aura", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "pallet-authorship" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-support", "frame-system", "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-runtime 7.0.0", - "sp-std 5.0.0", -] - -[[package]] -name = "pallet-babe" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "frame-benchmarking", - "frame-support", - "frame-system", - "log", - "pallet-authorship", - "pallet-session", - "pallet-timestamp", - "parity-scale-codec", - "scale-info", - "sp-application-crypto 7.0.0", - "sp-consensus-babe", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-session", - "sp-staking", - "sp-std 5.0.0", -] - -[[package]] -name = "pallet-balances" -version = "4.0.0-dev" -source = 
"git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "frame-benchmarking", - "frame-support", - "frame-system", - "log", - "parity-scale-codec", - "scale-info", - "sp-runtime 7.0.0", - "sp-std 5.0.0", -] - -[[package]] -name = "pallet-beefy" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "frame-support", - "frame-system", - "pallet-authorship", - "pallet-session", - "parity-scale-codec", - "scale-info", - "serde", - "sp-consensus-beefy", - "sp-runtime 7.0.0", - "sp-session", - "sp-staking", - "sp-std 5.0.0", -] - -[[package]] -name = "pallet-beefy-mmr" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "array-bytes 4.2.0", - "binary-merkle-tree", - "frame-support", - "frame-system", - "log", - "pallet-beefy", - "pallet-mmr", - "pallet-session", - "parity-scale-codec", - "scale-info", - "serde", - "sp-api", - "sp-consensus-beefy", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "pallet-grandpa" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-benchmarking", "frame-support", @@ -8100,52 +8496,20 @@ dependencies = [ "pallet-session", "parity-scale-codec", "scale-info", - "sp-application-crypto 7.0.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-consensus-grandpa", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-session", "sp-staking", - "sp-std 5.0.0", -] - -[[package]] -name = "pallet-mmr" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "frame-benchmarking", - "frame-support", - "frame-system", - "parity-scale-codec", - "scale-info", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-mmr-primitives", - "sp-runtime 7.0.0", - "sp-std 5.0.0", -] - -[[package]] -name = "pallet-root-testing" -version = "1.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "frame-support", - "frame-system", - "parity-scale-codec", - "scale-info", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "pallet-session" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-support", "frame-system", @@ -8154,13 +8518,14 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-session", "sp-staking", - "sp-std 5.0.0", - "sp-trie 7.0.0", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-trie 22.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] @@ -8202,12 +8567,12 @@ dependencies = [ "serde_json", "serde_with", "sp-api", - "sp-arithmetic 6.0.0", - "sp-core 7.0.0", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "starknet-core", "starknet-crypto 0.6.1", "starknet-ff", @@ -8215,26 +8580,12 @@ dependencies = [ "test-case", ] -[[package]] -name = "pallet-sudo" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "frame-benchmarking", - "frame-support", - "frame-system", - "parity-scale-codec", - "scale-info", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", -] - [[package]] name = "pallet-timestamp" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ + "docify", "frame-benchmarking", "frame-support", "frame-system", @@ -8242,9 +8593,10 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-inherents", - "sp-io 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-storage 13.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-timestamp", ] @@ -8254,7 +8606,7 @@ version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59e9ab494af9e6e813c72170f0d3c1de1500990d62c97cc05cc7576f91aa402f" dependencies = [ - "blake2", + "blake2 0.10.6", "crc32fast", "fs2", "hex", @@ -8289,7 +8641,7 @@ version = "3.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"312270ee71e1cd70289dacf597cab7b207aa107d2f28191c2ae45b2ece18a260" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", "syn 1.0.109", @@ -8344,7 +8696,7 @@ dependencies = [ "instant", "libc", "redox_syscall 0.2.16", - "smallvec 1.11.1", + "smallvec 1.11.2", "winapi", ] @@ -8357,7 +8709,7 @@ dependencies = [ "cfg-if", "libc", "redox_syscall 0.4.1", - "smallvec 1.11.1", + "smallvec 1.11.2", "windows-targets 0.48.5", ] @@ -8375,7 +8727,7 @@ checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" dependencies = [ "base64ct", "rand_core 0.6.4", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -8459,9 +8811,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c022f1e7b65d6a24c0dbbd5fb344c66881bc01f3e5ae74a1c8100f2f985d98a4" +checksum = "ae9cee2a55a544be8b89dc6848072af97a20f2422603c10865be2a42b580fff5" dependencies = [ "memchr", "thiserror", @@ -8470,9 +8822,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35513f630d46400a977c4cb58f78e1bfbe01434316e60c37d27b9ad6139c66d8" +checksum = "81d78524685f5ef2a3b3bd1cafbc9fcabb036253d9b1463e726a91cd16e2dfc2" dependencies = [ "pest", "pest_generator", @@ -8480,22 +8832,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc9fc1b9e7057baba189b5c626e2d6f40681ae5b6eb064dc7c7834101ec8123a" +checksum = "68bd1206e71118b5356dae5ddc61c8b11e28b09ef6a31acbd15ea48a28e0c227" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "pest_meta" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df74e9e7ec4053ceb980e7c0c8bd3594e977fde1af91daba9c928e8e8c6708d" +checksum = "7c747191d4ad9e4a4ab9c8798f1e82a39affe7ef9648390b7e5548d18e099de6" dependencies = [ "once_cell", "pest", @@ -8509,7 +8861,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.0.2", + "indexmap 2.1.0", ] [[package]] @@ -8552,7 +8904,7 @@ dependencies = [ "phf_shared 0.11.2", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -8596,7 +8948,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -8645,15 +8997,9 @@ checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" [[package]] name = "platforms" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8d0eef3571242013a0d5dc84861c3ae4a652e56e12adf8bdc26ff5f8cb34c94" - -[[package]] -name = "platforms" -version = "3.1.2" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4503fa043bf02cee09a9582e9554b4c6403b2ef55e4612e96561d294419429f8" +checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" [[package]] name = "polling" @@ -8673,13 +9019,13 @@ dependencies = [ [[package]] name = "poly1305" -version = "0.7.2" +version = "0.8.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "048aeb476be11a4b6ca432ca569e375810de9294ae78f4774e78ea98a9246ede" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" dependencies = [ "cpufeatures", "opaque-debug 0.3.0", - "universal-hash 0.4.1", + "universal-hash 0.5.1", ] [[package]] @@ -8708,9 +9054,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.4.3" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31114a898e107c51bb1609ffaf55a0e011cf6a4d7f1170d0015a165082c0338b" +checksum = "3bccab0e7fd7cc19f820a1c8c91720af652d0c88dc9664dd72aef2614f04af3b" [[package]] name = "powerfmt" @@ -8787,7 +9133,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -8814,6 +9160,15 @@ dependencies = [ "toml 0.5.11", ] +[[package]] +name = "proc-macro-crate" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +dependencies = [ + "toml_edit 0.20.7", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -8840,13 +9195,13 @@ dependencies = [ [[package]] name = "proc-macro-warning" -version = "0.3.1" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e99670bafb56b9a106419397343bdbc8b8742c3cc449fec6345f86173f47cd4" +checksum = "9b698b0b09d40e9b7c1a47b132d66a8b54bcd20583d9b6d06e4535e383b4405c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -8898,14 +9253,14 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "proptest" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e" +checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" dependencies = [ "bitflags 2.4.1", "lazy_static", @@ -8913,7 +9268,7 @@ dependencies = [ "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax 0.7.5", + "regex-syntax 0.8.2", "unarray", ] @@ -8976,7 +9331,7 @@ dependencies = [ "prost 0.12.1", "prost-types 0.12.1", "regex", - "syn 2.0.38", + "syn 2.0.39", "tempfile", "which", ] @@ -9004,7 +9359,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -9165,7 +9520,17 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", +] + +[[package]] +name = "rand_distr" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31" +dependencies = [ + "num-traits 0.2.17", + "rand 0.8.5", ] [[package]] @@ -9195,6 +9560,12 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "raw-window-handle" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2ff9a1f06a88b01621b7ae906ef0211290d1c8a168a15542486a8f61c0833b9" + [[package]] name = "rawpointer" version = "0.2.1" @@ -9275,12 +9646,12 @@ dependencies = [ 
[[package]] name = "redox_users" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ - "getrandom 0.2.10", - "redox_syscall 0.2.16", + "getrandom 0.2.11", + "libredox", "thiserror", ] @@ -9301,7 +9672,7 @@ checksum = "7f7473c2cfcf90008193dd0e3e16599455cb601a9fce322b5bb55de799664925" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -9313,7 +9684,7 @@ dependencies = [ "fxhash", "log", "slice-group-by", - "smallvec 1.11.1", + "smallvec 1.11.2", ] [[package]] @@ -9378,7 +9749,7 @@ version = "0.11.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" dependencies = [ - "base64 0.21.4", + "base64 0.21.5", "bytes", "encoding_rs", "futures-core", @@ -9387,7 +9758,7 @@ dependencies = [ "http", "http-body", "hyper", - "hyper-rustls 0.24.1", + "hyper-rustls", "hyper-tls", "ipnet", "js-sys", @@ -9397,7 +9768,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite 0.2.13", - "rustls 0.21.7", + "rustls 0.21.8", "rustls-pemfile", "serde", "serde_json", @@ -9405,7 +9776,7 @@ dependencies = [ "system-configuration", "tokio", "tokio-native-tls", - "tokio-rustls 0.24.1", + "tokio-rustls", "tower-service", "url", "wasm-bindgen", @@ -9443,7 +9814,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ "hmac 0.12.1", - "subtle", + "subtle 2.4.1", +] + +[[package]] +name = "ring" +version = "0.1.0" +source = "git+https://github.com/burdges/ring-proof?branch=patch-1#05a756076cb20f981a52afea3a620168de49f95f" +dependencies = [ + "ark-ec", + "ark-ff 0.4.2", + "ark-poly", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "blake2 0.10.6", + "common", + "fflonk", + "merlin 3.0.0", ] [[package]] @@ -9468,7 +9855,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb0205304757e5d899b9c2e448b867ffd03ae7f988002e47cd24954391394d0b" dependencies = [ "cc", - "getrandom 0.2.10", + "getrandom 0.2.11", "libc", "spin 0.9.8", "untrusted 0.9.0", @@ -9516,15 +9903,28 @@ dependencies = [ "librocksdb-sys", ] +[[package]] +name = "rodio" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b1bb7b48ee48471f55da122c0044fcc7600cfcc85db88240b89cb832935e611" +dependencies = [ + "claxon", + "cpal", + "hound", + "lewton", + "symphonia", +] + [[package]] name = "rpassword" -version = "7.2.0" +version = "7.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6678cf63ab3491898c0d021b493c94c9b221d91295294a2a5746eacbe5928322" +checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f" dependencies = [ "libc", "rtoolbox", - "winapi", + "windows-sys 0.48.0", ] [[package]] @@ -9552,7 +9952,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.0", - "syn 2.0.38", + "syn 2.0.39", "unicode-ident", ] @@ -9584,12 +9984,12 @@ dependencies = [ [[package]] name = "rtoolbox" -version = "0.0.1" +version = "0.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "034e22c514f5c0cb8a10ff341b9b048b5ceb21591f31c8f44c43b960f9b3524a" +checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e" dependencies = [ 
"libc", - "winapi", + "windows-sys 0.48.0", ] [[package]] @@ -9608,9 +10008,9 @@ dependencies = [ [[package]] name = "ruint" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95294d6e3a6192f3aabf91c38f56505a625aa495533442744185a36d75a790c4" +checksum = "724fd11728a3804e9944b14cab63825024c40bf42f8af87c8b5d97c4bbacf426" dependencies = [ "alloy-rlp", "ark-ff 0.3.0", @@ -9618,6 +10018,7 @@ dependencies = [ "bytes", "fastrlp", "num-bigint", + "num-traits 0.2.17", "parity-scale-codec", "primitive-types", "proptest", @@ -9682,9 +10083,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.36.16" +version = "0.36.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6da3636faa25820d8648e0e31c5d519bbb01f72fdf57131f0f5f7da5fed36eab" +checksum = "305efbd14fde4139eb501df5f136994bb520b033fa9fbdce287507dc23b8c7ed" dependencies = [ "bitflags 1.3.2", "errno", @@ -9696,9 +10097,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.26" +version = "0.37.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84f3f8f960ed3b5a59055428714943298bf3fa2d4a1d53135084e0544829d995" +checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" dependencies = [ "bitflags 1.3.2", "errno", @@ -9710,14 +10111,14 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.20" +version = "0.38.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ce50cb2e16c2903e30d1cbccfd8387a74b9d4c938b6a4c5ec6cc7556f7a8a0" +checksum = "9ad981d6c340a49cdc40a1028d9c6084ec7e9fa33fcb839cab656a267071e234" dependencies = [ "bitflags 2.4.1", "errno", "libc", - "linux-raw-sys 0.4.10", + "linux-raw-sys 0.4.11", "windows-sys 0.48.0", ] @@ -9742,20 +10143,20 @@ checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" dependencies = [ "log", "ring 0.16.20", - "sct 0.7.0", + "sct 0.7.1", "webpki 0.22.4", ] [[package]] name = "rustls" -version = "0.21.7" +version = "0.21.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd8d6c9f025a446bc4d18ad9632e69aec8f287aa84499ee335599fabd20c3fd8" +checksum = "446e14c5cda4f3f30fe71863c34ec70f5ac79d6087097ad0bb433e1be5edf04c" dependencies = [ "log", - "ring 0.16.20", - "rustls-webpki 0.101.6", - "sct 0.7.0", + "ring 0.17.5", + "rustls-webpki", + "sct 0.7.1", ] [[package]] @@ -9772,31 +10173,21 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" -dependencies = [ - "base64 0.21.4", -] - -[[package]] -name = "rustls-webpki" -version = "0.100.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6a5fc258f1c1276dfe3016516945546e2d5383911efc0fc4f1cdc5df3a4ae3" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "ring 0.16.20", - "untrusted 0.7.1", + "base64 0.21.5", ] [[package]] name = "rustls-webpki" -version = "0.101.6" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c7d5dece342910d9ba34d259310cae3e0154b873b35408b787b59bce53d34fe" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring 0.16.20", - "untrusted 0.7.1", + "ring 0.17.5", + "untrusted 0.9.0", ] [[package]] @@ -9845,7 +10236,7 @@ dependencies = [ "parking_lot 
0.11.2", "rustc-hash", "salsa-macros", - "smallvec 1.11.1", + "smallvec 1.11.2", ] [[package]] @@ -9881,33 +10272,56 @@ dependencies = [ [[package]] name = "sc-allocator" version = "4.1.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "log", - "sp-core 7.0.0", - "sp-wasm-interface 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-wasm-interface 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", ] +[[package]] +name = "sc-basic-authorship" +version = "0.10.0-dev" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" +dependencies = [ + "futures", + "futures-timer", + "log", + "parity-scale-codec", + "sc-block-builder", + "sc-client-api", + "sc-proposer-metrics", + "sc-telemetry", + "sc-transaction-pool-api", + "sp-api", + "sp-blockchain", + "sp-consensus", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-inherents", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "substrate-prometheus-endpoint", +] + [[package]] name = "sc-block-builder" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "parity-scale-codec", "sc-client-api", "sp-api", "sp-block-builder", "sp-blockchain", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sc-chain-spec" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "memmap2", "sc-chain-spec-derive", @@ -9918,30 +10332,30 @@ dependencies = [ "serde", "serde_json", "sp-blockchain", - "sp-core 7.0.0", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sc-chain-spec-derive" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "sc-cli" version = "0.10.0-dev" -source = 
"git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", + "array-bytes 6.2.0", "chrono", - "clap 4.4.7", + "clap 4.4.8", "fdlimit", "futures", "libp2p-identity 0.1.3", @@ -9954,8 +10368,8 @@ dependencies = [ "sc-client-api", "sc-client-db", "sc-keystore", + "sc-mixnet", "sc-network", - "sc-network-common", "sc-service", "sc-telemetry", "sc-tracing", @@ -9963,11 +10377,11 @@ dependencies = [ "serde", "serde_json", "sp-blockchain", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-keyring", - "sp-keystore 0.13.0", - "sp-panic-handler 5.0.0", - "sp-runtime 7.0.0", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-panic-handler 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-version", "thiserror", "tiny-bip39", @@ -9977,7 +10391,7 @@ dependencies = [ [[package]] name = "sc-client-api" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "fnv", "futures", @@ -9990,21 +10404,21 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-consensus", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-database", - "sp-externalities 0.13.0", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-statement-store", - "sp-storage 7.0.0", + "sp-storage 13.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-trie 22.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-prometheus-endpoint", ] [[package]] name = "sc-client-db" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "hash-db", "kvdb", @@ -10018,19 +10432,19 @@ dependencies = [ "sc-client-api", "sc-state-db", "schnellru", - "sp-arithmetic 6.0.0", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-blockchain", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-database", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", - "sp-trie 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-trie 22.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sc-consensus" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "futures", @@ -10045,9 +10459,9 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-consensus", - "sp-core 7.0.0", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-prometheus-endpoint", "thiserror", ] @@ -10055,7 +10469,7 @@ dependencies = [ [[package]] name = "sc-consensus-aura" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "futures", @@ -10067,16 +10481,16 @@ dependencies = [ "sc-consensus-slots", "sc-telemetry", "sp-api", - "sp-application-crypto 7.0.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-block-builder", "sp-blockchain", "sp-consensus", "sp-consensus-aura", "sp-consensus-slots", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-prometheus-endpoint", "thiserror", ] @@ -10084,7 +10498,7 @@ dependencies = [ [[package]] name = "sc-consensus-babe" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "fork-tree", @@ -10099,20 +10513,19 @@ dependencies = [ "sc-consensus", "sc-consensus-epochs", "sc-consensus-slots", - "sc-keystore", "sc-telemetry", - "scale-info", + "sc-transaction-pool-api", "sp-api", - "sp-application-crypto 7.0.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-block-builder", "sp-blockchain", "sp-consensus", "sp-consensus-babe", "sp-consensus-slots", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-prometheus-endpoint", "thiserror", ] @@ -10120,23 +10533,23 @@ dependencies = [ [[package]] name = "sc-consensus-epochs" version = "0.10.0-dev" -source = 
"git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "fork-tree", "parity-scale-codec", "sc-client-api", "sc-consensus", "sp-blockchain", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sc-consensus-grandpa" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "ahash 0.8.3", - "array-bytes 4.2.0", + "ahash 0.8.6", + "array-bytes 6.2.0", "async-trait", "dyn-clone", "finality-grandpa", @@ -10155,17 +10568,18 @@ dependencies = [ "sc-network-common", "sc-network-gossip", "sc-telemetry", + "sc-transaction-pool-api", "sc-utils", "serde_json", "sp-api", - "sp-application-crypto 7.0.0", - "sp-arithmetic 6.0.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-blockchain", "sp-consensus", "sp-consensus-grandpa", - "sp-core 7.0.0", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-prometheus-endpoint", "thiserror", ] @@ -10173,7 +10587,7 @@ dependencies = [ [[package]] name = "sc-consensus-manual-seal" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "assert_matches", "async-trait", @@ -10196,10 +10610,10 @@ dependencies = [ "sp-consensus-aura", "sp-consensus-babe", "sp-consensus-slots", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-timestamp", "substrate-prometheus-endpoint", "thiserror", @@ -10208,7 +10622,7 @@ dependencies = [ [[package]] name = "sc-consensus-slots" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "futures", @@ -10218,46 +10632,46 @@ dependencies = [ "sc-client-api", "sc-consensus", "sc-telemetry", - "sp-arithmetic 6.0.0", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-blockchain", "sp-consensus", "sp-consensus-slots", - "sp-core 7.0.0", + "sp-core 21.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sc-executor" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "lru 0.8.1", "parity-scale-codec", "parking_lot 0.12.1", "sc-executor-common", "sc-executor-wasmtime", + "schnellru", "sp-api", - "sp-core 7.0.0", - "sp-externalities 0.13.0", - "sp-io 7.0.0", - "sp-panic-handler 5.0.0", - "sp-runtime-interface 7.0.0", - "sp-trie 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-panic-handler 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime-interface 17.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-trie 22.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-version", - "sp-wasm-interface 7.0.0", + "sp-wasm-interface 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "tracing", ] [[package]] name = "sc-executor-common" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "sc-allocator", "sp-maybe-compressed-blob", - "sp-wasm-interface 7.0.0", + "sp-wasm-interface 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", "wasm-instrument", ] @@ -10265,25 +10679,25 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "anyhow", "cfg-if", "libc", "log", - "once_cell", - "rustix 0.36.16", + "parking_lot 0.12.1", + "rustix 0.36.17", "sc-allocator", "sc-executor-common", - "sp-runtime-interface 7.0.0", - "sp-wasm-interface 7.0.0", + "sp-runtime-interface 17.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-wasm-interface 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "wasmtime", ] [[package]] name = "sc-informant" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "ansi_term", "futures", @@ -10293,29 +10707,57 @@ dependencies = [ "sc-network", 
"sc-network-common", "sp-blockchain", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sc-keystore" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", + "array-bytes 6.2.0", "parking_lot 0.12.1", "serde_json", - "sp-application-crypto 7.0.0", - "sp-core 7.0.0", - "sp-keystore 0.13.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "thiserror", +] + +[[package]] +name = "sc-mixnet" +version = "0.1.0-dev" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" +dependencies = [ + "array-bytes 4.2.0", + "arrayvec 0.7.4", + "blake2 0.10.6", + "futures", + "futures-timer", + "libp2p-identity 0.1.3", + "log", + "mixnet", + "multiaddr 0.17.1", + "parity-scale-codec", + "parking_lot 0.12.1", + "sc-client-api", + "sc-network", + "sc-transaction-pool-api", + "sp-api", + "sp-consensus", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-mixnet", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", ] [[package]] name = "sc-network" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", + "array-bytes 6.2.0", "async-channel", "async-trait", "asynchronous-codec", @@ -10328,40 +10770,36 @@ dependencies = [ "libp2p", "linked_hash_set", "log", - "lru 0.8.1", "mockall", "parity-scale-codec", "parking_lot 0.12.1", + "partial_sort", "pin-project", "rand 0.8.5", - "sc-block-builder", "sc-client-api", - "sc-consensus", "sc-network-common", - "sc-peerset", "sc-utils", "serde", "serde_json", - "smallvec 1.11.1", - "snow", - "sp-arithmetic 6.0.0", + "smallvec 1.11.2", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-blockchain", - "sp-consensus", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-prometheus-endpoint", "thiserror", "unsigned-varint", + "wasm-timer", "zeroize", ] [[package]] name = "sc-network-bitswap" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-channel", - "cid 0.8.6", + "cid 0.9.0", "futures", "libp2p-identity 0.1.3", 
"log", @@ -10369,9 +10807,8 @@ dependencies = [ "prost-build 0.11.9", "sc-client-api", "sc-network", - "sc-network-common", "sp-blockchain", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", "unsigned-varint", ] @@ -10379,46 +10816,34 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", "async-trait", "bitflags 1.3.2", - "bytes", "futures", - "futures-timer", "libp2p-identity 0.1.3", "parity-scale-codec", "prost-build 0.11.9", "sc-consensus", - "sc-peerset", - "sc-utils", - "serde", - "smallvec 1.11.1", - "sp-blockchain", "sp-consensus", "sp-consensus-grandpa", - "sp-runtime 7.0.0", - "substrate-prometheus-endpoint", - "thiserror", - "zeroize", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sc-network-gossip" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", "futures", "futures-timer", "libp2p", "log", - "lru 0.8.1", "sc-network", "sc-network-common", - "sc-peerset", - "sp-runtime 7.0.0", + "schnellru", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-prometheus-endpoint", "tracing", ] @@ -10426,9 +10851,9 @@ dependencies = [ [[package]] name = "sc-network-light" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", + "array-bytes 6.2.0", "async-channel", "futures", "libp2p-identity 0.1.3", @@ -10438,20 +10863,18 @@ dependencies = [ "prost-build 0.11.9", "sc-client-api", "sc-network", - "sc-network-common", - "sc-peerset", "sp-blockchain", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", ] [[package]] name = "sc-network-sync" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", + "array-bytes 6.2.0", "async-channel", "async-trait", "fork-tree", @@ -10459,7 +10882,6 @@ dependencies = [ "futures-timer", "libp2p", "log", - "lru 0.8.1", "mockall", "parity-scale-codec", "prost 0.11.9", @@ -10468,52 +10890,52 @@ dependencies = [ "sc-consensus", "sc-network", "sc-network-common", - "sc-peerset", "sc-utils", - "smallvec 1.11.1", - "sp-arithmetic 6.0.0", + "schnellru", + "smallvec 1.11.2", + "sp-arithmetic 16.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-blockchain", "sp-consensus", "sp-consensus-grandpa", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-prometheus-endpoint", "thiserror", + "tokio-stream", ] [[package]] name = "sc-network-transactions" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", + "array-bytes 6.2.0", "futures", "libp2p", "log", "parity-scale-codec", - "pin-project", "sc-network", "sc-network-common", - "sc-peerset", "sc-utils", "sp-consensus", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-prometheus-endpoint", ] [[package]] name = "sc-offchain" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", + "array-bytes 6.2.0", "bytes", "fnv", "futures", "futures-timer", "hyper", - "hyper-rustls 0.23.2", + "hyper-rustls", "libp2p", + "log", "num_cpus", "once_cell", "parity-scale-codec", @@ -10522,36 +10944,22 @@ dependencies = [ "sc-client-api", "sc-network", "sc-network-common", - "sc-peerset", + "sc-transaction-pool-api", "sc-utils", "sp-api", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-offchain", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "threadpool", "tracing", ] -[[package]] -name = "sc-peerset" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "futures", - "libp2p-identity 0.1.3", - "log", - "parking_lot 0.12.1", - "partial_sort", - "sc-utils", - "serde_json", - "sp-arithmetic 6.0.0", - "wasm-timer", -] - [[package]] name = "sc-proposer-metrics" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "log", "substrate-prometheus-endpoint", @@ -10560,7 +10968,7 @@ dependencies = [ [[package]] name = "sc-rpc" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "futures", "jsonrpsee 0.16.3", @@ -10570,6 +10978,7 @@ dependencies = [ "sc-block-builder", "sc-chain-spec", "sc-client-api", + 
"sc-mixnet", "sc-rpc-api", "sc-tracing", "sc-transaction-pool-api", @@ -10577,11 +10986,11 @@ dependencies = [ "serde_json", "sp-api", "sp-blockchain", - "sp-core 7.0.0", - "sp-keystore 0.13.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-offchain", "sp-rpc", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-session", "sp-statement-store", "sp-version", @@ -10591,18 +11000,19 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "jsonrpsee 0.16.3", "parity-scale-codec", "sc-chain-spec", + "sc-mixnet", "sc-transaction-pool-api", "scale-info", "serde", "serde_json", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-rpc", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-version", "thiserror", ] @@ -10610,7 +11020,7 @@ dependencies = [ [[package]] name = "sc-rpc-server" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "http", "jsonrpsee 0.16.3", @@ -10625,9 +11035,9 @@ dependencies = [ [[package]] name = "sc-rpc-spec-v2" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", + "array-bytes 6.2.0", "futures", "futures-util", "hex", @@ -10638,20 +11048,22 @@ dependencies = [ "sc-chain-spec", "sc-client-api", "sc-transaction-pool-api", + "sc-utils", "serde", "sp-api", "sp-blockchain", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-version", "thiserror", + "tokio", "tokio-stream", ] [[package]] name = "sc-service" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "directories", @@ -10678,11 +11090,9 @@ dependencies = [ "sc-network-light", "sc-network-sync", "sc-network-transactions", - "sc-offchain", "sc-rpc", "sc-rpc-server", "sc-rpc-spec-v2", - "sc-storage-monitor", "sc-sysinfo", "sc-telemetry", "sc-tracing", @@ -10694,16 +11104,16 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-consensus", - "sp-core 7.0.0", - "sp-externalities 0.13.0", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-session", - "sp-state-machine 0.13.0", - "sp-storage 7.0.0", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-storage 13.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-transaction-pool", "sp-transaction-storage-proof", - "sp-trie 7.0.0", + "sp-trie 22.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-version", "static_init", "substrate-prometheus-endpoint", @@ -10717,34 +11127,18 @@ dependencies = [ [[package]] name = "sc-state-db" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "log", "parity-scale-codec", "parking_lot 0.12.1", - "sp-core 7.0.0", -] - -[[package]] -name = "sc-storage-monitor" -version = "0.1.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "clap 4.4.7", - "fs4", - "futures", - "log", - "sc-client-db", - "sc-utils", - "sp-core 7.0.0", - "thiserror", - "tokio", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sc-sysinfo" version = "6.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "futures", "libc", @@ -10755,15 +11149,15 @@ dependencies = [ "sc-telemetry", "serde", "serde_json", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-std 5.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sc-telemetry" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "chrono", "futures", @@ -10782,7 +11176,7 @@ dependencies = [ [[package]] name = "sc-tracing" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "ansi_term", "atty", @@ -10790,20 +11184,18 @@ dependencies = [ "lazy_static", "libc", "log", - "once_cell", "parking_lot 0.12.1", "regex", "rustc-hash", "sc-client-api", - "sc-rpc-server", "sc-tracing-proc-macro", "serde", "sp-api", "sp-blockchain", - "sp-core 7.0.0", + "sp-core 21.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-rpc", - "sp-runtime 7.0.0", - "sp-tracing 6.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-tracing 10.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", "tracing", "tracing-log", @@ -10813,25 +11205,24 @@ dependencies = [ [[package]] name = "sc-tracing-proc-macro" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "sc-transaction-pool" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "futures", "futures-timer", "linked-hash-map", "log", - "num-traits 0.2.17", "parity-scale-codec", "parking_lot 0.12.1", "sc-client-api", @@ -10840,9 +11231,9 @@ dependencies = [ "serde", "sp-api", "sp-blockchain", - "sp-core 7.0.0", - "sp-runtime 7.0.0", - "sp-tracing 6.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-tracing 10.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-transaction-pool", "substrate-prometheus-endpoint", "thiserror", @@ -10851,21 +11242,23 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "futures", "log", + "parity-scale-codec", "serde", "sp-blockchain", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", ] [[package]] name = "sc-utils" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-channel", "futures", @@ -10874,7 +11267,7 @@ dependencies = [ "log", "parking_lot 0.12.1", "prometheus", - "sp-arithmetic 6.0.0", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] @@ -10899,7 +11292,7 @@ dependencies = [ "scale-bits", "scale-decode-derive", "scale-info", - "smallvec 1.11.1", + "smallvec 1.11.2", "thiserror", ] @@ -10910,7 +11303,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4391f0dfbb6690f035f6d2a15d6a12f88cc5395c36bcc056db07ffa2a90870ec" dependencies = [ "darling 0.14.4", - "proc-macro-crate", + "proc-macro-crate 1.1.3", 
"proc-macro2", "quote", "syn 1.0.109", @@ -10927,7 +11320,7 @@ dependencies = [ "scale-bits", "scale-encode-derive", "scale-info", - "smallvec 1.11.1", + "smallvec 1.11.2", "thiserror", ] @@ -10938,7 +11331,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "316e0fb10ec0fee266822bd641bab5e332a4ab80ef8c5b5ff35e5401a394f5a6" dependencies = [ "darling 0.14.4", - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", "syn 1.0.109", @@ -10964,7 +11357,7 @@ version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abf2c68b89cafb3b8d918dd07b42be0da66ff202cf1155c5739a4e0c1ea0dc19" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", "syn 1.0.109", @@ -10977,9 +11370,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2096d36e94ce9bf87d8addb752423b6b19730dc88edd7cc452bb2b90573f7a7" dependencies = [ "base58", - "blake2", + "blake2 0.10.6", "either", - "frame-metadata", + "frame-metadata 15.1.0", "parity-scale-codec", "scale-bits", "scale-decode", @@ -11001,9 +11394,9 @@ dependencies = [ [[package]] name = "schemars" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f7b0ce13155372a76ee2e1c5ffba1fe61ede73fbea5630d61eee6fac4929c0c" +checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29" dependencies = [ "dyn-clone", "indexmap 1.9.3", @@ -11014,9 +11407,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e85e2a16b12bdb763244c69ab79363d71db2b4b918a2def53f80b02e0574b13c" +checksum = "c767fd6fa65d9ccf9cf026122c1b555f2ef9a4f0cea69da4d7dbc3e258d30967" dependencies = [ "proc-macro2", "quote", @@ -11030,7 +11423,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "772575a524feeb803e5b0fcbc6dd9f367e579488197c94c6e4023aad2305774d" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", "cfg-if", "hashbrown 0.13.2", ] @@ -11045,11 +11438,11 @@ dependencies = [ "arrayvec 0.5.2", "curve25519-dalek 2.1.3", "getrandom 0.1.16", - "merlin", + "merlin 2.0.1", "rand 0.7.3", "rand_core 0.5.1", "sha2 0.8.2", - "subtle", + "subtle 2.4.1", "zeroize", ] @@ -11089,12 +11482,12 @@ dependencies = [ [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring 0.16.20", - "untrusted 0.7.1", + "ring 0.17.5", + "untrusted 0.9.0", ] [[package]] @@ -11119,7 +11512,7 @@ dependencies = [ "der 0.6.1", "generic-array 0.14.7", "pkcs8 0.9.0", - "subtle", + "subtle 2.4.1", "zeroize", ] @@ -11133,7 +11526,7 @@ dependencies = [ "der 0.7.8", "generic-array 0.14.7", "pkcs8 0.10.2", - "subtle", + "subtle 2.4.1", "zeroize", ] @@ -11243,9 +11636,9 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.190" +version = "1.0.192" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7" +checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001" dependencies = [ "serde_derive", ] @@ -11272,13 +11665,13 @@ dependencies = [ 
[[package]] name = "serde_derive" -version = "1.0.190" +version = "1.0.192" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3" +checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -11294,9 +11687,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.107" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" +checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" dependencies = [ "itoa", "ryu", @@ -11316,20 +11709,20 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" +checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "serde_spanned" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" +checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" dependencies = [ "serde", ] @@ -11371,7 +11764,7 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -11551,9 +11944,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.1" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" +checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" [[package]] name = "smol_str" @@ -11572,26 +11965,26 @@ checksum = "5e9f0ab6ef7eb7353d9119c170a436d1bf248eea575ac42d19d12f4e34130831" [[package]] name = "snow" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c9d1425eb528a21de2755c75af4c9b5d57f50a0d4c3b7f1828a4cd03f8ba155" +checksum = "58021967fd0a5eeeb23b08df6cc244a4d4a5b4aec1d27c9e02fad1a58b4cd74e" dependencies = [ - "aes-gcm 0.9.4", - "blake2", + "aes-gcm 0.10.3", + "blake2 0.10.6", "chacha20poly1305", "curve25519-dalek 4.1.1", "rand_core 0.6.4", - "ring 0.16.20", + "ring 0.17.5", "rustc_version 0.4.0", "sha2 0.10.8", - "subtle", + "subtle 2.4.1", ] [[package]] name = "socket2" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" dependencies = [ "libc", "winapi", @@ -11599,9 +11992,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", "windows-sys 0.48.0", @@ -11641,19 +12034,20 @@ dependencies = [ [[package]] name = "sp-api" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" 
+source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "hash-db", "log", "parity-scale-codec", "scale-info", "sp-api-proc-macro", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-metadata-ir", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", - "sp-std 5.0.0", - "sp-trie 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-trie 22.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-version", "thiserror", ] @@ -11661,180 +12055,157 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "Inflector", - "blake2", + "blake2 0.10.6", "expander", - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "sp-application-crypto" -version = "7.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "23.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899492ea547816d5dfe9a5a2ecc32f65a7110805af6da3380aa4902371b31dc2" dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-std 5.0.0", + "sp-core 21.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-io 23.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "sp-application-crypto" version = "23.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "899492ea547816d5dfe9a5a2ecc32f65a7110805af6da3380aa4902371b31dc2" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 21.0.0", - "sp-io 23.0.0", - "sp-std 8.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-arithmetic" -version = "6.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "16.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb6020576e544c6824a51d651bc8df8e6ab67cd59f1c9ac09868bb81a5199ded" dependencies = [ "integer-sqrt", "num-traits 0.2.17", "parity-scale-codec", "scale-info", "serde", - "sp-std 5.0.0", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "static_assertions", ] [[package]] name = 
"sp-arithmetic" version = "16.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb6020576e544c6824a51d651bc8df8e6ab67cd59f1c9ac09868bb81a5199ded" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "integer-sqrt", "num-traits 0.2.17", "parity-scale-codec", "scale-info", "serde", - "sp-std 8.0.0", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "static_assertions", ] [[package]] name = "sp-block-builder" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "parity-scale-codec", "sp-api", "sp-inherents", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-blockchain" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "futures", "log", - "lru 0.8.1", "parity-scale-codec", "parking_lot 0.12.1", + "schnellru", "sp-api", "sp-consensus", "sp-database", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", ] [[package]] name = "sp-consensus" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "futures", "log", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", ] [[package]] name = "sp-consensus-aura" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "parity-scale-codec", "scale-info", "sp-api", - "sp-application-crypto 7.0.0", - "sp-consensus", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-consensus-slots", "sp-inherents", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-timestamp", ] [[package]] name = 
"sp-consensus-babe" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "parity-scale-codec", "scale-info", "serde", "sp-api", - "sp-application-crypto 7.0.0", - "sp-consensus", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-consensus-slots", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-timestamp", ] -[[package]] -name = "sp-consensus-beefy" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "lazy_static", - "parity-scale-codec", - "scale-info", - "serde", - "sp-api", - "sp-application-crypto 7.0.0", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-mmr-primitives", - "sp-runtime 7.0.0", - "sp-std 5.0.0", - "strum 0.24.1", -] - [[package]] name = "sp-consensus-grandpa" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "finality-grandpa", "log", @@ -11842,33 +12213,34 @@ dependencies = [ "scale-info", "serde", "sp-api", - "sp-application-crypto 7.0.0", - "sp-core 7.0.0", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-consensus-slots" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-std 5.0.0", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-timestamp", ] [[package]] name = "sp-core" -version = "7.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "21.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f18d9e2f67d8661f9729f35347069ac29d92758b59135176799db966947a7336" dependencies = [ "array-bytes 4.2.0", "bitflags 1.3.2", - "blake2", + "blake2 0.10.6", "bounded-collections", "bs58 0.4.0", "dyn-clonable", @@ -11880,7 +12252,7 @@ dependencies = [ "lazy_static", 
"libsecp256k1", "log", - "merlin", + "merlin 2.0.1", "parity-scale-codec", "parking_lot 0.12.1", "paste", @@ -11892,12 +12264,12 @@ dependencies = [ "secp256k1", "secrecy", "serde", - "sp-core-hashing 5.0.0", - "sp-debug-derive 5.0.0", - "sp-externalities 0.13.0", - "sp-runtime-interface 7.0.0", - "sp-std 5.0.0", - "sp-storage 7.0.0", + "sp-core-hashing 9.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-debug-derive 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-externalities 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-runtime-interface 17.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-storage 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "ss58-registry", "substrate-bip39", "thiserror", @@ -11908,14 +12280,14 @@ dependencies = [ [[package]] name = "sp-core" version = "21.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f18d9e2f67d8661f9729f35347069ac29d92758b59135176799db966947a7336" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "array-bytes 4.2.0", + "array-bytes 6.2.0", + "bandersnatch_vrfs", "bitflags 1.3.2", - "blake2", + "blake2 0.10.6", "bounded-collections", - "bs58 0.4.0", + "bs58 0.5.0", "dyn-clonable", "ed25519-zebra", "futures", @@ -11925,7 +12297,7 @@ dependencies = [ "lazy_static", "libsecp256k1", "log", - "merlin", + "merlin 2.0.1", "parity-scale-codec", "parking_lot 0.12.1", "paste", @@ -11937,63 +12309,63 @@ dependencies = [ "secp256k1", "secrecy", "serde", - "sp-core-hashing 9.0.0", - "sp-debug-derive 8.0.0", - "sp-externalities 0.19.0", - "sp-runtime-interface 17.0.0", - "sp-std 8.0.0", - "sp-storage 13.0.0", + "sp-core-hashing 9.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-debug-derive 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime-interface 17.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-storage 13.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "ss58-registry", "substrate-bip39", "thiserror", "tiny-bip39", + "tracing", + "w3f-bls", "zeroize", ] [[package]] name = "sp-core-hashing" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "9.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ee599a8399448e65197f9a6cee338ad192e9023e35e31f22382964c3c174c68" dependencies = [ "blake2b_simd", "byteorder", "digest 0.10.7", "sha2 0.10.8", "sha3", - "sp-std 5.0.0", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "twox-hash", ] [[package]] name = "sp-core-hashing" version = "9.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ee599a8399448e65197f9a6cee338ad192e9023e35e31f22382964c3c174c68" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "blake2b_simd", "byteorder", "digest 0.10.7", "sha2 
0.10.8", "sha3", - "sp-std 8.0.0", "twox-hash", ] [[package]] name = "sp-core-hashing-proc-macro" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "9.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "proc-macro2", "quote", - "sp-core-hashing 5.0.0", - "syn 2.0.38", + "sp-core-hashing 9.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "syn 2.0.39", ] [[package]] name = "sp-database" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "kvdb", "parking_lot 0.12.1", @@ -12001,67 +12373,78 @@ dependencies = [ [[package]] name = "sp-debug-derive" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f531814d2f16995144c74428830ccf7d94ff4a7749632b83ad8199b181140c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "sp-debug-derive" version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f531814d2f16995144c74428830ccf7d94ff4a7749632b83ad8199b181140c" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "sp-externalities" -version = "0.13.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0f71c671e01a8ca60da925d43a1b351b69626e268b8837f8371e320cf1dd100" dependencies = [ "environmental", "parity-scale-codec", - "sp-std 5.0.0", - "sp-storage 7.0.0", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-storage 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "sp-externalities" version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0f71c671e01a8ca60da925d43a1b351b69626e268b8837f8371e320cf1dd100" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "environmental", "parity-scale-codec", - "sp-std 8.0.0", - "sp-storage 13.0.0", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-storage 13.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", +] + +[[package]] +name = "sp-genesis-builder" +version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" +dependencies = [ + "serde_json", + "sp-api", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-inherents" version = "4.0.0-dev" -source 
= "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "sp-core 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", ] [[package]] name = "sp-io" -version = "7.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "23.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d597e35a9628fe7454b08965b2442e3ec0f264b0a90d41328e87422cec02e99" dependencies = [ "bytes", "ed25519 1.5.3", @@ -12072,14 +12455,14 @@ dependencies = [ "parity-scale-codec", "rustversion", "secp256k1", - "sp-core 7.0.0", - "sp-externalities 0.13.0", - "sp-keystore 0.13.0", - "sp-runtime-interface 7.0.0", - "sp-state-machine 0.13.0", - "sp-std 5.0.0", - "sp-tracing 6.0.0", - "sp-trie 7.0.0", + "sp-core 21.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-externalities 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-keystore 0.27.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-runtime-interface 17.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-state-machine 0.28.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-tracing 10.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-trie 22.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "tracing", "tracing-core", ] @@ -12087,73 +12470,68 @@ dependencies = [ [[package]] name = "sp-io" version = "23.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d597e35a9628fe7454b08965b2442e3ec0f264b0a90d41328e87422cec02e99" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "bytes", - "ed25519 1.5.3", - "ed25519-dalek 1.0.1", - "futures", + "ed25519-dalek 2.0.0", "libsecp256k1", "log", "parity-scale-codec", "rustversion", "secp256k1", - "sp-core 21.0.0", - "sp-externalities 0.19.0", - "sp-keystore 0.27.0", - "sp-runtime-interface 17.0.0", - "sp-state-machine 0.28.0", - "sp-std 8.0.0", - "sp-tracing 10.0.0", - "sp-trie 22.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime-interface 17.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-tracing 10.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-trie 22.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "tracing", "tracing-core", ] 
[[package]] name = "sp-keyring" -version = "7.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "24.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "lazy_static", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "strum 0.24.1", ] [[package]] name = "sp-keystore" -version = "0.13.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9be3cdd67cc1d9c1db17c5cbc4ec4924054a8437009d167f21f6590797e4aa45" dependencies = [ "futures", "parity-scale-codec", "parking_lot 0.12.1", - "serde", - "sp-core 7.0.0", - "sp-externalities 0.13.0", + "sp-core 21.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-externalities 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror", ] [[package]] name = "sp-keystore" version = "0.27.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9be3cdd67cc1d9c1db17c5cbc4ec4924054a8437009d167f21f6590797e4aa45" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "futures", "parity-scale-codec", "parking_lot 0.12.1", - "sp-core 21.0.0", - "sp-externalities 0.19.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", ] [[package]] name = "sp-maybe-compressed-blob" version = "4.1.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "thiserror", "zstd 0.12.4", @@ -12162,46 +12540,41 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.1.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "frame-metadata", + "frame-metadata 16.0.0", "parity-scale-codec", "scale-info", - "sp-std 5.0.0", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] -name = "sp-mmr-primitives" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +name = "sp-mixnet" +version = "0.1.0-dev" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "ckb-merkle-mountain-range", - "log", "parity-scale-codec", "scale-info", - "serde", "sp-api", - "sp-core 7.0.0", - "sp-debug-derive 5.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", - "thiserror", + "sp-application-crypto 23.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-offchain" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "sp-api", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-panic-handler" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebd2de46003fa8212426838ca71cd42ee36a26480ba9ffea983506ce03131033" dependencies = [ "backtrace", "lazy_static", @@ -12211,8 +12584,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebd2de46003fa8212426838ca71cd42ee36a26480ba9ffea983506ce03131033" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "backtrace", "lazy_static", @@ -12222,17 +12594,18 @@ dependencies = [ [[package]] name = "sp-rpc" version = "6.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "rustc-hash", "serde", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-runtime" -version = "7.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "24.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21c5bfc764a1a8259d7e8f7cfd22c84006275a512c958d3ff966c92151e134d5" dependencies = [ "either", "hash256-std-hasher", @@ -12243,19 +12616,18 @@ dependencies = [ "rand 0.8.5", "scale-info", "serde", - "sp-application-crypto 7.0.0", - "sp-arithmetic 6.0.0", - "sp-core 7.0.0", - "sp-io 7.0.0", - "sp-std 5.0.0", - "sp-weights 4.0.0", + "sp-application-crypto 23.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-arithmetic 16.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-core 21.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-io 23.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-weights 20.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "sp-runtime" version = "24.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21c5bfc764a1a8259d7e8f7cfd22c84006275a512c958d3ff966c92151e134d5" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "either", "hash256-std-hasher", @@ -12266,119 +12638,122 @@ 
dependencies = [ "rand 0.8.5", "scale-info", "serde", - "sp-application-crypto 23.0.0", - "sp-arithmetic 16.0.0", - "sp-core 21.0.0", - "sp-io 23.0.0", - "sp-std 8.0.0", - "sp-weights 20.0.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-weights 20.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-runtime-interface" -version = "7.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "17.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e676128182f90015e916f806cba635c8141e341e7abbc45d25525472e1bbce8" dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec", "primitive-types", - "sp-externalities 0.13.0", - "sp-runtime-interface-proc-macro 6.0.0", - "sp-std 5.0.0", - "sp-storage 7.0.0", - "sp-tracing 6.0.0", - "sp-wasm-interface 7.0.0", + "sp-externalities 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-runtime-interface-proc-macro 11.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-storage 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-tracing 10.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-wasm-interface 14.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "static_assertions", ] [[package]] name = "sp-runtime-interface" version = "17.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e676128182f90015e916f806cba635c8141e341e7abbc45d25525472e1bbce8" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec", "primitive-types", - "sp-externalities 0.19.0", - "sp-runtime-interface-proc-macro 11.0.0", - "sp-std 8.0.0", - "sp-storage 13.0.0", - "sp-tracing 10.0.0", - "sp-wasm-interface 14.0.0", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime-interface-proc-macro 11.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-storage 13.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-tracing 10.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-wasm-interface 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "static_assertions", ] [[package]] name = "sp-runtime-interface-proc-macro" -version = "6.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "11.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a5d5bd5566fe5633ec48dfa35ab152fd29f8a577c21971e1c6db9f28afb9bbb9" dependencies = [ "Inflector", - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "sp-runtime-interface-proc-macro" version = "11.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5d5bd5566fe5633ec48dfa35ab152fd29f8a577c21971e1c6db9f28afb9bbb9" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "Inflector", - "proc-macro-crate", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "sp-session" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "parity-scale-codec", "scale-info", "sp-api", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-staking", - "sp-std 5.0.0", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-staking" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ + "impl-trait-for-tuples", "parity-scale-codec", "scale-info", "serde", - "sp-core 7.0.0", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-state-machine" -version = "0.13.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ef45d31f9e7ac648f8899a0cd038a3608f8499028bff55b6c799702592325b6" dependencies = [ "hash-db", "log", "parity-scale-codec", "parking_lot 0.12.1", "rand 0.8.5", - "smallvec 1.11.1", - "sp-core 7.0.0", - "sp-externalities 0.13.0", - "sp-panic-handler 5.0.0", - "sp-std 5.0.0", - "sp-trie 7.0.0", + "smallvec 1.11.2", + "sp-core 21.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-externalities 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-panic-handler 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-trie 22.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror", "tracing", ] @@ -12386,102 +12761,107 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9ef45d31f9e7ac648f8899a0cd038a3608f8499028bff55b6c799702592325b6" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "hash-db", "log", "parity-scale-codec", "parking_lot 0.12.1", "rand 0.8.5", - "smallvec 1.11.1", - "sp-core 21.0.0", - "sp-externalities 0.19.0", - "sp-panic-handler 8.0.0", - "sp-std 8.0.0", - "sp-trie 22.0.0", + "smallvec 1.11.2", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-panic-handler 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-trie 22.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", "tracing", + "trie-db 0.28.0", ] [[package]] name = "sp-statement-store" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "log", + "aes-gcm 0.10.3", + "curve25519-dalek 4.1.1", + "ed25519-dalek 2.0.0", + "hkdf", "parity-scale-codec", + "rand 0.8.5", "scale-info", + "sha2 0.10.8", "sp-api", - "sp-application-crypto 7.0.0", - "sp-core 7.0.0", - "sp-externalities 0.13.0", - "sp-runtime 7.0.0", - "sp-runtime-interface 7.0.0", - "sp-std 5.0.0", + "sp-application-crypto 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime-interface 17.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", + "x25519-dalek 2.0.0", ] [[package]] name = "sp-std" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" [[package]] name = "sp-std" version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" [[package]] name = "sp-storage" -version = "7.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "13.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94294be83f11d4958cfea89ed5798f0b6605f5defc3a996948848458abbcc18e" dependencies = [ "impl-serde", "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive 5.0.0", - "sp-std 5.0.0", + "sp-debug-derive 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-std 8.0.0 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "sp-storage" version = "13.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94294be83f11d4958cfea89ed5798f0b6605f5defc3a996948848458abbcc18e" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "impl-serde", "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive 8.0.0", - "sp-std 8.0.0", + "sp-debug-derive 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-timestamp" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", - "futures-timer", - "log", "parity-scale-codec", "sp-inherents", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", ] [[package]] name = "sp-tracing" -version = "6.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "10.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357f7591980dd58305956d32f8f6646d0a8ea9ea0e7e868e46f53b68ddf00cec" dependencies = [ "parity-scale-codec", - "sp-std 5.0.0", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "tracing", "tracing-core", "tracing-subscriber", @@ -12489,12 +12869,11 @@ dependencies = [ [[package]] name = "sp-tracing" -version = "10.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357f7591980dd58305956d32f8f6646d0a8ea9ea0e7e868e46f53b68ddf00cec" +version = "10.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "parity-scale-codec", - "sp-std 8.0.0", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "tracing", "tracing-core", "tracing-subscriber", @@ -12503,34 +12882,34 @@ dependencies = [ [[package]] name = "sp-transaction-pool" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "sp-api", - "sp-runtime 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-transaction-storage-proof" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", - "log", "parity-scale-codec", "scale-info", - "sp-core 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-runtime 
7.0.0", - "sp-std 5.0.0", - "sp-trie 7.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-trie 22.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "sp-trie" -version = "7.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "22.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48e4eeb7ef23f79eba8609db79ef9cef242f994f1f87a3c0387b4b5f177fda74" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", "hash-db", "hashbrown 0.13.2", "lazy_static", @@ -12540,21 +12919,20 @@ dependencies = [ "parking_lot 0.12.1", "scale-info", "schnellru", - "sp-core 7.0.0", - "sp-std 5.0.0", + "sp-core 21.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror", "tracing", - "trie-db", + "trie-db 0.27.1", "trie-root", ] [[package]] name = "sp-trie" version = "22.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4eeb7ef23f79eba8609db79ef9cef242f994f1f87a3c0387b4b5f177fda74" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", "hash-db", "hashbrown 0.13.2", "lazy_static", @@ -12562,20 +12940,21 @@ dependencies = [ "nohash-hasher", "parity-scale-codec", "parking_lot 0.12.1", + "rand 0.8.5", "scale-info", "schnellru", - "sp-core 21.0.0", - "sp-std 8.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "thiserror", "tracing", - "trie-db", + "trie-db 0.28.0", "trie-root", ] [[package]] name = "sp-version" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "22.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "impl-serde", "parity-scale-codec", @@ -12583,80 +12962,79 @@ dependencies = [ "scale-info", "serde", "sp-core-hashing-proc-macro", - "sp-runtime 7.0.0", - "sp-std 5.0.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-version-proc-macro", "thiserror", ] [[package]] name = "sp-version-proc-macro" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "8.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "parity-scale-codec", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] name = "sp-wasm-interface" -version = "7.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "14.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19c122609ca5d8246be6386888596320d03c7bc880959eaa2c36bcd5acd6846" dependencies = [ 
"anyhow", "impl-trait-for-tuples", "log", "parity-scale-codec", - "sp-std 5.0.0", - "wasmi", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "wasmtime", ] [[package]] name = "sp-wasm-interface" version = "14.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19c122609ca5d8246be6386888596320d03c7bc880959eaa2c36bcd5acd6846" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "anyhow", "impl-trait-for-tuples", "log", "parity-scale-codec", - "sp-std 8.0.0", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "wasmtime", ] [[package]] name = "sp-weights" -version = "4.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +version = "20.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45d084c735544f70625b821c3acdbc7a2fc1893ca98b85f1942631284692c75b" dependencies = [ "parity-scale-codec", "scale-info", "serde", - "smallvec 1.11.1", - "sp-arithmetic 6.0.0", - "sp-core 7.0.0", - "sp-debug-derive 5.0.0", - "sp-std 5.0.0", + "smallvec 1.11.2", + "sp-arithmetic 16.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-core 21.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-debug-derive 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-std 8.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "sp-weights" version = "20.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45d084c735544f70625b821c3acdbc7a2fc1893ca98b85f1942631284692c75b" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "parity-scale-codec", "scale-info", "serde", - "smallvec 1.11.1", - "sp-arithmetic 16.0.0", - "sp-core 21.0.0", - "sp-debug-derive 8.0.0", - "sp-std 8.0.0", + "smallvec 1.11.2", + "sp-arithmetic 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-debug-derive 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-std 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] @@ -12718,9 +13096,9 @@ dependencies = [ [[package]] name = "ss58-registry" -version = "1.43.0" +version = "1.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6915280e2d0db8911e5032a5c275571af6bdded2916abd691a659be25d3439" +checksum = "35935738370302d5e33963665b77541e4b990a3e919ec904c837a56cfc891de1" dependencies = [ "Inflector", "num-format", @@ -12772,7 +13150,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14139b1c39bdc2f1e663c12090ff5108fe50ebe62c09e15e32988dfaf445a7e4" dependencies = [ - "base64 0.21.4", + "base64 0.21.5", "flate2", "hex", "serde", @@ -12786,11 +13164,11 @@ dependencies = [ [[package]] name = "starknet-crypto" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693e6362f150f9276e429a910481fb7f3bcb8d6aa643743f587cfece0b374874" +checksum = "d3f2175b0b3fc24ff2ec6dc07f5a720498994effca7e78b11a6e1c1bd02cad52" dependencies = [ - "crypto-bigint 0.5.3", + 
"crypto-bigint 0.5.4", "hex", "hmac 0.12.1", "num-bigint", @@ -12810,7 +13188,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33c03f5ac70f9b067f48db7d2d70bdf18ee0f731e8192b6cfa679136becfcdb0" dependencies = [ - "crypto-bigint 0.5.3", + "crypto-bigint 0.5.4", "hex", "hmac 0.12.1", "num-bigint", @@ -12832,7 +13210,7 @@ checksum = "af6527b845423542c8a16e060ea1bc43f67229848e7cd4c4d80be994a84220ce" dependencies = [ "starknet-curve 0.4.0", "starknet-ff", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -12855,14 +13233,14 @@ dependencies = [ [[package]] name = "starknet-ff" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2cb1d9c0a50380cddab99cb202c6bfb3332728a2769bd0ca2ee80b0b390dd4" +checksum = "7584bc732e4d2a8ccebdd1dda8236f7940a79a339e30ebf338d45c329659e36c" dependencies = [ "ark-ff 0.4.2", "bigdecimal", - "crypto-bigint 0.5.3", - "getrandom 0.2.10", + "crypto-bigint 0.5.4", + "getrandom 0.2.11", "hex", "serde", ] @@ -12913,7 +13291,7 @@ version = "0.1.0" dependencies = [ "anyhow", "assert_matches", - "async-lock 3.0.0", + "async-lock 3.1.0", "flate2", "reqwest", "rstest", @@ -12939,7 +13317,7 @@ checksum = "d9386015d2e6dc3df285bfb33a3afd8ad7596c70ed38ab57019de4d2dfc7826f" dependencies = [ "async-trait", "auto_impl", - "crypto-bigint 0.5.3", + "crypto-bigint 0.5.4", "eth-keystore", "rand 0.8.5", "starknet-core", @@ -12953,7 +13331,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f6e445fbd6bf3826dda26fd64aa5311353b4799c9bd1119d6ec1906be4c73bf" dependencies = [ - "cairo-lang-starknet 2.2.0", + "cairo-lang-starknet 2.3.1", "derive_more", "hex", "indexmap 1.9.3", @@ -12961,7 +13339,7 @@ dependencies = [ "primitive-types", "serde", "serde_json", - "starknet-crypto 0.5.1", + "starknet-crypto 0.5.2", "thiserror", ] @@ -12981,7 +13359,7 @@ dependencies = [ "scale-info", "serde", "serde_json", - "starknet-crypto 0.5.1", + "starknet-crypto 0.5.2", "thiserror-no-std", ] @@ -13109,7 +13487,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -13124,7 +13502,7 @@ dependencies = [ "md-5", "rand 0.8.5", "ring 0.16.20", - "subtle", + "subtle 2.4.1", "thiserror", "tokio", "url", @@ -13133,9 +13511,9 @@ dependencies = [ [[package]] name = "substrate-bip39" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49eee6965196b32f882dd2ee85a92b1dbead41b04e53907f269de3b0dc04733c" +checksum = "e620c7098893ba667438b47169c00aacdd9e7c10e042250ce2b60b087ec97328" dependencies = [ "hmac 0.11.0", "pbkdf2 0.8.0", @@ -13147,15 +13525,12 @@ dependencies = [ [[package]] name = "substrate-build-script-utils" version = "3.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "platforms 2.0.0", -] +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" [[package]] name = "substrate-frame-rpc-system" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "frame-system-rpc-runtime-api", "futures", @@ -13167,14 +13542,14 @@ dependencies = [ "sp-api", 
"sp-block-builder", "sp-blockchain", - "sp-core 7.0.0", - "sp-runtime 7.0.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "substrate-prometheus-endpoint" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "hyper", "log", @@ -13186,135 +13561,26 @@ dependencies = [ [[package]] name = "substrate-rpc-client" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", "jsonrpsee 0.16.3", "log", "sc-rpc-api", "serde", - "sp-runtime 7.0.0", -] - -[[package]] -name = "substrate-test-client" -version = "2.0.1" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "array-bytes 4.2.0", - "async-trait", - "futures", - "parity-scale-codec", - "sc-client-api", - "sc-client-db", - "sc-consensus", - "sc-executor", - "sc-offchain", - "sc-service", - "serde", - "serde_json", - "sp-blockchain", - "sp-consensus", - "sp-core 7.0.0", - "sp-keyring", - "sp-keystore 0.13.0", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", -] - -[[package]] -name = "substrate-test-runtime" -version = "2.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "array-bytes 6.1.0", - "frame-executive", - "frame-support", - "frame-system", - "frame-system-rpc-runtime-api", - "log", - "memory-db", - "pallet-babe", - "pallet-balances", - "pallet-beefy-mmr", - "pallet-root-testing", - "pallet-sudo", - "pallet-timestamp", - "parity-scale-codec", - "sc-service", - "scale-info", - "serde", - "sp-api", - "sp-application-crypto 7.0.0", - "sp-block-builder", - "sp-consensus-aura", - "sp-consensus-babe", - "sp-consensus-beefy", - "sp-consensus-grandpa", - "sp-core 7.0.0", - "sp-debug-derive 5.0.0", - "sp-externalities 0.13.0", - "sp-inherents", - "sp-io 7.0.0", - "sp-keyring", - "sp-offchain", - "sp-runtime 7.0.0", - "sp-runtime-interface 7.0.0", - "sp-session", - "sp-state-machine 0.13.0", - "sp-std 5.0.0", - "sp-transaction-pool", - "sp-trie 7.0.0", - "sp-version", - "substrate-wasm-builder", - "trie-db", -] - -[[package]] -name = "substrate-test-runtime-client" -version = "2.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "futures", - "parity-scale-codec", - "sc-block-builder", - "sc-chain-spec", - "sc-client-api", - "sc-consensus", - "sp-api", - "sp-blockchain", - "sp-consensus", - "sp-core 7.0.0", - "sp-runtime 7.0.0", - "substrate-test-client", - "substrate-test-runtime", -] - -[[package]] -name = "substrate-test-runtime-transaction-pool" -version = "2.0.0" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" -dependencies = [ - "futures", - "parity-scale-codec", - "parking_lot 0.12.1", - "sc-transaction-pool", - "sc-transaction-pool-api", - 
"sp-blockchain", - "sp-runtime 7.0.0", - "substrate-test-runtime-client", - "thiserror", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", ] [[package]] name = "substrate-wasm-builder" version = "5.0.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "ansi_term", "build-helper", "cargo_metadata 0.15.4", "filetime", + "parity-wasm", "sp-maybe-compressed-blob", "strum 0.24.1", "tempfile", @@ -13332,6 +13598,12 @@ dependencies = [ "autocfg", ] +[[package]] +name = "subtle" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d67a5a62ba6e01cb2192ff309324cb4875d0c451d55fe2319433abe7a05a8ee" + [[package]] name = "subtle" version = "2.4.1" @@ -13360,12 +13632,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "31a734d66fa935fbda56ba6a71d7e969f424c8c5608d416ba8499d71d8cbfc1f" dependencies = [ "base58", - "blake2", + "blake2 0.10.6", "derivative", "either", - "frame-metadata", + "frame-metadata 15.1.0", "futures", - "getrandom 0.2.10", + "getrandom 0.2.11", "hex", "impl-serde", "jsonrpsee 0.16.3", @@ -13378,9 +13650,9 @@ dependencies = [ "scale-value", "serde", "serde_json", - "sp-core 21.0.0", - "sp-core-hashing 9.0.0", - "sp-runtime 24.0.0", + "sp-core 21.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-core-hashing 9.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sp-runtime 24.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "subxt-macro", "subxt-metadata", "thiserror", @@ -13393,7 +13665,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e2f231d97c145c564bd544212c0cc0c29c09ff516af199f4ce00c8e055f8138" dependencies = [ - "frame-metadata", + "frame-metadata 15.1.0", "heck 0.4.1", "hex", "jsonrpsee 0.16.3", @@ -13402,7 +13674,7 @@ dependencies = [ "quote", "scale-info", "subxt-metadata", - "syn 2.0.38", + "syn 2.0.39", "thiserror", "tokio", ] @@ -13416,7 +13688,7 @@ dependencies = [ "darling 0.20.3", "proc-macro-error", "subxt-codegen", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -13425,18 +13697,18 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a01ce5044c81db3404d38c56f1e69d72eff72c54e5913c9bba4c0b58d376031f" dependencies = [ - "frame-metadata", + "frame-metadata 15.1.0", "parity-scale-codec", "scale-info", - "sp-core-hashing 9.0.0", + "sp-core-hashing 9.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror", ] [[package]] name = "svm-rs" -version = "0.3.0" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597e3a746727984cb7ea2487b6a40726cad0dbe86628e7d429aa6b8c4c153db4" +checksum = "20689c7d03b6461b502d0b95d6c24874c7d24dea2688af80486a130a06af3b07" dependencies = [ "dirs", "fs2", @@ -13452,6 +13724,56 @@ dependencies = [ "zip", ] +[[package]] +name = "symphonia" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62e48dba70095f265fdb269b99619b95d04c89e619538138383e63310b14d941" +dependencies = [ + "lazy_static", + "symphonia-bundle-mp3", + "symphonia-core", + "symphonia-metadata", +] + +[[package]] +name = "symphonia-bundle-mp3" +version = "0.5.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f31d7fece546f1e6973011a9eceae948133bbd18fd3d52f6073b1e38ae6368a" +dependencies = [ + "bitflags 1.3.2", + "lazy_static", + "log", + "symphonia-core", + "symphonia-metadata", +] + +[[package]] +name = "symphonia-core" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c73eb88fee79705268cc7b742c7bc93a7b76e092ab751d0833866970754142" +dependencies = [ + "arrayvec 0.7.4", + "bitflags 1.3.2", + "bytemuck", + "lazy_static", + "log", +] + +[[package]] +name = "symphonia-metadata" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89c3e1937e31d0e068bbe829f66b2f2bfaa28d056365279e0ef897172c3320c0" +dependencies = [ + "encoding_rs", + "lazy_static", + "log", + "symphonia-core", +] + [[package]] name = "syn" version = "1.0.109" @@ -13465,9 +13787,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.38" +version = "2.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" +checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" dependencies = [ "proc-macro2", "quote", @@ -13521,14 +13843,14 @@ checksum = "14c39fd04924ca3a864207c66fc2cd7d22d7c016007f9ce846cbb9326331930a" [[package]] name = "tempfile" -version = "3.8.0" +version = "3.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" dependencies = [ "cfg-if", "fastrand 2.0.1", - "redox_syscall 0.3.5", - "rustix 0.38.20", + "redox_syscall 0.4.1", + "rustix 0.38.24", "windows-sys 0.48.0", ] @@ -13553,7 +13875,7 @@ dependencies = [ "serde_repr", "sha2 0.10.8", "signature 2.1.0", - "subtle", + "subtle 2.4.1", "subtle-encoding", "tendermint-proto", "time", @@ -13590,9 +13912,9 @@ dependencies = [ [[package]] name = "termcolor" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64" +checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449" dependencies = [ "winapi-util", ] @@ -13622,7 +13944,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -13634,7 +13956,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", "test-case-core", ] @@ -13664,7 +13986,7 @@ checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -13806,9 +14128,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.33.0" +version = "1.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" +checksum = "d0c014766411e834f7af5b8f4cf46257aab4036ca95e9d2c144a10f59ad6f5b9" dependencies = [ "backtrace", "bytes", @@ -13818,20 +14140,20 @@ dependencies = [ "parking_lot 0.12.1", "pin-project-lite 0.2.13", "signal-hook-registry", - "socket2 0.5.4", + "socket2 0.5.5", "tokio-macros", "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -13855,24 +14177,13 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.23.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" -dependencies = [ - "rustls 0.20.9", - "tokio", - "webpki 0.22.4", -] - [[package]] name = "tokio-rustls" version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.7", + "rustls 0.21.8", "tokio", ] @@ -13896,18 +14207,18 @@ checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", - "rustls 0.21.7", + "rustls 0.21.8", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls", "tungstenite", "webpki-roots 0.25.2", ] [[package]] name = "tokio-util" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" dependencies = [ "bytes", "futures-core", @@ -13936,14 +14247,14 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit", + "toml_edit 0.19.15", ] [[package]] name = "toml_datetime" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" +checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" dependencies = [ "serde", ] @@ -13954,13 +14265,24 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.0.2", + "indexmap 2.1.0", "serde", "serde_spanned", "toml_datetime", "winnow", ] +[[package]] +name = "toml_edit" +version = "0.20.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +dependencies = [ + "indexmap 2.1.0", + "toml_datetime", + "winnow", +] + [[package]] name = "tower" version = "0.4.13" @@ -14026,7 +14348,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] @@ -14051,12 +14373,12 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" dependencies = [ - "lazy_static", "log", + "once_cell", "tracing-core", ] @@ -14085,7 +14407,7 @@ dependencies = [ "serde", "serde_json", "sharded-slab", - "smallvec 1.11.1", + "smallvec 1.11.2", "thread_local", "tracing", "tracing-core", @@ -14103,7 +14425,20 @@ dependencies = [ "hashbrown 0.13.2", "log", "rustc-hex", - "smallvec 1.11.1", + "smallvec 1.11.2", +] + +[[package]] +name = "trie-db" +version = "0.28.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff28e0f815c2fea41ebddf148e008b077d2faddb026c9555b29696114d602642" +dependencies = [ + "hash-db", + "hashbrown 0.13.2", + "log", + "rustc-hex", + "smallvec 1.11.2", ] [[package]] @@ -14132,8 +14467,8 @@ dependencies = [ "ipnet", "lazy_static", "rand 0.8.5", - "smallvec 1.11.1", - "socket2 0.4.9", + "smallvec 1.11.2", + "socket2 0.4.10", "thiserror", "tinyvec", "tokio", @@ -14154,7 +14489,7 @@ dependencies = [ "lru-cache", "parking_lot 0.12.1", "resolv-conf", - "smallvec 1.11.1", + "smallvec 1.11.2", "thiserror", "tokio", "tracing", @@ -14170,10 +14505,10 @@ checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" [[package]] name = "try-runtime-cli" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate?branch=polkadot-v0.9.43#5e49f6e44820affccaf517fd22af564f4b495d40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0#401f8a3e9448db854f5605b679fa085b8f445039" dependencies = [ "async-trait", - "clap 4.4.7", + "clap 4.4.8", "frame-remote-externalities", "frame-try-runtime", "hex", @@ -14181,25 +14516,24 @@ dependencies = [ "parity-scale-codec", "sc-cli", "sc-executor", - "sc-service", "serde", "serde_json", "sp-api", "sp-consensus-aura", "sp-consensus-babe", - "sp-core 7.0.0", - "sp-debug-derive 5.0.0", - "sp-externalities 0.13.0", + "sp-core 21.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-debug-derive 8.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-externalities 0.19.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-inherents", - "sp-io 7.0.0", - "sp-keystore 0.13.0", + "sp-io 23.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-keystore 0.27.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-rpc", - "sp-runtime 7.0.0", - "sp-state-machine 0.13.0", + "sp-runtime 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", + "sp-state-machine 0.28.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "sp-timestamp", "sp-transaction-storage-proof", "sp-version", - "sp-weights 4.0.0", + "sp-weights 20.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=release-polkadot-v1.3.0)", "substrate-rpc-client", "zstd 0.12.4", ] @@ -14223,7 +14557,7 @@ dependencies = [ "httparse", "log", "rand 0.8.5", - "rustls 0.21.7", + "rustls 0.21.8", "sha1", "thiserror", "url", @@ -14346,7 +14680,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05" dependencies = [ "generic-array 0.14.7", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -14356,7 +14690,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" dependencies = [ "crypto-common", - "subtle", + "subtle 2.4.1", ] [[package]] @@ -14412,7 +14746,7 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", "serde", ] @@ -14422,7 +14756,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc" 
dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", "serde", ] @@ -14495,6 +14829,30 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +[[package]] +name = "w3f-bls" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7335e4c132c28cc43caef6adb339789e599e39adbe78da0c4d547fad48cbc331" +dependencies = [ + "ark-bls12-377", + "ark-bls12-381", + "ark-ec", + "ark-ff 0.4.2", + "ark-serialize 0.4.2", + "ark-serialize-derive", + "arrayref", + "constcat", + "digest 0.10.7", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rand_core 0.6.4", + "sha2 0.10.8", + "sha3", + "thiserror", + "zeroize", +] + [[package]] name = "waitgroup" version = "0.1.2" @@ -14543,9 +14901,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -14553,24 +14911,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.37" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02" dependencies = [ "cfg-if", "js-sys", @@ -14580,9 +14938,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -14590,22 +14948,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b" [[package]] name = "wasm-instrument" @@ -14618,9 +14976,9 @@ dependencies = [ [[package]] name = "wasm-opt" -version = "0.112.0" +version = "0.116.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87fef6d0d508f08334e0ab0e6877feb4c0ecb3956bcf2cb950699b22fedf3e9c" 
+checksum = "fc942673e7684671f0c5708fc18993569d184265fd5223bb51fc8e5b9b6cfd52" dependencies = [ "anyhow", "libc", @@ -14634,9 +14992,9 @@ dependencies = [ [[package]] name = "wasm-opt-cxx-sys" -version = "0.112.0" +version = "0.116.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc816bbc1596c8f2e8127e137a760c798023ef3d378f2ae51f0f1840e2dfa445" +checksum = "8c57b28207aa724318fcec6575fe74803c23f6f266fce10cbc9f3f116762f12e" dependencies = [ "anyhow", "cxx", @@ -14646,9 +15004,9 @@ dependencies = [ [[package]] name = "wasm-opt-sys" -version = "0.112.0" +version = "0.116.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40199e4f68ef1071b3c6d0bd8026a12b481865d4b9e49c156932ea9a6234dd14" +checksum = "8a1cce564dc768dacbdb718fc29df2dba80bd21cb47d8f77ae7e3d95ceb98cbe" dependencies = [ "anyhow", "cc", @@ -14671,39 +15029,6 @@ dependencies = [ "web-sys", ] -[[package]] -name = "wasmi" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06c326c93fbf86419608361a2c925a31754cf109da1b8b55737070b4d6669422" -dependencies = [ - "parity-wasm", - "wasmi-validation", - "wasmi_core", -] - -[[package]] -name = "wasmi-validation" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ff416ad1ff0c42e5a926ed5d5fab74c0f098749aa0ad8b2a34b982ce0e867b" -dependencies = [ - "parity-wasm", -] - -[[package]] -name = "wasmi_core" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57d20cb3c59b788653d99541c646c561c9dd26506f25c0cebfe810659c54c6d7" -dependencies = [ - "downcast-rs", - "libm", - "memory_units", - "num-rational", - "num-traits 0.2.17", -] - [[package]] name = "wasmparser" version = "0.102.0" @@ -14758,12 +15083,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c86437fa68626fe896e5afc69234bb2b5894949083586535f200385adfd71213" dependencies = [ "anyhow", - "base64 0.21.4", + "base64 0.21.5", "bincode 1.3.3", "directories-next", "file-per-thread-logger", "log", - "rustix 0.36.16", + "rustix 0.36.17", "serde", "sha2 0.10.8", "toml 0.5.11", @@ -14859,7 +15184,7 @@ checksum = "6e0554b84c15a27d76281d06838aed94e13a77d7bf604bbbaf548aa20eb93846" dependencies = [ "object 0.30.4", "once_cell", - "rustix 0.36.16", + "rustix 0.36.17", ] [[package]] @@ -14890,7 +15215,7 @@ dependencies = [ "memoffset 0.8.0", "paste", "rand 0.8.5", - "rustix 0.36.16", + "rustix 0.36.17", "wasmtime-asm-macros", "wasmtime-environ", "wasmtime-jit-debug", @@ -14911,9 +15236,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.64" +version = "0.3.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +checksum = "5db499c5f66323272151db0e666cd34f78617522fb0c1604d31a27c50c206a85" dependencies = [ "js-sys", "wasm-bindgen", @@ -14948,15 +15273,6 @@ dependencies = [ "webpki 0.22.4", ] -[[package]] -name = "webpki-roots" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338" -dependencies = [ - "rustls-webpki 0.100.3", -] - [[package]] name = "webpki-roots" version = "0.25.2" @@ -15050,7 +15366,7 @@ dependencies = [ "sha1", "sha2 0.10.8", "signature 1.6.4", - "subtle", + "subtle 2.4.1", "thiserror", "tokio", "webpki 0.21.4", @@ -15090,7 +15406,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "f08dfd7a6e3987e255c4dbe710dde5d94d0f0574f8a21afa95d171376c143106" dependencies = [ "log", - "socket2 0.4.9", + "socket2 0.4.10", "thiserror", "tokio", "webrtc-util", @@ -15144,7 +15460,7 @@ dependencies = [ "rtcp", "rtp", "sha-1", - "subtle", + "subtle 2.4.1", "thiserror", "tokio", "webrtc-util", @@ -15180,7 +15496,7 @@ dependencies = [ "either", "home", "once_cell", - "rustix 0.38.20", + "rustix 0.38.24", ] [[package]] @@ -15230,6 +15546,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25" +dependencies = [ + "windows-targets 0.42.2", +] + [[package]] name = "windows" version = "0.51.1" @@ -15383,9 +15708,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "winnow" -version = "0.5.17" +version = "0.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3b801d0e0a6726477cc207f60162da452f3a95adb368399bef20a946e06f65c" +checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b" dependencies = [ "memchr", ] @@ -15538,6 +15863,26 @@ dependencies = [ "time", ] +[[package]] +name = "zerocopy" +version = "0.7.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e97e415490559a91254a2979b4829267a57d2fcd741a98eee8b722fb57289aa0" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd7e48ccf166952882ca8bd778a43502c64f33bf94c12ebe2a7f08e5a0f6689f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + [[package]] name = "zeroize" version = "1.6.0" @@ -15555,7 +15900,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.38", + "syn 2.0.39", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 458ce8302b..5451a1900a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,13 +15,11 @@ members = [ "crates/primitives/storage", "crates/primitives/commitments", "crates/primitives/chain-id", - "crates/client/block-proposer", "crates/client/db", "crates/client/rpc-core", "crates/client/rpc", "crates/client/mapping-sync", "crates/client/storage", - "crates/client/transaction-pool", "crates/client/deoxys", "crates/client/commitment-state-diff", "starknet-rpc-test", @@ -43,16 +41,18 @@ default-members = [ "crates/primitives/storage", "crates/primitives/commitments", "crates/primitives/chain-id", - "crates/client/block-proposer", "crates/client/db", "crates/client/rpc-core", "crates/client/rpc", "crates/client/mapping-sync", "crates/client/storage", - "crates/client/transaction-pool", "crates/client/commitment-state-diff", ] +[profile.dev] +panic = "abort" +incremental = true + [profile.release] panic = "unwind" @@ -71,79 +71,81 @@ version = "0.5.0" [workspace.dependencies] # Substrate frame dependencies -frame-executive = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -frame-support = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -frame-benchmarking = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", 
default-features = false } -frame-benchmarking-cli = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -frame-system = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -frame-system-benchmarking = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -frame-system-rpc-runtime-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -frame-try-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -substrate-frame-rpc-system = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } +frame-executive = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +frame-benchmarking-cli = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +frame-system-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +frame-system-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +frame-try-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +substrate-frame-rpc-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } # Substrate primitives dependencies -sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-std = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-io = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-consensus-aura = { git = "http://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-consensus = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-consensus-grandpa = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-timestamp = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-inherents = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-keyring = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-blockchain = { git = "https://github.com/paritytech/substrate", branch = 
"polkadot-v0.9.43", default-features = false } -sp-block-builder = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-offchain = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-session = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-transaction-pool = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-version = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-database = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-arithmetic = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-storage = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-state-machine = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-trie = { version = "7.0.0", git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sp-tracing = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } +sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-consensus-aura = { git = "http://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-consensus = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-timestamp = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-inherents = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-keyring = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-blockchain = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-block-builder = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-offchain = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-session = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-version = { git = 
"https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-database = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-arithmetic = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-storage = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-state-machine = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-statement-store = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-trie = { version = "22.0.0", git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sp-tracing = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } # Substrate client dependencies -sc-client-db = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", features = [ +sc-client-db = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", features = [ "rocksdb", ] } -sc-network = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-network-common = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-network-sync = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-consensus = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } +sc-network = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-network-common = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-network-sync = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-consensus = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } # For integration tests in order to create blocks on demand -sc-consensus-manual-seal = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43", default-features = false } -sc-consensus-grandpa = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-rpc = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-rpc-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-basic-authorship = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-client-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-cli = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-executor = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-service = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-telemetry = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-keystore = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-transaction-pool = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-transaction-pool-api = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-consensus-aura = { git = 
"https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-block-builder = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-proposer-metrics = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-utils = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -substrate-test-runtime-client = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } +sc-consensus-manual-seal = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0", default-features = false } +sc-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-rpc = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-rpc-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-basic-authorship = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-client-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-cli = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-executor = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-service = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-telemetry = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-keystore = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-transaction-pool-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-offchain = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-consensus-aura = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-block-builder = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-proposer-metrics = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +sc-utils = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +substrate-test-runtime-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } # Substrate build & tools dependencies -substrate-build-script-utils = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -substrate-wasm-builder = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -prometheus-endpoint = { package = "substrate-prometheus-endpoint", version = "0.10.0-dev", git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } +substrate-build-script-utils = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +substrate-wasm-builder = { git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +prometheus-endpoint = { package = "substrate-prometheus-endpoint", version = "0.10.0-dev", git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } # Substrate Frame pallet -pallet-aura = { default-features = false, git = 
"https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -pallet-grandpa = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -pallet-timestamp = { default-features = false, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } +pallet-aura = { default-features = false, git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +pallet-grandpa = { default-features = false, git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } +pallet-timestamp = { default-features = false, git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } # Madara pallets pallet-starknet = { path = "crates/pallets/starknet", default-features = false } @@ -167,8 +169,6 @@ mc-db = { path = "crates/client/db" } mc-storage = { path = "crates/client/storage" } mc-rpc = { path = "crates/client/rpc" } mc-rpc-core = { path = "crates/client/rpc-core" } -mc-block-proposer = { path = "crates/client/block-proposer" } -mc-transaction-pool = { path = "crates/client/transaction-pool" } mc-data-availability = { path = "crates/client/data-availability" } mc-commitment-state-diff = { path = "crates/client/commitment-state-diff" } @@ -184,7 +184,7 @@ cairo-vm = { git = "https://github.com/keep-starknet-strange/cairo-rs", branch = starknet-crypto = { version = "0.6.1", default-features = false } starknet-core = { version = "0.6.0", default-features = false } starknet-gateway = { version = "0.6.0", default-features = false } -starknet-ff = { version = "0.3.4", default-features = false } +starknet-ff = { version = "0.3.5", default-features = false } starknet-signers = { version = "0.4.0" } starknet-accounts = { version = "0.5.0" } starknet-contract = { version = "0.5.0" } @@ -214,13 +214,13 @@ log = { version = "0.4.20", default-features = false } hex = { version = "0.4.3", default-features = false } safe-mix = { version = "1.0", default-features = false } jsonrpsee = { version = "0.16.3", default-features = false } -clap = { version = "4.4.7", default-features = false } +clap = { version = "4.4.8", default-features = false } futures = { version = "0.3.29", default-features = false } futures-timer = { version = "3.0.2", default-features = false } md5 = { version = "0.7.0", default-features = false } reqwest = { version = "0.11.22", default-features = false } -serde = { version = "1.0.190", default-features = false } -serde_json = { version = "1.0.107", default-features = false } +serde = { version = "1.0.192", default-features = false } +serde_json = { version = "1.0.108", default-features = false } serde_with = { version = "2.3.3", default-features = false } bitvec = { version = "1", default-features = false } thiserror = "1.0.50" @@ -285,4 +285,8 @@ tracing-subscriber = "0.3.16" tower = "0.4" url = "2.4.1" hashbrown = "0.14.2" -tokio = "1.33.0" +tokio = "1.34.0" +openssl = { version = "0.10", features = ["vendored"] } + +[patch."https://github.com/w3f/ring-vrf"] +bandersnatch_vrfs = { git = "https://github.com/w3f/ring-vrf?rev=3ddc20", version = "0.0.4", rev = "3ddc20" } diff --git a/Dockerfile b/Dockerfile index 18da783256..3adf7732f8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,8 @@ FROM rust:slim-buster as builder RUN apt-get -y update; \ apt-get install -y --no-install-recommends \ - libssl-dev make clang-11 g++ llvm protobuf-compiler \ - pkg-config libz-dev zstd git; \ + libssl-dev make clang-11 g++ llvm protobuf-compiler 
libprotobuf-dev \ + pkg-config libz-dev zstd git build-essential; \ apt-get autoremove -y; \ apt-get clean; \ rm -rf /var/lib/apt/lists/* diff --git a/configs/genesis-assets/ArgentMulticall.casm.json b/configs/genesis-assets/ArgentMulticall.casm.json new file mode 100644 index 0000000000..60f4cb79f9 --- /dev/null +++ b/configs/genesis-assets/ArgentMulticall.casm.json @@ -0,0 +1,2057 @@ +{ + "prime": "0x800000000000011000000000000000000000000000000000000000000000001", + "compiler_version": "2.0.0", + "bytecode": [ + "0x40780017fff7fff", + "0x1", + "0xa0680017fff8000", + "0x7", + "0x482680017ffa8000", + "0xffffffffffffffffffffffffffff8f94", + "0x400280007ff97fff", + "0x10780017fff7fff", + "0x99", + "0x4825800180007ffa", + "0x706c", + "0x400280007ff97fff", + "0x482680017ff98000", + "0x1", + "0x48127ffe7fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x9e", + "0x20680017fff7ffa", + "0x85", + "0x20680017fff7ffd", + "0x75", + "0x48307ffb80007ffc", + "0x4824800180007fff", + "0x0", + "0x20680017fff7fff", + "0x4", + "0x10780017fff7fff", + "0x6", + "0x480680017fff8000", + "0x0", + "0x10780017fff7fff", + "0x4", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x1", + "0x48307ffe80007fff", + "0x20680017fff7fff", + "0x52", + "0x1104800180018000", + "0x5ec", + "0x482480017fff8000", + "0x5eb", + "0x480080007fff8000", + "0xa0680017fff8000", + "0x9", + "0x4824800180007fef", + "0x0", + "0x482480017fff8000", + "0x100000000000000000000000000000000", + "0x400080007fec7fff", + "0x10780017fff7fff", + "0x35", + "0x4824800180007fef", + "0x0", + "0x400080007fed7fff", + "0x482480017fed8000", + "0x1", + "0x48127ffe7fff8000", + "0x480a7ffb7fff8000", + "0x48127ff07fff8000", + "0x48127ff07fff8000", + "0x1104800180018000", + "0xb7", + "0x40137ffb7fff8000", + "0x20680017fff7ffc", + "0x1f", + "0x40780017fff7fff", + "0x1", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x48127ff97fff8000", + "0x1104800180018000", + "0xd8", + "0x20680017fff7ffd", + "0xa", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480a80007fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480a80007fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x480a80007fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x4f7574206f6620676173", + "0x400080007ffe7fff", + "0x482480017fea8000", + "0x1", + "0x48127fea7fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffa7fff8000", + "0x482480017ff98000", + "0x1", + "0x208b7fff7fff7ffe", + "0x480a7ffb7fff8000", + "0x1104800180018000", + "0xc9", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x496e70757420746f6f206c6f6e6720666f7220617267756d656e7473", + "0x400080007ffe7fff", + "0x48127fed7fff8000", + "0x48127fed7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffa7fff8000", + "0x482480017ff98000", + "0x1", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x496e70757420746f6f2073686f727420666f7220617267756d656e7473", + "0x400080007ffe7fff", + "0x48127ff67fff8000", + "0x48127ff67fff8000", + "0x480a7ffb7fff8000", + 
"0x480680017fff8000", + "0x1", + "0x48127ffa7fff8000", + "0x482480017ff98000", + "0x1", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x4f7574206f6620676173", + "0x400080007ffe7fff", + "0x482680017ff98000", + "0x1", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffa7fff8000", + "0x482480017ff98000", + "0x1", + "0x208b7fff7fff7ffe", + "0x48297ffc80007ffd", + "0x20680017fff7fff", + "0x4", + "0x10780017fff7fff", + "0xa", + "0x482680017ffc8000", + "0x1", + "0x480a7ffd7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ffc7fff8000", + "0x10780017fff7fff", + "0x8", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x20680017fff7ffc", + "0x26", + "0x40780017fff7fff", + "0x1", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x48127ffa7fff8000", + "0x480080007ff68000", + "0x1104800180018000", + "0x76", + "0x20680017fff7ffa", + "0xc", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x208b7fff7fff7ffe", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x1104800180018000", + "0xca", + "0x20680017fff7ffd", + "0x1f", + "0x480a7ff97fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x40137ffa7fff8000", + "0x1104800180018000", + "0xd5", + "0x20680017fff7ffd", + "0xb", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a80007fff8000", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x208b7fff7fff7ffe", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x208b7fff7fff7ffe", + "0x480a7ff97fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x208b7fff7fff7ffe", + "0x480a7ff97fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0xd0", + "0x480a7ff77fff8000", + "0x480a7ff87fff8000", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x1104800180018000", + "0xd0", + "0x20680017fff7ffd", + "0x9", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + 
"0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x480a7ffd7fff8000", + "0x208b7fff7fff7ffe", + "0x1104800180018000", + "0x4d0", + "0x482480017fff8000", + "0x4cf", + "0x480080007fff8000", + "0xa0680017fff8000", + "0x9", + "0x4825800180007ff8", + "0x429a", + "0x482480017fff8000", + "0x100000000000000000000000000000000", + "0x400280007ff77fff", + "0x10780017fff7fff", + "0x60", + "0x4825800180007ff8", + "0x429a", + "0x400280007ff77fff", + "0x482680017ff78000", + "0x1", + "0x20780017fff7ffd", + "0xd", + "0x48127fff7fff8000", + "0x48127ffd7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ff97fff8000", + "0x480a7ffa7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ffb7fff8000", + "0x480a7ffc7fff8000", + "0x208b7fff7fff7ffe", + "0x48127fff7fff8000", + "0x48127ffd7fff8000", + "0x480a7ff97fff8000", + "0x480a7ffa7fff8000", + "0x1104800180018000", + "0xb7", + "0x20680017fff7ff8", + "0x39", + "0x20680017fff7ffb", + "0x2a", + "0x400280007ffc7ffc", + "0x400280017ffc7ffd", + "0x400280027ffc7ffe", + "0x400280037ffc7fff", + "0x48127ff67fff8000", + "0x48127ff67fff8000", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x480a7ffb7fff8000", + "0x482680017ffc8000", + "0x4", + "0x4825800180007ffd", + "0x1", + "0x1104800180018000", + "0x800000000000010ffffffffffffffffffffffffffffffffffffffffffffffca", + "0x20680017fff7ffa", + "0xc", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff67fff8000", + "0x48127ff67fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff67fff8000", + "0x48127ff67fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x48127ff67fff8000", + "0x48127ff67fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x4f7574206f6620676173", + "0x400080007ffe7fff", + "0x482680017ff78000", + "0x1", + "0x480a7ff87fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x482480017ff78000", + "0x1", + "0x208b7fff7fff7ffe", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0xc5", + "0x20680017fff7ffd", + "0xa", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480080007ffb8000", + "0x208b7fff7fff7ffe", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480a7ff97fff8000", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x48127ff87fff8000", + "0x1104800180018000", + "0xbc", + "0x20680017fff7ffa", + "0xa", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x480680017fff8000", + 
"0x0", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x208b7fff7fff7ffe", + "0x480a7ffb7fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x15d", + "0x48127ffe7fff8000", + "0x48127ffe7fff8000", + "0x208b7fff7fff7ffe", + "0x48297ffa80007ffb", + "0x4844800180007fff", + "0x2", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x158", + "0x480a7ff87fff8000", + "0x480a7ff97fff8000", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x1104800180018000", + "0x158", + "0x20680017fff7ffd", + "0x9", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x2", + "0x480a7ffa7fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x1b0", + "0x20680017fff7ffe", + "0x54", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x1104800180018000", + "0x1e3", + "0x40137ff07fff8001", + "0x20680017fff7ffe", + "0x3c", + "0x48127fec7fff8000", + "0x480a7ffb7fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x40137ffb7fff8000", + "0x1104800180018000", + "0x1fd", + "0x20680017fff7ffa", + "0x22", + "0x20680017fff7ffd", + "0xf", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x0", + "0x480a80017fff8000", + "0x480a80007fff8000", + "0x48127ff67fff8000", + "0x48127ff67fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff67fff8000", + "0x48127ff67fff8000", + "0x208b7fff7fff7ffe", + "0x48127fec7fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x48127ffb7fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x1ec", + "0x20680017fff7ffd", + "0xa", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480080007ffb8000", + "0x208b7fff7fff7ffe", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + 
"0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x1104800180018000", + "0x372", + "0x482480017fff8000", + "0x371", + "0x480080007fff8000", + "0xa0680017fff8000", + "0x9", + "0x4825800180007ff7", + "0x41dc", + "0x482480017fff8000", + "0x100000000000000000000000000000000", + "0x400280007ff67fff", + "0x10780017fff7fff", + "0x95", + "0x4825800180007ff7", + "0x41dc", + "0x400280007ff67fff", + "0x482680017ff68000", + "0x1", + "0x48297ff980007ffa", + "0x20680017fff7fff", + "0x4", + "0x10780017fff7fff", + "0xa", + "0x482680017ff98000", + "0x4", + "0x480a7ffa7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ff97fff8000", + "0x10780017fff7fff", + "0x8", + "0x480a7ff97fff8000", + "0x480a7ffa7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x20680017fff7ffc", + "0x6e", + "0x480080007ffd8000", + "0x480080017ffc8000", + "0x480080027ffb8000", + "0x480080037ffa8000", + "0x48127ffe7fff8000", + "0x48127ffe7fff8000", + "0x480680017fff8000", + "0x43616c6c436f6e7472616374", + "0x400280007ff87fff", + "0x400280017ff87ff0", + "0x400280027ff87ff9", + "0x400280037ff87ffa", + "0x400280047ff87ffd", + "0x400280057ff87ffe", + "0x480280077ff88000", + "0x20680017fff7fff", + "0x2e", + "0x480280087ff88000", + "0x480280097ff88000", + "0x400280007ffd7ffe", + "0x400280017ffd7fff", + "0x48127fee7fff8000", + "0x480280067ff88000", + "0x482680017ff88000", + "0xa", + "0x48127ff17fff8000", + "0x48127ff17fff8000", + "0x482680017ffb8000", + "0x1", + "0x480a7ffc7fff8000", + "0x482680017ffd8000", + "0x2", + "0x1104800180018000", + "0x800000000000010ffffffffffffffffffffffffffffffffffffffffffffffb5", + "0x20680017fff7ffa", + "0xd", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x617267656e742f6d756c746963616c6c2d6661696c6564", + "0x400080007ffe7fff", + "0x400180017ffe7ffb", + "0x48127fee7fff8000", + "0x480280067ff88000", + "0x48127ffc7fff8000", + "0x482480017ffb8000", + "0x2", + "0x480280087ff88000", + "0x480280097ff88000", + "0x402780017ff88000", + "0xa", + "0x1104800180018000", + "0x18c", + "0x20680017fff7ffd", + "0x10", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480a80007fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x208b7fff7fff7ffe", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480a80007fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff77fff8000", + "0x48127ff77fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff67fff8000", + "0x480a7ff87fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x480a7ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + 
"0x4f7574206f6620676173", + "0x400080007ffe7fff", + "0x482680017ff68000", + "0x1", + "0x480a7ff77fff8000", + "0x480a7ff87fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff77fff8000", + "0x482480017ff68000", + "0x1", + "0x208b7fff7fff7ffe", + "0x400380007ffd7ffb", + "0x480a7ffc7fff8000", + "0x482680017ffd8000", + "0x1", + "0x208b7fff7fff7ffe", + "0x480a7ffb7fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x800000000000010fffffffffffffffffffffffffffffffffffffffffffffff9", + "0x48127ffe7fff8000", + "0x48127ffe7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x2", + "0x1104800180018000", + "0x2ad", + "0x482480017fff8000", + "0x2ac", + "0x480080007fff8000", + "0xa0680017fff8000", + "0x9", + "0x4825800180007ff9", + "0x25a8", + "0x482480017fff8000", + "0x100000000000000000000000000000000", + "0x400280007ff87fff", + "0x10780017fff7fff", + "0x53", + "0x4825800180007ff9", + "0x25a8", + "0x400280007ff87fff", + "0x482680017ff88000", + "0x1", + "0x48297ffa80007ffb", + "0x20680017fff7fff", + "0x4", + "0x10780017fff7fff", + "0xa", + "0x482680017ffa8000", + "0x2", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ffa7fff8000", + "0x10780017fff7fff", + "0x8", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x40137ffe7fff8000", + "0x40137fff7fff8001", + "0x20680017fff7ffc", + "0x2a", + "0x48127ff87fff8000", + "0x48127ff67fff8000", + "0x480080007ffb8000", + "0x480080017ffa8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x123", + "0x20680017fff7ffd", + "0x19", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480a80007fff8000", + "0x480a80017fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x1104800180018000", + "0x800000000000010ffffffffffffffffffffffffffffffffffffffffffffffc3", + "0x20680017fff7ffd", + "0x8", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x10780017fff7fff", + "0x14", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff67fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x4f7574206f6620676173", + "0x400080007ffe7fff", + "0x482680017ff88000", + "0x1", + "0x480a7ff97fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x482480017ffa8000", + "0x1", + "0x208b7fff7fff7ffe", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x37", + "0x20680017fff7ffe", + "0x2b", + "0xa0680017fff8004", + "0xe", + "0x4824800180047ffe", + "0x800000000000000000000000000000000000000000000000000000000000000", + "0x484480017ffe8000", + "0x110000000000000000", + "0x48307ffe7fff8002", + "0x480280007ffb7ffc", + "0x480280017ffb7ffc", + "0x402480017ffb7ffd", + "0xffffffffffffffeeffffffffffffffff", + "0x400280027ffb7ffd", + "0x10780017fff7fff", + "0x14", + "0x484480017fff8001", + 
"0x8000000000000000000000000000000", + "0x48307fff80007ffd", + "0x480280007ffb7ffd", + "0x480280017ffb7ffd", + "0x402480017ffc7ffe", + "0xf8000000000000000000000000000000", + "0x400280027ffb7ffe", + "0x40780017fff7fff", + "0x1", + "0x482680017ffb8000", + "0x3", + "0x48127ff57fff8000", + "0x48127ff57fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff57fff8000", + "0x208b7fff7fff7ffe", + "0x482680017ffb8000", + "0x3", + "0x48127ff57fff8000", + "0x48127ff57fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x6", + "0x480a7ffb7fff8000", + "0x48127ff57fff8000", + "0x48127ff57fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x48297ffc80007ffd", + "0x20680017fff7fff", + "0x4", + "0x10780017fff7fff", + "0xa", + "0x482680017ffc8000", + "0x1", + "0x480a7ffd7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ffc7fff8000", + "0x10780017fff7fff", + "0x8", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x20680017fff7ffc", + "0x8", + "0x48127ffe7fff8000", + "0x48127ffe7fff8000", + "0x480680017fff8000", + "0x0", + "0x480080007ffa8000", + "0x208b7fff7fff7ffe", + "0x48127ffe7fff8000", + "0x48127ffe7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x48297ffc80007ffd", + "0x20680017fff7fff", + "0x4", + "0x10780017fff7fff", + "0xa", + "0x482680017ffc8000", + "0x1", + "0x480a7ffd7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ffc7fff8000", + "0x10780017fff7fff", + "0x8", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x20680017fff7ffc", + "0x26", + "0x40780017fff7fff", + "0x1", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x48127ffa7fff8000", + "0x480080007ff68000", + "0x1104800180018000", + "0x88", + "0x20680017fff7ffa", + "0xc", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x208b7fff7fff7ffe", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x480680017fff8000", + "0x476574457865637574696f6e496e666f", + "0x400280007ffd7fff", + "0x400380017ffd7ffc", + "0x480280037ffd8000", + "0x20680017fff7fff", + "0xc", + "0x480280027ffd8000", + "0x482680017ffd8000", + "0x5", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480280047ffd8000", + "0x10780017fff7fff", + "0x9", + "0x480280027ffd8000", + "0x482680017ffd8000", + "0x6", + "0x480680017fff8000", + "0x1", + "0x480280047ffd8000", + "0x480280057ffd8000", + "0x1104800180018000", + "0xb4", + "0x20680017fff7ffd", + "0xa", + "0x48127ff67fff8000", + "0x48127ff67fff8000", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + 
"0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff67fff8000", + "0x48127ff67fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x480a7ff87fff8000", + "0x480a7ff97fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x1104800180018000", + "0xa8", + "0x20680017fff7ffb", + "0x9", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x48297ffa80007ffb", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x800000000000010fffffffffffffffffffffffffffffffffffffffffffffe9f", + "0x480a7ff87fff8000", + "0x480a7ff97fff8000", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x1104800180018000", + "0xee", + "0x20680017fff7ffd", + "0x9", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x1104800180018000", + "0x13b", + "0x482480017fff8000", + "0x13a", + "0x480080007fff8000", + "0xa0680017fff8000", + "0x9", + "0x4825800180007ff8", + "0x12a2", + "0x482480017fff8000", + "0x100000000000000000000000000000000", + "0x400280007ff77fff", + "0x10780017fff7fff", + "0x4c", + "0x4825800180007ff8", + "0x12a2", + "0x400280007ff77fff", + "0x482680017ff78000", + "0x1", + "0x20780017fff7ffd", + "0xd", + "0x48127fff7fff8000", + "0x48127ffd7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ff97fff8000", + "0x480a7ffa7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ffb7fff8000", + "0x480a7ffc7fff8000", + "0x208b7fff7fff7ffe", + "0x480a7ff97fff8000", + "0x480a7ffa7fff8000", + "0x1104800180018000", + "0x800000000000010ffffffffffffffffffffffffffffffffffffffffffffff13", + "0x20680017fff7ffe", + "0x27", + "0x400280007ffc7fff", + "0x48127ff07fff8000", + "0x48127fee7fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x480a7ffb7fff8000", + "0x482680017ffc8000", + "0x1", + "0x4825800180007ffd", + "0x1", + "0x1104800180018000", + "0x800000000000010ffffffffffffffffffffffffffffffffffffffffffffffd1", + "0x20680017fff7ffa", + "0xc", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x48127ff87fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff07fff8000", + "0x48127fee7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x4f7574206f6620676173", + "0x400080007ffe7fff", + "0x482680017ff78000", + "0x1", + "0x480a7ff87fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + 
"0x480680017fff8000", + "0x0", + "0x48127ff87fff8000", + "0x482480017ff78000", + "0x1", + "0x208b7fff7fff7ffe", + "0x20780017fff7ffb", + "0x8", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x480a7ffd7fff8000", + "0x208b7fff7fff7ffe", + "0x480680017fff8000", + "0x1", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x208b7fff7fff7ffe", + "0x1104800180018000", + "0xc2", + "0x482480017fff8000", + "0xc1", + "0x480080007fff8000", + "0xa0680017fff8000", + "0x9", + "0x4825800180007ff9", + "0xd2a", + "0x482480017fff8000", + "0x100000000000000000000000000000000", + "0x400280007ff87fff", + "0x10780017fff7fff", + "0x45", + "0x4825800180007ff9", + "0xd2a", + "0x400280007ff87fff", + "0x482680017ff88000", + "0x1", + "0x48297ffa80007ffb", + "0x20680017fff7fff", + "0x4", + "0x10780017fff7fff", + "0xa", + "0x482680017ffa8000", + "0x1", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x480280007ffa8000", + "0x10780017fff7fff", + "0x8", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x20680017fff7ffe", + "0x22", + "0x400280007ffd7fff", + "0x48127ffa7fff8000", + "0x48127ff87fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x480a7ffc7fff8000", + "0x482680017ffd8000", + "0x1", + "0x1104800180018000", + "0x800000000000010ffffffffffffffffffffffffffffffffffffffffffffffd1", + "0x20680017fff7ffb", + "0xb", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x208b7fff7fff7ffe", + "0x48127ffa7fff8000", + "0x48127ff87fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x48127ff97fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x4f7574206f6620676173", + "0x400080007ffe7fff", + "0x482680017ff88000", + "0x1", + "0x480a7ff97fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x480680017fff8000", + "0x0", + "0x48127ff97fff8000", + "0x482480017ff88000", + "0x1", + "0x208b7fff7fff7ffe", + "0x1104800180018000", + "0x5f", + "0x482480017fff8000", + "0x5e", + "0x480080007fff8000", + "0xa0680017fff8000", + "0x9", + "0x4825800180007ff9", + "0x11da", + "0x482480017fff8000", + "0x100000000000000000000000000000000", + "0x400280007ff87fff", + "0x10780017fff7fff", + "0x45", + "0x4825800180007ff9", + "0x11da", + "0x400280007ff87fff", + "0x482680017ff88000", + "0x1", + "0x48297ffa80007ffb", + "0x20680017fff7fff", + "0x4", + "0x10780017fff7fff", + "0xa", + "0x482680017ffa8000", + "0x1", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x0", + "0x480a7ffa7fff8000", + "0x10780017fff7fff", + "0x8", + "0x480a7ffa7fff8000", + "0x480a7ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x480680017fff8000", + "0x0", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x20680017fff7ffc", + "0x1e", + "0x480080007ffd8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x1104800180018000", + "0x800000000000010fffffffffffffffffffffffffffffffffffffffffffffd77", + "0x48127ff17fff8000", + "0x48127fef7fff8000", + "0x48127ff57fff8000", + "0x48127ff57fff8000", + "0x48127ffa7fff8000", + "0x48127ffa7fff8000", + "0x1104800180018000", + 
"0x800000000000010ffffffffffffffffffffffffffffffffffffffffffffffcc", + "0x20680017fff7ffd", + "0x8", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x10780017fff7fff", + "0xd", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x48127ff87fff8000", + "0x48127ff67fff8000", + "0x480a7ffc7fff8000", + "0x480a7ffd7fff8000", + "0x48127ffc7fff8000", + "0x48127ffc7fff8000", + "0x480680017fff8000", + "0x0", + "0x48127ffb7fff8000", + "0x48127ffb7fff8000", + "0x208b7fff7fff7ffe", + "0x40780017fff7fff", + "0x1", + "0x480680017fff8000", + "0x4f7574206f6620676173", + "0x400080007ffe7fff", + "0x482680017ff88000", + "0x1", + "0x480a7ff97fff8000", + "0x480680017fff8000", + "0x1", + "0x48127ffb7fff8000", + "0x482480017ffa8000", + "0x1", + "0x208b7fff7fff7ffe" + ], + "hints": [ + [ + 2, + [ + { + "TestLessThanOrEqual": { + "lhs": { + "Immediate": "0x706c" + }, + "rhs": { + "Deref": { + "register": "FP", + "offset": -6 + } + }, + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 46, + [ + { + "TestLessThanOrEqual": { + "lhs": { + "Immediate": "0x0" + }, + "rhs": { + "Deref": { + "register": "AP", + "offset": -16 + } + }, + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 69, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 106, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 124, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 138, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 160, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 198, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 330, + [ + { + "TestLessThanOrEqual": { + "lhs": { + "Immediate": "0x429a" + }, + "rhs": { + "Deref": { + "register": "FP", + "offset": -8 + } + }, + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 433, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 474, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 680, + [ + { + "TestLessThanOrEqual": { + "lhs": { + "Immediate": "0x41dc" + }, + "rhs": { + "Deref": { + "register": "FP", + "offset": -9 + } + }, + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 731, + [ + { + "SystemCall": { + "system": { + "Deref": { + "register": "FP", + "offset": -8 + } + } + } + } + ] + ], + [ + 778, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 836, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 877, + [ + { + "TestLessThanOrEqual": { + "lhs": { + "Immediate": "0x25a8" + }, + "rhs": { + "Deref": { + "register": "FP", + "offset": -7 + } + }, + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 967, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 987, + [ + { + "TestLessThan": { + "lhs": { + "Deref": { + "register": "AP", + "offset": -1 + } + }, + "rhs": { + "Immediate": "0x800000000000000000000000000000000000000000000000000000000000000" + }, + "dst": { + "register": "AP", + "offset": 4 + } 
+ } + } + ] + ], + [ + 991, + [ + { + "LinearSplit": { + "value": { + "Deref": { + "register": "AP", + "offset": 3 + } + }, + "scalar": { + "Immediate": "0x110000000000000000" + }, + "max_x": { + "Immediate": "0xffffffffffffffffffffffffffffffff" + }, + "x": { + "register": "AP", + "offset": -2 + }, + "y": { + "register": "AP", + "offset": -1 + } + } + } + ] + ], + [ + 1001, + [ + { + "LinearSplit": { + "value": { + "Deref": { + "register": "AP", + "offset": -2 + } + }, + "scalar": { + "Immediate": "0x8000000000000000000000000000000" + }, + "max_x": { + "Immediate": "0xffffffffffffffffffffffffffffffff" + }, + "x": { + "register": "AP", + "offset": -1 + }, + "y": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 1097, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 1150, + [ + { + "SystemCall": { + "system": { + "Deref": { + "register": "FP", + "offset": -3 + } + } + } + } + ] + ], + [ + 1247, + [ + { + "TestLessThanOrEqual": { + "lhs": { + "Immediate": "0x12a2" + }, + "rhs": { + "Deref": { + "register": "FP", + "offset": -8 + } + }, + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 1330, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 1368, + [ + { + "TestLessThanOrEqual": { + "lhs": { + "Immediate": "0xd2a" + }, + "rhs": { + "Deref": { + "register": "FP", + "offset": -7 + } + }, + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 1444, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 1467, + [ + { + "TestLessThanOrEqual": { + "lhs": { + "Immediate": "0x11da" + }, + "rhs": { + "Deref": { + "register": "FP", + "offset": -7 + } + }, + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ], + [ + 1543, + [ + { + "AllocSegment": { + "dst": { + "register": "AP", + "offset": 0 + } + } + } + ] + ] + ], + "entry_points_by_type": { + "EXTERNAL": [ + { + "selector": "0x23ce8154ba7968a9d040577a2140e30474cee3aad4ba52d26bc483e648643f4", + "offset": 0, + "builtins": ["range_check"] + } + ], + "L1_HANDLER": [], + "CONSTRUCTOR": [] + } +} diff --git a/configs/genesis-assets/genesis.json b/configs/genesis-assets/genesis.json index c4c975ab05..618bcc31b5 100644 --- a/configs/genesis-assets/genesis.json +++ b/configs/genesis-assets/genesis.json @@ -14,6 +14,13 @@ "version": 0 } ], + [ + "0x01d53d50d204842575c87f25161248aadb26d33c7375d0f043e2a3f1243cf874", + { + "path": "genesis-assets/ArgentMulticall.casm.json", + "version": 1 + } + ], [ "0x0424b7f61e3c5dfd74400d96fdea7e1f0bf2757f31df04387eaa957f095dd7b9", { "path": "genesis-assets/Proxy.json", "version": 0 } @@ -119,9 +126,34 @@ [ "0x041a78e741e5af2fec34b695679bc6891742439f7afb8484ecd7766661ad02bf", "0x07b3e05f48f0c69e4a65ce5e076a66271a527aff2c34ce1083ec6e1526997a69" + ], + [ + "0x05754af3760f3356da99aea5c3ec39ccac7783d925a19666ebbeca58ff0087f4", + "0x01d53d50d204842575c87f25161248aadb26d33c7375d0f043e2a3f1243cf874" ] ], "storage": [ + [ + [ + "0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", + "0x341c1bdfd89f69748aa00b5742b03adbffd79b8e80cab5c50d91cd8c2a79be1" + ], + "0x4574686572" + ], + [ + [ + "0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", + "0x0b6ce5410fca59d078ee9b2a4371a9d684c530d697c64fbef0ae6d5e8f0ac72" + ], + "0x455448" + ], + [ + [ + "0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", + "0x1f0d4aa99431d246bac9b8e48c33e888245b15e9678f64f9bdfc8823dc8f979" + ], + "0x12" + ], [ [ 
"0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", diff --git a/configs/index.json b/configs/index.json index ba20092dc4..11fd3aec74 100644 --- a/configs/index.json +++ b/configs/index.json @@ -13,6 +13,10 @@ "name": "ArgentAccount.json", "md5": "fb96b0c9993878c6a5a5cabf8ce9b3c9" }, + { + "name": "ArgentMulticall.casm.json", + "md5": "506d699bd1e11853a336784bd9dd7be4" + }, { "name": "CallAggregator.json", "md5": "3f418be1aab5ebd4a591e815af65533a" @@ -27,7 +31,7 @@ }, { "name": "genesis.json", - "md5": "06b5c369b9ba1cbf18e0496855ff929c" + "md5": "d35a8d77f9d7d18ff48a5dac2685eb5b" }, { "name": "NoValidateAccount.casm.json", diff --git a/crates/client/block-proposer/Cargo.toml b/crates/client/block-proposer/Cargo.toml deleted file mode 100644 index f447a1c4f9..0000000000 --- a/crates/client/block-proposer/Cargo.toml +++ /dev/null @@ -1,39 +0,0 @@ -[package] -name = "mc-block-proposer" -version.workspace = true -edition.workspace = true -description = "Starknet block proposer implementation." -authors = [ - "Abdelhamid Bakhta ", - "Substrate DevHub ", -] -homepage = "https://github.com/keep-starknet-strange/madara" -license = "MIT" -publish = false -repository = "https://github.com/keep-starknet-strange/madara" - -[package.metadata.docs.rs] -targets = ["x86_64-unknown-linux-gnu"] - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.2.2" } -futures = "0.3.29" -futures-timer = "3.0.1" -log = "0.4.20" -prometheus-endpoint = { workspace = true } -sc-block-builder = { workspace = true } -sc-client-api = { workspace = true } -sc-proposer-metrics = { workspace = true } -sc-telemetry = { workspace = true } -sc-transaction-pool-api = { workspace = true } -sp-api = { workspace = true } -sp-blockchain = { workspace = true } -sp-consensus = { workspace = true } -sp-core = { workspace = true } -sp-inherents = { workspace = true } -sp-runtime = { workspace = true } - -[dev-dependencies] -parking_lot = { workspace = true } -sc-transaction-pool = { workspace = true } -substrate-test-runtime-client = { workspace = true } diff --git a/crates/client/block-proposer/src/lib.rs b/crates/client/block-proposer/src/lib.rs deleted file mode 100644 index 89b0852a5b..0000000000 --- a/crates/client/block-proposer/src/lib.rs +++ /dev/null @@ -1,982 +0,0 @@ -//! Block proposer implementation. -//! This crate implements the [`sp_consensus::Proposer`] trait. -//! It is used to build blocks for the block authoring node. -//! The block authoring node is the node that is responsible for building new blocks. 
-use std::marker::PhantomData; -use std::pin::Pin; -use std::sync::Arc; -use std::time; - -use codec::Encode; -use futures::channel::oneshot; -use futures::future::{Future, FutureExt}; -use futures::{future, select}; -use log::{debug, error, trace, warn}; -use prometheus_endpoint::Registry as PrometheusRegistry; -use sc_block_builder::{BlockBuilderApi, BlockBuilderProvider}; -use sc_client_api::backend; -use sc_proposer_metrics::{EndProposingReason, MetricsLink as PrometheusMetrics}; -use sc_transaction_pool_api::{InPoolTransaction, TransactionPool}; -use sp_api::{ApiExt, ProvideRuntimeApi}; -use sp_blockchain::ApplyExtrinsicFailed::Validity; -use sp_blockchain::Error::ApplyExtrinsicFailed; -use sp_blockchain::HeaderBackend; -use sp_consensus::{DisableProofRecording, ProofRecording, Proposal}; -use sp_core::traits::SpawnNamed; -use sp_inherents::InherentData; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; -use sp_runtime::{Digest, Percent, SaturatedConversion}; - -/// Default block size limit in bytes used by [`Proposer`]. -/// -/// Can be overwritten by [`ProposerFactory::set_default_block_size_limit`]. -/// -/// Be aware that there is also an upper packet size on what the networking code -/// will accept. If the block doesn't fit in such a package, it can not be -/// transferred to other nodes. -pub const DEFAULT_BLOCK_SIZE_LIMIT: usize = 4 * 1024 * 1024 + 512; -/// Default value for `soft_deadline_percent` used by [`Proposer`]. -/// `soft_deadline_percent` value is used to compute soft deadline during block production. -/// The soft deadline indicates where we should stop attempting to add transactions -/// to the block, which exhaust resources. After soft deadline is reached, -/// we switch to a fixed-amount mode, in which after we see `MAX_SKIPPED_TRANSACTIONS` -/// transactions which exhaust resources, we will conclude that the block is full. -const DEFAULT_SOFT_DEADLINE_PERCENT: Percent = Percent::from_percent(80); - -const LOG_TARGET: &str = "block-proposer"; - -/// [`Proposer`] factory. -pub struct ProposerFactory { - spawn_handle: Box, - /// The client instance. - client: Arc, - /// The transaction pool. - transaction_pool: Arc, - /// Prometheus Link, - metrics: PrometheusMetrics, - /// The default block size limit. - /// - /// If no `block_size_limit` is passed to [`sp_consensus::Proposer::propose`], this block size - /// limit will be used. - default_block_size_limit: usize, - /// Soft deadline percentage of hard deadline. - /// - /// The value is used to compute soft deadline during block production. - /// The soft deadline indicates where we should stop attempting to add transactions - /// to the block, which exhaust resources. After soft deadline is reached, - /// we switch to a fixed-amount mode, in which after we see `MAX_SKIPPED_TRANSACTIONS` - /// transactions which exhaust resources, we will conclude that the block is full. - soft_deadline_percent: Percent, - /// phantom member to pin the `Backend`/`ProofRecording` type. - _phantom: PhantomData<(B, PR)>, -} - -impl ProposerFactory { - /// Create a new proposer factory. - /// - /// Proof recording will be disabled when using proposers built by this instance to build - /// blocks. 
- pub fn new( - spawn_handle: impl SpawnNamed + 'static, - client: Arc, - transaction_pool: Arc, - prometheus: Option<&PrometheusRegistry>, - ) -> Self { - ProposerFactory { - spawn_handle: Box::new(spawn_handle), - transaction_pool, - metrics: PrometheusMetrics::new(prometheus), - default_block_size_limit: DEFAULT_BLOCK_SIZE_LIMIT, - soft_deadline_percent: DEFAULT_SOFT_DEADLINE_PERCENT, - client, - _phantom: PhantomData, - } - } -} - -impl ProposerFactory { - /// Set the default block size limit in bytes. - /// - /// The default value for the block size limit is: - /// [`DEFAULT_BLOCK_SIZE_LIMIT`]. - /// - /// If there is no block size limit passed to [`sp_consensus::Proposer::propose`], this value - /// will be used. - pub fn set_default_block_size_limit(&mut self, limit: usize) { - self.default_block_size_limit = limit; - } - - /// Set soft deadline percentage. - /// - /// The value is used to compute soft deadline during block production. - /// The soft deadline indicates where we should stop attempting to add transactions - /// to the block, which exhaust resources. After soft deadline is reached, - /// we switch to a fixed-amount mode, in which after we see `MAX_SKIPPED_TRANSACTIONS` - /// transactions which exhaust resources, we will conclude that the block is full. - /// - /// Setting the value too low will significantly limit the amount of transactions - /// we try in case they exhaust resources. Setting the value too high can - /// potentially open a DoS vector, where many "exhaust resources" transactions - /// are being tried with no success, hence block producer ends up creating an empty block. - pub fn set_soft_deadline(&mut self, percent: Percent) { - self.soft_deadline_percent = percent; - } -} - -impl ProposerFactory -where - A: TransactionPool + 'static, - B: backend::Backend + Send + Sync + 'static, - Block: BlockT, - C: BlockBuilderProvider + HeaderBackend + ProvideRuntimeApi + Send + Sync + 'static, - C::Api: ApiExt> + BlockBuilderApi, -{ - fn init_with_now( - &mut self, - parent_header: &::Header, - now: Box time::Instant + Send + Sync>, - ) -> Proposer { - let parent_hash = parent_header.hash(); - - // info!("🩸 Starting consensus session on top of parent {:?}", parent_hash); - - let proposer = Proposer::<_, _, _, _, PR> { - spawn_handle: self.spawn_handle.clone(), - client: self.client.clone(), - parent_hash, - parent_number: *parent_header.number(), - transaction_pool: self.transaction_pool.clone(), - now, - metrics: self.metrics.clone(), - default_block_size_limit: self.default_block_size_limit, - soft_deadline_percent: self.soft_deadline_percent, - _phantom: PhantomData, - }; - - proposer - } -} - -impl sp_consensus::Environment for ProposerFactory -where - A: TransactionPool + 'static, - B: backend::Backend + Send + Sync + 'static, - Block: BlockT, - C: BlockBuilderProvider + HeaderBackend + ProvideRuntimeApi + Send + Sync + 'static, - C::Api: ApiExt> + BlockBuilderApi, - PR: ProofRecording, -{ - type CreateProposer = future::Ready>; - type Proposer = Proposer; - type Error = sp_blockchain::Error; - - fn init(&mut self, parent_header: &::Header) -> Self::CreateProposer { - future::ready(Ok(self.init_with_now(parent_header, Box::new(time::Instant::now)))) - } -} - -/// The proposer logic. 
-pub struct Proposer { - spawn_handle: Box, - client: Arc, - parent_hash: Block::Hash, - parent_number: <::Header as HeaderT>::Number, - transaction_pool: Arc, - now: Box time::Instant + Send + Sync>, - metrics: PrometheusMetrics, - default_block_size_limit: usize, - soft_deadline_percent: Percent, - _phantom: PhantomData<(B, PR)>, -} - -impl sp_consensus::Proposer for Proposer -where - A: TransactionPool + 'static, - B: backend::Backend + Send + Sync + 'static, - Block: BlockT, - C: BlockBuilderProvider + HeaderBackend + ProvideRuntimeApi + Send + Sync + 'static, - C::Api: ApiExt> + BlockBuilderApi, - PR: ProofRecording, -{ - type Transaction = backend::TransactionFor; - type Proposal = - Pin, Self::Error>> + Send>>; - type Error = sp_blockchain::Error; - type ProofRecording = PR; - type Proof = PR::Proof; - - fn propose( - self, - inherent_data: InherentData, - inherent_digests: Digest, - max_duration: time::Duration, - block_size_limit: Option, - ) -> Self::Proposal { - let (tx, rx) = oneshot::channel(); - let spawn_handle = self.spawn_handle.clone(); - - spawn_handle.spawn_blocking( - "madara-block-proposer", - None, - Box::pin(async move { - // Leave some time for evaluation and block finalization (20%) - // and some time for block production (80%). - // We need to benchmark and tune this value. - // Open question: should we make this configurable? - let deadline = (self.now)() + max_duration - max_duration / 5; - let res = self.propose_with(inherent_data, inherent_digests, deadline, block_size_limit).await; - if tx.send(res).is_err() { - trace!("Could not send block production result to proposer!"); - } - }), - ); - - async move { rx.await? }.boxed() - } -} - -/// If the block is full we will attempt to push at most -/// this number of transactions before quitting for real. -/// It allows us to increase block utilization. -const MAX_SKIPPED_TRANSACTIONS: usize = 8; - -impl Proposer -where - A: TransactionPool, - B: backend::Backend + Send + Sync + 'static, - Block: BlockT, - C: BlockBuilderProvider + HeaderBackend + ProvideRuntimeApi + Send + Sync + 'static, - C::Api: ApiExt> + BlockBuilderApi, - PR: ProofRecording, -{ - /// Propose a new block. - /// - /// # Arguments - /// * `inherents` - The inherents to include in the block. - /// * `inherent_digests` - The inherent digests to include in the block. - /// * `deadline` - The deadline for proposing the block. - /// * `block_size_limit` - The maximum size of the block in bytes. - /// - /// - /// The function follows these general steps: - /// 1. Starts a timer to measure the total time it takes to create the proposal. - /// 2. Initializes a new block at the parent hash with the given inherent digests. - /// 3. Iterates over the inherents and pushes them into the block builder. Handles any potential - /// errors. - /// 4. Sets up the soft deadline and starts the block timer. - /// 5. Gets an iterator over the pending transactions and iterates over them. - /// 6. Checks the deadline and handles the case when the deadline is reached. - /// 7. Checks the block size limit and handles cases where transactions would cause the block to - /// exceed the limit. - /// 8. Attempts to push the transaction into the block and handles any - /// potential errors. - /// 9. If the block size limit was reached without adding any transaction, - /// it logs a warning. - /// 10. Removes invalid transactions from the pool. - /// 11. Builds the block and updates the metrics. - /// 12. Converts the storage proof to the required format. - /// 13. 
Measures the total time it took to create the proposal and updates the corresponding - /// metric. - /// 14. Returns a new `Proposal` with the block, proof, and storage changes. - /// - /// # Errors - /// - /// This function will return an error if: - /// - The block cannot be created at the parent hash. - /// - Any of the inherents cannot be pushed into the block builder. - /// - The block cannot be built. - /// - The storage proof cannot be converted into the required format. - async fn propose_with( - self, - inherent_data: InherentData, - inherent_digests: Digest, - deadline: time::Instant, - block_size_limit: Option, - ) -> Result, PR::Proof>, sp_blockchain::Error> { - // Start the timer to measure the total time it takes to create the proposal. - let propose_with_timer = time::Instant::now(); - - // Initialize a new block builder at the parent hash with the given inherent digests. - let mut block_builder = self.client.new_block_at(self.parent_hash, inherent_digests, PR::ENABLED)?; - - self.apply_inherents(&mut block_builder, inherent_data)?; - - let block_timer = time::Instant::now(); - - // Apply transactions and record the reason why we stopped. - let end_reason = self.apply_extrinsics(&mut block_builder, deadline, block_size_limit).await?; - - // Build the block. - let (block, storage_changes, proof) = block_builder.build()?.into_inner(); - - // Measure the total time it took to build the block. - let block_took = block_timer.elapsed(); - - // Convert the storage proof into the required format. - let proof = PR::into_proof(proof).map_err(|e| sp_blockchain::Error::Application(Box::new(e)))?; - - // Print the summary of the proposal. - self.print_summary(&block, end_reason, block_took, propose_with_timer.elapsed()); - Ok(Proposal { block, proof, storage_changes }) - } - - /// Apply all inherents to the block. - /// This function will return an error if any of the inherents cannot be pushed into the block - /// builder. It will also update the metrics. - /// # Arguments - /// * `block_builder` - The block builder to push the inherents into. - /// * `inherent_data` - The inherents to push into the block builder. - /// # Returns - /// This function will return `Ok(())` if all inherents were pushed into the block builder. - /// # Errors - /// This function will return an error if any of the inherents cannot be pushed into the block - /// builder. - fn apply_inherents( - &self, - block_builder: &mut sc_block_builder::BlockBuilder<'_, Block, C, B>, - inherent_data: InherentData, - ) -> Result<(), sp_blockchain::Error> { - let create_inherents_start = time::Instant::now(); - let inherents = block_builder.create_inherents(inherent_data)?; - let create_inherents_end = time::Instant::now(); - - self.metrics.report(|metrics| { - metrics - .create_inherents_time - .observe(create_inherents_end.saturating_duration_since(create_inherents_start).as_secs_f64()); - }); - - for inherent in inherents { - match block_builder.push(inherent) { - Err(ApplyExtrinsicFailed(Validity(e))) if e.exhausted_resources() => { - warn!(target: LOG_TARGET, "⚠️ Dropping non-mandatory inherent from overweight block.") - } - Err(ApplyExtrinsicFailed(Validity(e))) if e.was_mandatory() => { - error!("❌️ Mandatory inherent extrinsic returned error. Block cannot be produced."); - return Err(ApplyExtrinsicFailed(Validity(e))); - } - Err(e) => { - warn!(target: LOG_TARGET, "❗️ Inherent extrinsic returned unexpected error: {}. 
Dropping.", e); - } - Ok(_) => {} - } - } - Ok(()) - } - - /// Apply as many extrinsics as possible to the block. - /// This function will return an error if the block cannot be built. - /// # Arguments - /// * `block_builder` - The block builder to push the extrinsics into. - /// * `deadline` - The deadline to stop applying extrinsics. - /// * `block_size_limit` - The maximum size of the block. - /// # Returns - /// The reason why we stopped applying extrinsics. - /// # Errors - /// This function will return an error if the block cannot be built. - async fn apply_extrinsics( - &self, - block_builder: &mut sc_block_builder::BlockBuilder<'_, Block, C, B>, - deadline: time::Instant, - block_size_limit: Option, - ) -> Result { - // proceed with transactions - // We calculate soft deadline used only in case we start skipping transactions. - let now = (self.now)(); - let left = deadline.saturating_duration_since(now); - let left_micros: u64 = left.as_micros().saturated_into(); - let soft_deadline = now + time::Duration::from_micros(self.soft_deadline_percent.mul_floor(left_micros)); - let mut skipped = 0; - let mut unqueue_invalid = Vec::new(); - - let mut t1 = self.transaction_pool.ready_at(self.parent_number).fuse(); - let mut t2 = futures_timer::Delay::new(deadline.saturating_duration_since((self.now)()) / 8).fuse(); - - let mut pending_iterator = select! { - res = t1 => res, - _ = t2 => { - warn!(target: LOG_TARGET, - "Timeout fired waiting for transaction pool at block #{}. \ - Proceeding with production.", - self.parent_number, - ); - self.transaction_pool.ready() - }, - }; - - let block_size_limit = block_size_limit.unwrap_or(self.default_block_size_limit); - - debug!(target: LOG_TARGET, "Attempting to push transactions from the pool."); - debug!(target: LOG_TARGET, "Pool status: {:?}", self.transaction_pool.status()); - let mut transaction_pushed = false; - - let end_reason = loop { - let pending_tx = if let Some(pending_tx) = pending_iterator.next() { - pending_tx - } else { - break EndProposingReason::NoMoreTransactions; - }; - - let now = (self.now)(); - if now > deadline { - debug!( - target: LOG_TARGET, - "Consensus deadline reached when pushing block transactions, proceeding with proposing." - ); - break EndProposingReason::HitDeadline; - } - - let pending_tx_data = pending_tx.data().clone(); - let pending_tx_hash = pending_tx.hash().clone(); - - let block_size = block_builder.estimate_block_size(false); - if block_size + pending_tx_data.encoded_size() > block_size_limit { - pending_iterator.report_invalid(&pending_tx); - if skipped < MAX_SKIPPED_TRANSACTIONS { - skipped += 1; - debug!( - target: LOG_TARGET, - "Transaction would overflow the block size limit, but will try {} more transactions before \ - quitting.", - MAX_SKIPPED_TRANSACTIONS - skipped, - ); - continue; - } else if now < soft_deadline { - debug!( - target: LOG_TARGET, - "Transaction would overflow the block size limit, but we still have time before the soft \ - deadline, so we will try a bit more." 
- ); - continue; - } else { - debug!(target: LOG_TARGET, "Reached block size limit, proceeding with proposing."); - break EndProposingReason::HitBlockSizeLimit; - } - } - - trace!(target: LOG_TARGET, "[{:?}] Pushing to the block.", pending_tx_hash); - match sc_block_builder::BlockBuilder::push(block_builder, pending_tx_data) { - Ok(()) => { - transaction_pushed = true; - debug!(target: LOG_TARGET, "[{:?}] Pushed to the block.", pending_tx_hash); - } - Err(ApplyExtrinsicFailed(Validity(e))) if e.exhausted_resources() => { - pending_iterator.report_invalid(&pending_tx); - if skipped < MAX_SKIPPED_TRANSACTIONS { - skipped += 1; - debug!( - target: LOG_TARGET, - "Block seems full, but will try {} more transactions before quitting.", - MAX_SKIPPED_TRANSACTIONS - skipped, - ); - } else if (self.now)() < soft_deadline { - debug!( - target: LOG_TARGET, - "Block seems full, but we still have time before the soft deadline, so we will try a bit \ - more before quitting." - ); - } else { - debug!(target: LOG_TARGET, "Reached block weight limit, proceeding with proposing."); - break EndProposingReason::HitBlockWeightLimit; - } - } - Err(e) => { - pending_iterator.report_invalid(&pending_tx); - debug!(target: LOG_TARGET, "[{:?}] Invalid transaction: {}", pending_tx_hash, e); - unqueue_invalid.push(pending_tx_hash); - } - } - }; - - if matches!(end_reason, EndProposingReason::HitBlockSizeLimit) && !transaction_pushed { - warn!( - target: LOG_TARGET, - "Hit block size limit of `{}` without including any transaction!", block_size_limit, - ); - } - - self.transaction_pool.remove_invalid(&unqueue_invalid); - Ok(end_reason) - } - - /// Prints a summary and does telemetry + metrics. - /// This is called after the block is created. - /// # Arguments - /// * `block` - The block that was created. - /// * `end_reason` - The reason why we stopped adding transactions to the block. - /// * `block_took` - The time it took to create the block. - /// * `propose_with_took` - The time it took to propose the block. - fn print_summary( - &self, - block: &Block, - end_reason: EndProposingReason, - block_took: time::Duration, - propose_with_took: time::Duration, - ) { - let extrinsics = block.extrinsics(); - self.metrics.report(|metrics| { - metrics.number_of_transactions.set(extrinsics.len() as u64); - metrics.block_constructed.observe(block_took.as_secs_f64()); - metrics.report_end_proposing_reason(end_reason); - metrics.create_block_proposal_time.observe(propose_with_took.as_secs_f64()); - }); - } -} - -#[cfg(test)] -mod tests { - - use futures::executor::block_on; - use parking_lot::Mutex; - use sc_client_api::Backend; - use sc_transaction_pool::BasicPool; - use sc_transaction_pool_api::{ChainEvent, MaintainedTransactionPool, TransactionSource}; - use sp_api::Core; - use sp_blockchain::HeaderBackend; - use sp_consensus::{BlockOrigin, Environment, Proposer}; - use sp_runtime::generic::BlockId; - use sp_runtime::traits::NumberFor; - use sp_runtime::Perbill; - use substrate_test_runtime_client::prelude::*; - use substrate_test_runtime_client::runtime::{Block as TestBlock, Extrinsic, ExtrinsicBuilder, Transfer}; - use substrate_test_runtime_client::{TestClientBuilder, TestClientBuilderExt}; - - use super::*; - - const SOURCE: TransactionSource = TransactionSource::External; - - // Note: - // Maximum normal extrinsic size for `substrate_test_runtime` is ~65% of max_block (refer to - // `substrate_test_runtime::RuntimeBlockWeights` for details). 
- // This extrinsic sizing allows for: - // - one huge xts + a lot of tiny dust - // - one huge, no medium, - // - two medium xts - // This is widely exploited in following tests. - const HUGE: u32 = 649_000_000; - const MEDIUM: u32 = 250_000_000; - const TINY: u32 = 1_000; - - fn extrinsic(nonce: u64) -> Extrinsic { - ExtrinsicBuilder::new_fill_block(Perbill::from_parts(TINY)).nonce(nonce).build() - } - - fn chain_event(header: B::Header) -> ChainEvent - where - NumberFor: From, - { - ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None } - } - - #[test] - fn should_cease_building_block_when_deadline_is_reached() { - let client = Arc::new(substrate_test_runtime_client::new()); - let spawner = sp_core::testing::TaskExecutor::new(); - let txpool = BasicPool::new_full(Default::default(), true.into(), None, spawner.clone(), client.clone()); - - block_on(txpool.submit_at(&BlockId::number(0), SOURCE, vec![extrinsic(0), extrinsic(1)])).unwrap(); - - block_on( - txpool.maintain(chain_event( - client.expect_header(client.info().genesis_hash).expect("there should be header"), - )), - ); - - let mut proposer_factory = ProposerFactory::new(spawner, client.clone(), txpool.clone(), None); - - let cell = Mutex::new((false, time::Instant::now())); - let proposer = proposer_factory.init_with_now( - &client.expect_header(client.info().genesis_hash).unwrap(), - Box::new(move || { - let mut value = cell.lock(); - if !value.0 { - value.0 = true; - return value.1; - } - let old = value.1; - let new = old + time::Duration::from_secs(1); - *value = (true, new); - old - }), - ); - - // when - let deadline = time::Duration::from_secs(3); - let block = block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) - .map(|r| r.block) - .unwrap(); - - // then - // block should have some extrinsics although we have some more in the pool. 
- assert_eq!(block.extrinsics().len(), 1); - assert_eq!(txpool.ready().count(), 2); - } - - #[test] - fn should_not_panic_when_deadline_is_reached() { - let client = Arc::new(substrate_test_runtime_client::new()); - let spawner = sp_core::testing::TaskExecutor::new(); - let txpool = BasicPool::new_full(Default::default(), true.into(), None, spawner.clone(), client.clone()); - - let mut proposer_factory = ProposerFactory::new(spawner, client.clone(), txpool, None); - - let cell = Mutex::new((false, time::Instant::now())); - let proposer = proposer_factory.init_with_now( - &client.expect_header(client.info().genesis_hash).unwrap(), - Box::new(move || { - let mut value = cell.lock(); - if !value.0 { - value.0 = true; - return value.1; - } - let new = value.1 + time::Duration::from_secs(160); - *value = (true, new); - new - }), - ); - - let deadline = time::Duration::from_secs(1); - - block_on(proposer.propose(Default::default(), Default::default(), deadline, None)).map(|r| r.block).unwrap(); - } - - #[test] - fn proposed_storage_changes_should_match_execute_block_storage_changes() { - let (client, backend) = TestClientBuilder::new().build_with_backend(); - let client = Arc::new(client); - let spawner = sp_core::testing::TaskExecutor::new(); - let txpool = BasicPool::new_full(Default::default(), true.into(), None, spawner.clone(), client.clone()); - - let genesis_hash = client.info().best_hash; - - block_on(txpool.submit_at(&BlockId::number(0), SOURCE, vec![extrinsic(0)])).unwrap(); - - block_on( - txpool.maintain(chain_event( - client.expect_header(client.info().genesis_hash).expect("there should be header"), - )), - ); - - let mut proposer_factory = ProposerFactory::new(spawner, client.clone(), txpool, None); - - let proposer = proposer_factory - .init_with_now(&client.header(genesis_hash).unwrap().unwrap(), Box::new(time::Instant::now)); - - let deadline = time::Duration::from_secs(9); - let proposal = block_on(proposer.propose(Default::default(), Default::default(), deadline, None)).unwrap(); - - assert_eq!(proposal.block.extrinsics().len(), 1); - - let api = client.runtime_api(); - api.execute_block(genesis_hash, proposal.block).unwrap(); - - let state = backend.state_at(genesis_hash).unwrap(); - - let storage_changes = api.into_storage_changes(&state, genesis_hash).unwrap(); - - assert_eq!(proposal.storage_changes.transaction_storage_root, storage_changes.transaction_storage_root,); - } - - // This test ensures that if one transaction of a user was rejected, because for example - // the weight limit was hit, we don't mark the other transactions of the user as invalid because - // the nonce is not matching. 
- #[test] - fn should_not_remove_invalid_transactions_from_the_same_sender_after_one_was_invalid() { - // given - let client = Arc::new(substrate_test_runtime_client::new()); - let spawner = sp_core::testing::TaskExecutor::new(); - let txpool = BasicPool::new_full(Default::default(), true.into(), None, spawner.clone(), client.clone()); - - let medium = |nonce| ExtrinsicBuilder::new_fill_block(Perbill::from_parts(MEDIUM)).nonce(nonce).build(); - let huge = |nonce| ExtrinsicBuilder::new_fill_block(Perbill::from_parts(HUGE)).nonce(nonce).build(); - - block_on(txpool.submit_at( - &BlockId::number(0), - SOURCE, - vec![medium(0), medium(1), huge(2), medium(3), huge(4), medium(5), medium(6)], - )) - .unwrap(); - - let mut proposer_factory = ProposerFactory::new(spawner, client.clone(), txpool.clone(), None); - let mut propose_block = - |client: &TestClient, parent_number, expected_block_extrinsics, expected_pool_transactions| { - let hash = client.expect_block_hash_from_id(&BlockId::Number(parent_number)).unwrap(); - let proposer = - proposer_factory.init_with_now(&client.expect_header(hash).unwrap(), Box::new(time::Instant::now)); - - // when - let deadline = time::Duration::from_secs(900); - let block = block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) - .map(|r| r.block) - .unwrap(); - - // then - // block should have some extrinsics although we have some more in the pool. - assert_eq!(txpool.ready().count(), expected_pool_transactions, "at block: {}", block.header.number); - assert_eq!(block.extrinsics().len(), expected_block_extrinsics, "at block: {}", block.header.number); - - block - }; - - let import_and_maintain = |mut client: Arc, block: TestBlock| { - let hash = block.hash(); - block_on(client.import(BlockOrigin::Own, block)).unwrap(); - block_on(txpool.maintain(chain_event(client.expect_header(hash).expect("there should be header")))); - }; - - block_on( - txpool.maintain(chain_event( - client.expect_header(client.info().genesis_hash).expect("there should be header"), - )), - ); - assert_eq!(txpool.ready().count(), 7); - - // let's create one block and import it - let block = propose_block(&client, 0, 2, 7); - import_and_maintain(client.clone(), block); - assert_eq!(txpool.ready().count(), 5); - - // now let's make sure that we can still make some progress - let block = propose_block(&client, 1, 1, 5); - import_and_maintain(client.clone(), block); - assert_eq!(txpool.ready().count(), 4); - - // again let's make sure that we can still make some progress - let block = propose_block(&client, 2, 1, 4); - import_and_maintain(client.clone(), block); - assert_eq!(txpool.ready().count(), 3); - - // again let's make sure that we can still make some progress - let block = propose_block(&client, 3, 1, 3); - import_and_maintain(client.clone(), block); - assert_eq!(txpool.ready().count(), 2); - - // again let's make sure that we can still make some progress - let block = propose_block(&client, 4, 2, 2); - import_and_maintain(client.clone(), block); - assert_eq!(txpool.ready().count(), 0); - } - - #[test] - fn should_keep_adding_transactions_after_exhausts_resources_before_soft_deadline() { - // given - let client = Arc::new(substrate_test_runtime_client::new()); - let spawner = sp_core::testing::TaskExecutor::new(); - let txpool = BasicPool::new_full(Default::default(), true.into(), None, spawner.clone(), client.clone()); - - let tiny = |nonce| ExtrinsicBuilder::new_fill_block(Perbill::from_parts(TINY)).nonce(nonce).build(); - let huge = |who| { - 
ExtrinsicBuilder::new_fill_block(Perbill::from_parts(HUGE)).signer(AccountKeyring::numeric(who)).build() - }; - - block_on(txpool.submit_at( - &BlockId::number(0), - SOURCE, - // add 2 * MAX_SKIPPED_TRANSACTIONS that exhaust resources - (0..MAX_SKIPPED_TRANSACTIONS * 2) - .map(huge) - // and some transactions that are okay. - .chain((0..MAX_SKIPPED_TRANSACTIONS as u64).map(tiny)) - .collect(), - )) - .unwrap(); - - block_on( - txpool.maintain(chain_event( - client.expect_header(client.info().genesis_hash).expect("there should be header"), - )), - ); - assert_eq!(txpool.ready().count(), MAX_SKIPPED_TRANSACTIONS * 3); - - let mut proposer_factory = ProposerFactory::new(spawner, client.clone(), txpool, None); - - let cell = Mutex::new(time::Instant::now()); - let proposer = proposer_factory.init_with_now( - &client.expect_header(client.info().genesis_hash).unwrap(), - Box::new(move || { - let mut value = cell.lock(); - let old = *value; - *value = old + time::Duration::from_secs(1); - old - }), - ); - - // when - // give it enough time so that deadline is never triggered. - let deadline = time::Duration::from_secs(900); - let block = block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) - .map(|r| r.block) - .unwrap(); - - // then block should have all non-exhaust resources extrinsics (+ the first one). - assert_eq!(block.extrinsics().len(), MAX_SKIPPED_TRANSACTIONS + 1); - } - - #[test] - fn should_only_skip_up_to_some_limit_after_soft_deadline() { - // given - let client = Arc::new(substrate_test_runtime_client::new()); - let spawner = sp_core::testing::TaskExecutor::new(); - let txpool = BasicPool::new_full(Default::default(), true.into(), None, spawner.clone(), client.clone()); - - let tiny = |who| { - ExtrinsicBuilder::new_fill_block(Perbill::from_parts(TINY)) - .signer(AccountKeyring::numeric(who)) - .nonce(1) - .build() - }; - let huge = |who| { - ExtrinsicBuilder::new_fill_block(Perbill::from_parts(HUGE)).signer(AccountKeyring::numeric(who)).build() - }; - - block_on(txpool.submit_at( - &BlockId::number(0), - SOURCE, - (0..MAX_SKIPPED_TRANSACTIONS + 2) - .map(huge) - // and some transactions that are okay. - .chain((0..MAX_SKIPPED_TRANSACTIONS + 2).map(tiny)) - .collect(), - )) - .unwrap(); - - block_on( - txpool.maintain(chain_event( - client.expect_header(client.info().genesis_hash).expect("there should be header"), - )), - ); - assert_eq!(txpool.ready().count(), MAX_SKIPPED_TRANSACTIONS * 2 + 4); - - let mut proposer_factory = ProposerFactory::new(spawner, client.clone(), txpool, None); - - let deadline = time::Duration::from_secs(600); - let cell = Arc::new(Mutex::new((0, time::Instant::now()))); - let cell2 = cell.clone(); - let proposer = proposer_factory.init_with_now( - &client.expect_header(client.info().genesis_hash).unwrap(), - Box::new(move || { - let mut value = cell.lock(); - let (called, old) = *value; - // add time after deadline is calculated internally (hence 1) - let increase = if called == 1 { - // we start after the soft_deadline should have already been reached. - deadline / 2 - } else { - // but we make sure to never reach the actual deadline - time::Duration::from_millis(0) - }; - *value = (called + 1, old + increase); - old - }), - ); - - let block = block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) - .map(|r| r.block) - .unwrap(); - - // then the block should have one or two transactions. This maybe random as they are - // processed in parallel. 
The same signer and consecutive nonces for huge and tiny - // transactions guarantees that max two transactions will get to the block. - assert!((1..3).contains(&block.extrinsics().len()), "Block shall contain one or two extrinsics."); - assert!( - cell2.lock().0 > MAX_SKIPPED_TRANSACTIONS, - "Not enough calls to current time, which indicates the test might have ended because of deadline, not \ - soft deadline" - ); - } - - #[test] - fn should_cease_building_block_when_block_limit_is_reached() { - let client = Arc::new(substrate_test_runtime_client::new()); - let spawner = sp_core::testing::TaskExecutor::new(); - let txpool = BasicPool::new_full(Default::default(), true.into(), None, spawner.clone(), client.clone()); - let genesis_header = client.expect_header(client.info().genesis_hash).expect("there should be header"); - - let extrinsics_num = 5; - let extrinsics = std::iter::once( - Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Bob.into(), amount: 100, nonce: 0 } - .into_unchecked_extrinsic(), - ) - .chain((1..extrinsics_num as u64).map(extrinsic)) - .collect::>(); - - let block_limit = genesis_header.encoded_size() - + extrinsics.iter().take(extrinsics_num - 1).map(Encode::encoded_size).sum::() - + Vec::::new().encoded_size(); - - block_on(txpool.submit_at(&BlockId::number(0), SOURCE, extrinsics)).unwrap(); - - block_on(txpool.maintain(chain_event(genesis_header.clone()))); - - let mut proposer_factory = ProposerFactory::new(spawner, client, txpool, None); - - let proposer = block_on(proposer_factory.init(&genesis_header)).unwrap(); - - // Give it enough time - let deadline = time::Duration::from_secs(300); - let block = block_on(proposer.propose(Default::default(), Default::default(), deadline, Some(block_limit))) - .map(|r| r.block) - .unwrap(); - - // Based on the block limit, one transaction shouldn't be included. 
- assert_eq!(block.extrinsics().len(), extrinsics_num - 1); - - let proposer = block_on(proposer_factory.init(&genesis_header)).unwrap(); - - let block = block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) - .map(|r| r.block) - .unwrap(); - - // Without a block limit it should include all of them - assert_eq!(block.extrinsics().len(), extrinsics_num); - } - - #[test] - fn proposer_factory_can_update_default_block_size_limit() { - let client = Arc::new(substrate_test_runtime_client::new()); - let spawner = sp_core::testing::TaskExecutor::new(); - let txpool = BasicPool::new_full(Default::default(), true.into(), None, spawner.clone(), client.clone()); - let genesis_header = client.expect_header(client.info().genesis_hash).expect("there should be header"); - - let extrinsics_num = 5; - let extrinsics = std::iter::once( - Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Bob.into(), amount: 100, nonce: 0 } - .into_unchecked_extrinsic(), - ) - .chain((1..extrinsics_num as u64).map(extrinsic)) - .collect::>(); - - let block_limit = genesis_header.encoded_size() - + extrinsics.iter().take(extrinsics_num - 1).map(Encode::encoded_size).sum::() - + Vec::::new().encoded_size(); - - block_on(txpool.submit_at(&BlockId::number(0), SOURCE, extrinsics)).unwrap(); - - block_on(txpool.maintain(chain_event(genesis_header.clone()))); - - let mut proposer_factory = ProposerFactory::new(spawner, client, txpool, None); - proposer_factory.set_default_block_size_limit(block_limit); - - let proposer = block_on(proposer_factory.init(&genesis_header)).unwrap(); - - // Give it enough time - let deadline = time::Duration::from_secs(300); - let block = block_on(proposer.propose(Default::default(), Default::default(), deadline, Default::default())) - .map(|r| r.block) - .unwrap(); - - // Based on the block limit, one transaction shouldn't be included. 
- assert_eq!(block.extrinsics().len(), extrinsics_num - 1); - - // increase block size limit - proposer_factory.set_default_block_size_limit(block_limit * 2); - let proposer = block_on(proposer_factory.init(&genesis_header)).unwrap(); - - let block = block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) - .map(|r| r.block) - .unwrap(); - - // with increased blocklimit we should include all of them - assert_eq!(block.extrinsics().len(), extrinsics_num); - } -} diff --git a/crates/client/data-availability/Cargo.toml b/crates/client/data-availability/Cargo.toml index f8acb853c5..f80f7f0a3f 100644 --- a/crates/client/data-availability/Cargo.toml +++ b/crates/client/data-availability/Cargo.toml @@ -45,7 +45,7 @@ starknet_api = { workspace = true, default-features = true } ethers = "2.0.7" # Avail subxt dependency -avail-subxt = { git = "https://github.com/availproject/avail", version = "0.3.0", rev = "0958c6ed499497b70a33ab072dcbe86c762f3976" } +avail-subxt = { git = "https://github.com/availproject/avail", version = "0.4.0", tag = "v1.8.0.0" } sp-keyring = { workspace = true } subxt = "0.29" diff --git a/crates/client/data-availability/src/avail/config.rs b/crates/client/data-availability/src/avail/config.rs index 378caca379..853c9eabc8 100644 --- a/crates/client/data-availability/src/avail/config.rs +++ b/crates/client/data-availability/src/avail/config.rs @@ -5,9 +5,9 @@ use serde::Deserialize; use crate::DaMode; -const DEFAULT_AVAIL_WS: &str = "wss://kate.avail.tools:443/ws"; +const DEFAULT_AVAIL_WS: &str = "ws://127.0.0.1:9945"; const DEFAULT_APP_ID: u32 = 0; -const DEFAULT_AVAIL_VALIDATE_CODEGEN: bool = true; +const DEFAULT_AVAIL_VALIDATE_CODEGEN: bool = false; const DEFAULT_AVAIL_SEED: &str = "//Alice"; #[derive(Clone, PartialEq, Deserialize, Debug)] diff --git a/crates/client/data-availability/src/avail/mod.rs b/crates/client/data-availability/src/avail/mod.rs index 88608a55ec..6e86fbd139 100644 --- a/crates/client/data-availability/src/avail/mod.rs +++ b/crates/client/data-availability/src/avail/mod.rs @@ -1,16 +1,18 @@ pub mod config; -use anyhow::Result; +use std::sync::Arc; + +use anyhow::{anyhow, Result}; use async_trait::async_trait; use avail_subxt::api::runtime_types::avail_core::AppId; -use avail_subxt::api::runtime_types::da_control::pallet::Call as DaCall; -use avail_subxt::api::runtime_types::sp_core::bounded::bounded_vec::BoundedVec; -use avail_subxt::avail::{AppUncheckedExtrinsic, Client as AvailSubxtClient}; +use avail_subxt::api::runtime_types::bounded_collections::bounded_vec::BoundedVec; +use avail_subxt::avail::Client as AvailSubxtClient; use avail_subxt::primitives::AvailExtrinsicParams; -use avail_subxt::{api as AvailApi, build_client, AvailConfig, Call}; +use avail_subxt::{api as AvailApi, build_client, AvailConfig}; use ethers::types::{I256, U256}; -use sp_core::H256; +use futures::lock::Mutex; use subxt::ext::sp_core::sr25519::Pair; +use subxt::OnlineClient; use crate::utils::get_bytes_from_state_diff; use crate::{DaClient, DaMode}; @@ -19,21 +21,55 @@ type AvailPairSigner = subxt::tx::PairSigner; #[derive(Clone)] pub struct AvailClient { - ws_client: AvailSubxtClient, + ws_client: Arc>, app_id: AppId, signer: AvailPairSigner, mode: DaMode, } +pub struct SubxtClient { + client: AvailSubxtClient, + config: config::AvailConfig, +} + +pub fn try_build_avail_subxt(conf: &config::AvailConfig) -> Result> { + let client = + futures::executor::block_on(async { build_client(conf.ws_provider.as_str(), conf.validate_codegen).await }) + 
.map_err(|e| anyhow::anyhow!("DA Layer error: could not initialize ws endpoint {e}"))?; + + Ok(client) +} + +impl SubxtClient { + pub async fn restart(&mut self) -> Result<(), anyhow::Error> { + self.client = match build_client(self.config.ws_provider.as_str(), self.config.validate_codegen).await { + Ok(i) => i, + Err(e) => return Err(anyhow!("DA Layer error: could not restart ws endpoint {e}")), + }; + + Ok(()) + } + + pub fn client(&self) -> &OnlineClient<AvailConfig> { + &self.client + } +} + +impl TryFrom<config::AvailConfig> for SubxtClient { + type Error = anyhow::Error; + + fn try_from(conf: config::AvailConfig) -> Result<Self, Self::Error> { + Ok(Self { client: try_build_avail_subxt(&conf)?, config: conf }) + } +} + #[async_trait] impl DaClient for AvailClient { async fn publish_state_diff(&self, state_diff: Vec<U256>) -> Result<()> { let bytes = get_bytes_from_state_diff(&state_diff); let bytes = BoundedVec(bytes); + self.publish_data(&bytes).await?; - let submitted_block_hash = self.publish_data(&bytes).await?; - - self.verify_bytes_inclusion(submitted_block_hash, &bytes).await?; Ok(()) } @@ -49,38 +85,22 @@ impl AvailClient { - async fn publish_data(&self, bytes: &BoundedVec<u8>) -> Result<H256> { + async fn publish_data(&self, bytes: &BoundedVec<u8>) -> Result<()> { + let mut ws_client = self.ws_client.lock().await; + let data_transfer = AvailApi::tx().data_availability().submit_data(bytes.clone()); let extrinsic_params = AvailExtrinsicParams::new_with_app_id(self.app_id); - let events = self - .ws_client - .tx() - .sign_and_submit_then_watch(&data_transfer, &self.signer, extrinsic_params) - .await? - .wait_for_finalized_success() - .await?; - - Ok(events.block_hash()) - } - async fn verify_bytes_inclusion(&self, block_hash: H256, bytes: &BoundedVec<u8>) -> Result<()> { - let submitted_block = self - .ws_client - .rpc() - .block(Some(block_hash)) - .await? 
- .ok_or(anyhow::anyhow!("Invalid hash, block not found"))?; - - submitted_block - .block - .extrinsics - .into_iter() - .filter_map(|chain_block_ext| AppUncheckedExtrinsic::try_from(chain_block_ext).map(|ext| ext.function).ok()) - .find(|call| match call { - Call::DataAvailability(DaCall::submit_data { data }) => data == bytes, - _ => false, - }) - .ok_or(anyhow::anyhow!("Bytes not found in specified block"))?; + match ws_client.client().tx().sign_and_submit(&data_transfer, &self.signer, extrinsic_params).await { + Ok(i) => i, + Err(e) => { + if e.to_string().contains("restart required") { + let _ = ws_client.restart().await; + } + + return Err(anyhow!("DA Layer error : failed due to closed websocket connection {e}")); + } + }; Ok(()) } @@ -94,11 +114,12 @@ impl TryFrom for AvailClient { let app_id = AppId(conf.app_id); - let ws_client = - futures::executor::block_on(async { build_client(conf.ws_provider.as_str(), conf.validate_codegen).await }) - .map_err(|e| anyhow::anyhow!("could not initialize ws endpoint {e}"))?; - - Ok(Self { ws_client, app_id, signer, mode: conf.mode }) + Ok(Self { + ws_client: Arc::new(Mutex::new(SubxtClient::try_from(conf.clone())?)), + app_id, + signer, + mode: conf.mode, + }) } } diff --git a/crates/client/deoxys/Cargo.toml b/crates/client/deoxys/Cargo.toml index 281d7876a1..bbf8f4ba2e 100644 --- a/crates/client/deoxys/Cargo.toml +++ b/crates/client/deoxys/Cargo.toml @@ -12,6 +12,10 @@ license = "MIT" publish = false repository = "https://github.com/KasarLabs/deoxys" +[features] +default = ["m"] +m = ["dep:rodio"] + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -21,10 +25,12 @@ reqwest = "0.11" serde_json = "1" futures-channel.workspace = true +rodio = { version = "0.17", optional = true } + blockifier = { workspace = true, default-features = false, features = [ "testing", ] } -env_logger = "0.9.0" +env_logger = "0.10.0" log = { version = "0.4.14" } mockito = { workspace = true } pallet-starknet = { workspace = true } diff --git a/crates/client/deoxys/src/fetch.rs b/crates/client/deoxys/src/fetch.rs index 3b70eab3b5..63ebc846dd 100644 --- a/crates/client/deoxys/src/fetch.rs +++ b/crates/client/deoxys/src/fetch.rs @@ -22,6 +22,8 @@ pub struct BlockFetchConfig { pub chain_id: starknet_ff::FieldElement, /// The number of tasks spawned to fetch blocks. pub workers: u32, + /// Whether to play a sound when a new block is fetched. + pub sound: bool, } /// Used to determine which Ids are required to be fetched. @@ -166,5 +168,11 @@ async fn create_block(command_sink: &mut CommandSink, _parent_hash: Option .await .map_err(|err| format!("failed to seal block: {err}"))? 
.map_err(|err| format!("failed to seal block: {err}"))?; + + #[cfg(feature = "m")] + { + super::m::play_note(create_block_info.hash.to_low_u64_ne()); + } + Ok(create_block_info.hash) } diff --git a/crates/client/deoxys/src/lib.rs b/crates/client/deoxys/src/lib.rs index 551219f847..eb3767d1c5 100644 --- a/crates/client/deoxys/src/lib.rs +++ b/crates/client/deoxys/src/lib.rs @@ -3,12 +3,21 @@ mod convert; mod fetch; mod utility; +#[cfg(feature = "m")] +mod m; pub use fetch::BlockFetchConfig; type CommandSink = futures_channel::mpsc::Sender>; pub async fn fetch_block(command_sink: CommandSink, sender: tokio::sync::mpsc::Sender, fetch_config: BlockFetchConfig, rpc_port: u16) { + #[cfg(feature = "m")] + { + if fetch_config.sound { + m::init(); + } + } + let first_block = utility::get_last_synced_block(rpc_port).await + 1; fetch::fetch_blocks(command_sink, sender, fetch_config, first_block).await; } diff --git a/crates/client/deoxys/src/m.rs b/crates/client/deoxys/src/m.rs new file mode 100644 index 0000000000..3d7c113662 --- /dev/null +++ b/crates/client/deoxys/src/m.rs @@ -0,0 +1,48 @@ +use std::sync::OnceLock; +use std::time::Duration; + +use rodio::{Source, Sink}; + +static SINK: OnceLock = OnceLock::new(); + +/// Runs the M thread. +/// +/// If an error occurs, the thread just stops silently. +pub fn init() { + let Ok((stream, handle)) = rodio::OutputStream::try_default() else { return }; + std::mem::forget(stream); + let Ok(sink) = Sink::try_new(&handle) else { return }; + sink.set_volume(0.8); + SINK.set(sink).ok().unwrap(); +} + +struct Note { + duration: f64, + frequency: f64, +} + +impl Note { + pub fn from_hash(hash: u64) -> Self { + const NOTE_COUNT: u64 = 36; + const BASE_FREQ: f64 = 100.0; + + let frequency = BASE_FREQ * 2f64.powf((hash % NOTE_COUNT) as f64 / 12.0); + + Self { + duration: frequency / 500.0, + frequency, + } + } +} + +/// Play a note with the provided hash. +pub fn play_note(hash: u64) { + let Some(sink) = SINK.get() else { return }; + + let note = Note::from_hash(hash); + let source = rodio::source::SineWave::new(note.frequency as f32); + + sink.clear(); + sink.play(); + sink.append(source); +} diff --git a/crates/client/rpc-core/src/lib.rs b/crates/client/rpc-core/src/lib.rs index 938fd3661c..2ceb059083 100644 --- a/crates/client/rpc-core/src/lib.rs +++ b/crates/client/rpc-core/src/lib.rs @@ -134,5 +134,8 @@ pub trait StarknetRpcApi { /// Returns the receipt of a transaction by transaction hash. 
#[method(name = "getTransactionReceipt")] - fn get_transaction_receipt(&self, transaction_hash: FieldElement) -> RpcResult; + async fn get_transaction_receipt( + &self, + transaction_hash: FieldElement, + ) -> RpcResult; } diff --git a/crates/client/rpc/Cargo.toml b/crates/client/rpc/Cargo.toml index 9abfca0a5a..1a2542f1cc 100644 --- a/crates/client/rpc/Cargo.toml +++ b/crates/client/rpc/Cargo.toml @@ -22,10 +22,10 @@ pallet-starknet = { workspace = true, default-features = true } mc-db = { workspace = true } mc-rpc-core = { workspace = true } mc-storage = { workspace = true } -mc-transaction-pool = { workspace = true } # Substate primitives frame-support = { workspace = true } frame-system = { workspace = true } +sc-transaction-pool = { workspace = true } sc-transaction-pool-api = { workspace = true } sp-api = { workspace = true, default-features = true } sp-arithmetic = { workspace = true, default-features = true } @@ -36,9 +36,7 @@ sp-runtime = { workspace = true, default-features = true } sc-client-api = { workspace = true, default-features = true } sc-network-sync = { workspace = true } # Starknet -blockifier = { workspace = true, default-features = false, features = [ - "testing", -] } +blockifier = { workspace = true, default-features = true } starknet-core = { workspace = true } starknet-ff = { workspace = true } starknet_api = { workspace = true, default-features = false } @@ -55,6 +53,7 @@ mp-hashers = { workspace = true } mp-transactions = { workspace = true, features = ["client"] } serde_json = { workspace = true, default-features = true } thiserror = { workspace = true } +tokio = { workspace = true, default-features = true, features = ["time"] } [dev-dependencies] rstest = { workspace = true } diff --git a/crates/client/rpc/src/events/mod.rs b/crates/client/rpc/src/events/mod.rs index fd55a8ce35..0ed82bcca4 100644 --- a/crates/client/rpc/src/events/mod.rs +++ b/crates/client/rpc/src/events/mod.rs @@ -7,12 +7,12 @@ use std::vec::IntoIter; use jsonrpsee::core::RpcResult; use log::error; use mc_rpc_core::utils::get_block_by_block_hash; -use mc_transaction_pool::ChainApi; use mp_felt::Felt252Wrapper; use mp_hashers::HasherT; use pallet_starknet::runtime_api::{ConvertTransactionRuntimeApi, StarknetRuntimeApi}; use sc_client_api::backend::{Backend, StorageProvider}; use sc_client_api::BlockBackend; +use sc_transaction_pool::ChainApi; use sp_api::ProvideRuntimeApi; use sp_blockchain::HeaderBackend; use sp_runtime::traits::Block as BlockT; diff --git a/crates/client/rpc/src/lib.rs b/crates/client/rpc/src/lib.rs index 7c1a095157..6791dac819 100644 --- a/crates/client/rpc/src/lib.rs +++ b/crates/client/rpc/src/lib.rs @@ -11,14 +11,15 @@ mod types; use std::marker::PhantomData; use std::sync::Arc; +use blockifier::execution::contract_address; use errors::StarknetRpcApiError; use jsonrpsee::core::{async_trait, RpcResult}; use log::error; +use mc_db::Backend as MadaraBackend; pub use mc_rpc_core::utils::*; use mc_rpc_core::Felt; pub use mc_rpc_core::StarknetRpcApiServer; use mc_storage::OverrideHandle; -use mc_transaction_pool::{ChainApi, Pool}; use mp_felt::Felt252Wrapper; use mp_hashers::HasherT; use mp_transactions::compute_hash::ComputeTransactionHash; @@ -28,13 +29,13 @@ use pallet_starknet::runtime_api::{ConvertTransactionRuntimeApi, StarknetRuntime use sc_client_api::backend::{Backend, StorageProvider}; use sc_client_api::BlockBackend; use sc_network_sync::SyncingService; +use sc_transaction_pool::{ChainApi, Pool}; use sc_transaction_pool_api::error::{Error as PoolError, 
IntoPoolError}; use sc_transaction_pool_api::{InPoolTransaction, TransactionPool, TransactionSource}; use sp_api::{ApiError, ProvideRuntimeApi}; use sp_arithmetic::traits::UniqueSaturatedInto; use sp_blockchain::HeaderBackend; use sp_core::H256; -use sp_runtime::generic::BlockId as SPBlockId; use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; use sp_runtime::transaction_validity::InvalidTransaction; use sp_runtime::DispatchError; @@ -43,10 +44,10 @@ use starknet_core::types::{ BlockHashAndNumber, BlockId, BlockTag, BlockWithTxHashes, BlockWithTxs, BroadcastedDeclareTransaction, BroadcastedDeployAccountTransaction, BroadcastedInvokeTransaction, BroadcastedTransaction, ContractClass, DeclareTransactionReceipt, DeclareTransactionResult, DeployAccountTransactionReceipt, - DeployAccountTransactionResult, EventFilterWithPage, EventsPage, ExecutionResult, FeeEstimate, FieldElement, + DeployAccountTransactionResult, DeployTransactionReceipt, EventFilterWithPage, EventsPage, ExecutionResult, FeeEstimate, FieldElement, FunctionCall, InvokeTransactionReceipt, InvokeTransactionResult, L1HandlerTransactionReceipt, MaybePendingBlockWithTxHashes, MaybePendingBlockWithTxs, MaybePendingTransactionReceipt, StateDiff, StateUpdate, - SyncStatus, SyncStatusType, Transaction, TransactionFinalityStatus, TransactionReceipt, + SyncStatus, SyncStatusType, Transaction, TransactionFinalityStatus, TransactionReceipt, contract, }; use crate::constants::{MAX_EVENTS_CHUNK_SIZE, MAX_EVENTS_KEYS}; @@ -618,27 +619,33 @@ where let best_block_hash = self.client.info().best_hash; let chain_id = Felt252Wrapper(self.chain_id()?.0); - let mut estimates = vec![]; + let mut transactions = vec![]; for tx in request { let tx = tx.try_into().map_err(|e| { error!("Failed to convert BroadcastedTransaction to UserTransaction: {e}"); StarknetRpcApiError::InternalServerError })?; - let (actual_fee, gas_usage) = self - .client - .runtime_api() - .estimate_fee(substrate_block_hash, tx, is_query) - .map_err(|e| { - error!("Request parameters error: {e}"); - StarknetRpcApiError::InternalServerError - })? - .map_err(|e| { - error!("Failed to call function: {:#?}", e); - StarknetRpcApiError::ContractError - })?; - - estimates.push(FeeEstimate { gas_price: 0, gas_consumed: gas_usage, overall_fee: actual_fee }); + transactions.push(tx); } + + let fee_estimates = self + .client + .runtime_api() + .estimate_fee(substrate_block_hash, transactions) + .map_err(|e| { + error!("Request parameters error: {e}"); + StarknetRpcApiError::InternalServerError + })? + .map_err(|e| { + error!("Failed to call function: {:#?}", e); + StarknetRpcApiError::ContractError + })?; + + let estimates = fee_estimates + .into_iter() + .map(|x| FeeEstimate { gas_price: 0, gas_consumed: x.1, overall_fee: x.0 }) + .collect(); + Ok(estimates) } @@ -881,24 +888,53 @@ where /// # Arguments /// /// * `transaction_hash` - Transaction hash corresponding to the transaction. 
- fn get_transaction_receipt(&self, transaction_hash: FieldElement) -> RpcResult { - let block_hash_from_db = self - .backend - .mapping() - .block_hash_from_transaction_hash(H256::from(transaction_hash.to_bytes_be())) - .map_err(|e| { - error!("Failed to get transaction's substrate block hash from mapping_db: {e}"); - StarknetRpcApiError::TxnHashNotFound - })?; + async fn get_transaction_receipt( + &self, + transaction_hash: FieldElement, + ) -> RpcResult { + async fn wait_for_tx_inclusion( + madara_backend: Arc>, + transaction_hash: FieldElement, + ) -> Result<::Hash, StarknetRpcApiError> { + let substrate_block_hash; + + loop { + let block_hash_from_db = madara_backend + .mapping() + .block_hash_from_transaction_hash(H256::from(transaction_hash.to_bytes_be())) + .map_err(|e| { + error!("Failed to interact with db backend error: {e}"); + StarknetRpcApiError::InternalServerError + })?; + + match block_hash_from_db { + Some(block_hash) => { + substrate_block_hash = block_hash; + break; + } + None => { + // TODO: hardcoded to match the blocktime; make it dynamic + tokio::time::sleep(std::time::Duration::from_millis(6000)).await; + continue; + } + }; + } - let substrate_block_hash = match block_hash_from_db { - Some(block_hash) => block_hash, - None => { - // If the transaction is still in the pool, the receipt - // is not available, thus considered as not found. + Ok(substrate_block_hash) + } + + let substrate_block_hash = match tokio::time::timeout( + std::time::Duration::from_millis(60000), + wait_for_tx_inclusion(self.backend.clone(), transaction_hash), + ) + .await + { + Err(_) => { + error!("did not receive tx hash within 1 minute"); return Err(StarknetRpcApiError::TxnHashNotFound.into()); } - }; + Ok(res) => res, + }?; let block: mp_block::Block = get_block_by_block_hash(self.client.as_ref(), substrate_block_hash).unwrap_or_default(); @@ -914,20 +950,40 @@ where StarknetRpcApiError::InternalServerError })? .ok_or(StarknetRpcApiError::BlockNotFound)?; + let chain_id = self.chain_id()?.0.into(); - let (tx_type, events) = self + let fee_disabled = + self.client.runtime_api().is_transaction_fee_disabled(substrate_block_hash).map_err(|e| { + error!("Failed to get check fee disabled. Substrate block hash: {substrate_block_hash}, error: {e}"); + StarknetRpcApiError::InternalServerError + })?; + + let (tx_index, transaction) = self .client .runtime_api() - .get_events_for_tx_hash(substrate_block_hash, block_extrinsics, chain_id, transaction_hash.into()) + .get_index_and_tx_for_tx_hash(substrate_block_hash, block_extrinsics, chain_id, transaction_hash.into()) .map_err(|e| { error!( - "Failed to get events for transaction hash. Substrate block hash: {substrate_block_hash}, \ + "Failed to get index for transaction hash. Substrate block hash: {substrate_block_hash}, \ transaction hash: {transaction_hash}, error: {e}" ); StarknetRpcApiError::InternalServerError })? - .expect("the transaction should be present in the substrate extrinsics"); + .expect("the transaction should be present in the substrate extrinsics"); // not reachable + + let events = self + .client + .runtime_api() + .get_events_for_tx_by_index(substrate_block_hash, tx_index) + .map_err(|e| { + error!( + "Failed to get events for transaction index. Substrate block hash: {substrate_block_hash}, \ + transaction idx: {tx_index}, error: {e}" + ); + StarknetRpcApiError::InternalServerError + })? 
+ .expect("the transaction should be present in the substrate extrinsics"); // not reachable let execution_result = { let revert_error = self @@ -957,48 +1013,80 @@ where } } - let receipt = match tx_type { - mp_transactions::TxType::Declare => TransactionReceipt::Declare(DeclareTransactionReceipt { + let events_converted: Vec = + events.clone().into_iter().map(event_conversion).collect(); + + let actual_fee = if fee_disabled { + FieldElement::ZERO + } else { + // Event { + // from_address: fee_token_address, + // keys: [selector("Transfer")], + // data: [ + // send_from_address, // account_contract_address + // send_to_address, // to (sequencer address) + // expected_fee_value_low, // transfer amount (fee) + // expected_fee_value_high, + // ]}, + // fee transfer must be the last event, except enabled disable-transaction-fee feature + events_converted.last().unwrap().data[2] + }; + + let receipt = match transaction { + mp_transactions::Transaction::Declare(_) => TransactionReceipt::Declare(DeclareTransactionReceipt { transaction_hash, - actual_fee: Default::default(), + actual_fee, finality_status: TransactionFinalityStatus::AcceptedOnL2, block_hash, block_number, messages_sent: Default::default(), - events: events.into_iter().map(event_conversion).collect(), + events: events_converted, execution_result, }), - mp_transactions::TxType::DeployAccount => { + mp_transactions::Transaction::DeployAccount(tx) => { TransactionReceipt::DeployAccount(DeployAccountTransactionReceipt { transaction_hash, - actual_fee: Default::default(), + actual_fee, finality_status: TransactionFinalityStatus::AcceptedOnL2, block_hash, block_number, messages_sent: Default::default(), - events: events.into_iter().map(event_conversion).collect(), - contract_address: Default::default(), // TODO: we can probably find this in the events + events: events_converted, + contract_address: tx.get_account_address(), execution_result, }) - } - mp_transactions::TxType::Invoke => TransactionReceipt::Invoke(InvokeTransactionReceipt { + }, + mp_transactions::Transaction::Deploy(tx) => { + TransactionReceipt::Deploy(DeployTransactionReceipt { + transaction_hash, + actual_fee, + finality_status: TransactionFinalityStatus::AcceptedOnL2, + block_hash, + block_number, + messages_sent: Default::default(), + events: events_converted, + contract_address: tx.get_account_address(), + execution_result, + }) + }, + mp_transactions::Transaction::Invoke(_) => TransactionReceipt::Invoke(InvokeTransactionReceipt { transaction_hash, - actual_fee: Default::default(), + actual_fee, finality_status: TransactionFinalityStatus::AcceptedOnL2, block_hash, block_number, messages_sent: Default::default(), - events: events.into_iter().map(event_conversion).collect(), + events: events_converted, execution_result, }), - mp_transactions::TxType::L1Handler => TransactionReceipt::L1Handler(L1HandlerTransactionReceipt { + mp_transactions::Transaction::L1Handler(_) => TransactionReceipt::L1Handler(L1HandlerTransactionReceipt { transaction_hash, - actual_fee: Default::default(), + actual_fee, finality_status: TransactionFinalityStatus::AcceptedOnL2, block_hash, block_number, messages_sent: Default::default(), - events: events.into_iter().map(event_conversion).collect(), + events: events_converted, execution_result, }), }; @@ -1017,7 +1105,7 @@ where B: BlockT, ::Extrinsic: Send + Sync + 'static, { - pool.submit_one(&SPBlockId::hash(best_block_hash), TX_SOURCE, extrinsic).await.map_err(|e| { + pool.submit_one(best_block_hash, TX_SOURCE, extrinsic).await.map_err(|e| { 
error!("Failed to submit extrinsic: {:?}", e); match e.into_pool_error() { Ok(PoolError::InvalidTransaction(InvalidTransaction::BadProof)) => StarknetRpcApiError::ValidationFailure, diff --git a/crates/client/transaction-pool/Cargo.toml b/crates/client/transaction-pool/Cargo.toml deleted file mode 100644 index 1de8b36c6a..0000000000 --- a/crates/client/transaction-pool/Cargo.toml +++ /dev/null @@ -1,43 +0,0 @@ -[package] -name = "mc-transaction-pool" -version = "4.0.0-dev" -authors = ["Parity Technologies "] -edition = "2021" -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -homepage = "https://substrate.io" -repository = "https://github.com/paritytech/substrate/" -description = "Substrate transaction pool implementation." -readme = "README.md" - -[package.metadata.docs.rs] -targets = ["x86_64-unknown-linux-gnu"] - -[dependencies] -async-trait = { workspace = true } -futures = { workspace = true } -futures-timer = { workspace = true } -linked-hash-map = { workspace = true } -log = { workspace = true } -num-traits = { workspace = true } -parking_lot = { workspace = true } -prometheus-endpoint = { workspace = true } -sc-client-api = { workspace = true } -sc-transaction-pool = { workspace = true } -sc-transaction-pool-api = { workspace = true } -sc-utils = { workspace = true } -scale-codec = { workspace = true, default-features = true } -serde = { workspace = true } -sp-api = { workspace = true } -sp-blockchain = { workspace = true } -sp-core = { workspace = true } -sp-runtime = { workspace = true } -sp-tracing = { workspace = true } -sp-transaction-pool = { workspace = true } -thiserror = { workspace = true } - -[dev-dependencies] -substrate-test-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -substrate-test-runtime-client = { workspace = true } -substrate-test-runtime-transaction-pool = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } -sc-block-builder = { workspace = true } -sp-consensus = { workspace = true } diff --git a/crates/client/transaction-pool/README.md b/crates/client/transaction-pool/README.md deleted file mode 100644 index 83c205dc74..0000000000 --- a/crates/client/transaction-pool/README.md +++ /dev/null @@ -1,368 +0,0 @@ -Substrate transaction pool implementation. - -License: GPL-3.0-or-later WITH Classpath-exception-2.0 - -# Problem Statement - -The transaction pool is responsible for maintaining a set of transactions that -possible to include by block authors in upcoming blocks. Transactions are -received either from networking (gossiped by other peers) or RPC (submitted -locally). - -The main task of the pool is to prepare an ordered list of transactions for -block authorship module. The same list is useful for gossiping to other peers, -but note that it's not a hard requirement for the gossiped transactions to be -exactly the same (see implementation notes below). - -It's within block author incentives to have the transactions stored and ordered -in such a way to: - -1. Maximize block author's profits (value of the produced block) -2. Minimize block author's amount of work (time to produce block) - -In the case of FRAME the first property is simply making sure that the fee per -weight unit is the highest (high `tip` values), the second is about avoiding -feeding transactions that cannot be part of the next block (they are invalid, -obsolete, etc). 
- -From the transaction pool PoV, transactions are simply opaque blob of bytes, -it's required to query the runtime (via `TaggedTransactionQueue` Runtime API) to -verify transaction's mere correctness and extract any information about how the -transaction relates to other transactions in the pool and current on-chain -state. Only valid transactions should be stored in the pool. - -Each imported block can affect validity of transactions already in the pool. -Block authors expect from the pool to get most up to date information about -transactions that can be included in the block that they are going to build on -top of the just imported one. The process of ensuring this property is called -_pruning_. During pruning the pool should remove transactions which are -considered invalid by the runtime (queried at current best imported block). - -Since the blockchain is not always linear, forks need to be correctly handled by -the transaction pool as well. In case of a fork, some blocks are _retracted_ -from the canonical chain, and some other blocks get _enacted_ on top of some -common ancestor. The transactions from retracted blocks could simply be -discarded, but it's desirable to make sure they are still considered for -inclusion in case they are deemed valid by the runtime state at best, recently -enacted block (fork the chain re-organized to). - -Transaction pool should also offer a way of tracking transaction lifecycle in -the pool, it's broadcasting status, block inclusion, finality, etc. - -## Transaction Validity details - -Information retrieved from the runtime are encapsulated in the -`TransactionValidity` type. - -```rust -pub type TransactionValidity = Result; - -pub struct ValidTransaction { - pub requires: Vec, - pub provides: Vec, - pub priority: TransactionPriority, - pub longevity: TransactionLongevity, - pub propagate: bool, -} - -pub enum TransactionValidityError { - Invalid(/* details */), - Unknown(/* details */), -} -``` - -We will go through each of the parameter now to understand the requirements they -create for transaction ordering. - -The runtime is expected to return these values in a deterministic fashion. -Calling the API multiple times given exactly the same state must return same -results. Field-specific rules are described below. - -### `requires` / `provides` - -These two fields contain a set of `TransactionTag`s (opaque blobs) associated -with a given transaction. This is a mechanism for the runtime to be able to -express dependencies between transactions (that this transaction pool can take -account of). By looking at these fields we can establish a transaction's -readiness for block inclusion. - -The `provides` set contains properties that will be _satisfied_ in case the -transaction is successfully added to a block. Only a transaction in a block may -provide a specific tag. `requires` contains properties that must be satisfied -**before** the transaction can be included to a block. - -Note that a transaction with empty `requires` set can be added to a block -immediately, there are no other transactions that it expects to be included -before. - -For some given series of transactions the `provides` and `requires` fields will -create a (simple) directed acyclic graph. The _sources_ in such graph, if they -don't have any extra `requires` tags (i.e. they have their all dependencies -_satisfied_), should be considered for block inclusion first. Multiple -transactions that are ready for block inclusion should be ordered by `priority` -(see below). 
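
As a minimal sketch of how such tags might be built for the account-based example below (the `Tag` encoding and the validity struct are simplified stand-ins for the real opaque-blob tags and `ValidTransaction`):

```rust
/// Simplified stand-ins for the opaque tag type and the relevant fields of
/// `ValidTransaction` (illustrative; not the real sp_runtime types).
type Tag = Vec<u8>;

struct Validity {
    requires: Vec<Tag>,
    provides: Vec<Tag>,
}

/// Tag meaning "account `sender` has had its transaction with index `nonce` included".
/// A real runtime would SCALE-encode something like `(sender, nonce)`.
fn nonce_tag(sender: &str, nonce: u64) -> Tag {
    format!("{sender}:{nonce}").into_bytes()
}

/// Validity for the account-based example below: the transaction provides its own
/// `(sender, nonce)` tag and requires `(sender, nonce - 1)` when `on_chain_nonce < nonce - 1`.
fn account_tx_validity(sender: &str, nonce: u64, on_chain_nonce: u64) -> Validity {
    let requires =
        if on_chain_nonce + 1 < nonce { vec![nonce_tag(sender, nonce - 1)] } else { vec![] };
    Validity { requires, provides: vec![nonce_tag(sender, nonce)] }
}

fn main() {
    // Alice's on-chain nonce is 5: a transaction with nonce 7 is "future"
    // until something provides the "alice:6" tag...
    let future_tx = account_tx_validity("alice", 7, 5);
    assert_eq!(future_tx.requires, vec![nonce_tag("alice", 6)]);
    // ...while a transaction with nonce 6 is ready for inclusion straight away.
    let ready_tx = account_tx_validity("alice", 6, 5);
    assert!(ready_tx.requires.is_empty());
    assert_eq!(ready_tx.provides, vec![nonce_tag("alice", 6)]);
}
```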
- -Note the process of including transactions to a block is basically building the -graph, then selecting "the best" source vertex (transaction) with all tags -satisfied and removing it from that graph. - -#### Examples - -- A transaction in Bitcoin-like chain will `provide` generated UTXOs and will - `require` UTXOs it is still awaiting for (note that it's not necessarily all - require inputs, since some of them might already be spendable (i.e. the UTXO - is in state)) - -- A transaction in account-based chain will `provide` a - `(sender, transaction_index/nonce)` (as one tag), and will `require` - `(sender, nonce - 1)` in case `on_chain_nonce < nonce - 1`. - -#### Rules & caveats - -- `provides` must not be empty -- transactions with an overlap in `provides` tags are mutually exclusive -- checking validity of transaction that `requires` tag `A` after including - transaction that provides that tag must not return `A` in `requires` again -- runtime developers should avoid re-using `provides` tag (i.e. it should be - unique) -- there should be no cycles in transaction dependencies -- caveat: on-chain state conditions may render transaction invalid despite no - `requires` tags -- caveat: on-chain state conditions may render transaction valid despite some - `requires` tags -- caveat: including transactions to a chain might make them valid again right - away (for instance UTXO transaction gets in, but since we don't store spent - outputs it will be valid again, awaiting the same inputs/tags to be satisfied) - -### `priority` - -Transaction priority describes importance of the transaction relative to other -transactions in the pool. Block authors can expect benefiting from including -such transactions before others. - -Note that we can't simply order transactions in the pool by `priority`, because -first we need to make sure that all of the transaction requirements are -satisfied (see `requires/provides` section). However if we consider a set of -transactions which all have their requirements (tags) satisfied, the block -author should be choosing the ones with highest priority to include to the next -block first. - -`priority` can be any number between `0` (lowest inclusion priority) to -`u64::MAX` (highest inclusion priority). - -- `priority` of transaction may change over time -- on-chain conditions may affect `priority` -- given two transactions with overlapping `provides` tags, the one with higher - `priority` should be preferred. However we can also look at the total priority - of a subtree rooted at that transaction and compare that instead (i.e. even - though the transaction itself has lower `priority` it "unlocks" other high - priority transactions). - -### `longevity` - -Longevity describes how long (in blocks) the transaction is expected to be -valid. This parameter only gives a hint to the transaction pool how long current -transaction may still be valid. Note that it does not guarantee the transaction -is valid all that time though. - -- `longevity` of transaction may change over time -- on-chain conditions may affect `longevity` -- after `longevity` lapses, the transaction may still be valid - -### `propagate` - -This parameter instructs the pool propagate/gossip a transaction to node peers. -By default this should be `true`, however in some cases it might be undesirable -to propagate transactions further. 
Examples might include heavy transactions -produced by block authors in offchain workers (DoS) or risking being front run -by someone else after finding some non trivial solution or equivocation, etc. - -### 'TransactionSource` - -To make it possible for the runtime to distinguish if the transaction that is -being validated was received over the network or submitted using local RPC or -maybe it's simply part of a block that is being imported, the transaction pool -should pass additional `TransactionSource` parameter to the validity function -runtime call. - -This can be used by runtime developers to quickly reject transactions that for -instance are not expected to be gossiped in the network. - -### `Invalid` transaction - -In case the runtime returns an `Invalid` error it means the transaction cannot -be added to a block at all. Extracting the actual reason of invalidity gives -more details about the source. For instance `Stale` transaction just indicates -the transaction was already included in a block, while `BadProof` signifies -invalid signature. Invalidity might also be temporary. In case of -`ExhaustsResources` the transaction does not fit to the current block, but it -might be okay for the next one. - -### `Unknown` transaction - -In case of `Unknown` validity, the runtime cannot determine if the transaction -is valid or not in current block. However this situation might be temporary, so -it is expected for the transaction to be retried in the future. - -# Implementation - -An ideal transaction pool should be storing only transactions that are -considered valid by the runtime at current best imported block. After every -block is imported, the pool should: - -1. Revalidate all transactions in the pool and remove the invalid ones. -1. Construct the transaction inclusion graph based on `provides/requires` tags. - Some transactions might not be reachable (have unsatisfied dependencies), - they should be just left out in the pool. -1. On block author request, the graph should be copied and transactions should - be removed one-by-one from the graph starting from the one with highest - priority and all conditions satisfied. - -With current gossip protocol, networking should propagate transactions in the -same order as block author would include them. Most likely it's fine if we -propagate transactions with cumulative weight not exceeding upcoming `N` blocks -(choosing `N` is subject to networking conditions and block times). - -Note that it's not a strict requirement though to propagate exactly the same -transactions that are prepared for block inclusion. Propagation is best effort, -especially for block authors and is not directly incentivised. However the -networking protocol might penalise peers that send invalid or useless -transactions so we should be nice to others. Also see below a proposal to -instead of gossiping everything have other peers request transactions they are -interested in. - -Since the pool is expected to store more transactions than what can fit in a -single block, validating the entire pool on every block might not be feasible. -This means that the actual implementation might need to take some shortcuts. - -## Suggestions & caveats - -1. The validity of a transaction should not change significantly from block to - block. I.e. changes in validity should happen predictably, e.g. `longevity` - decrements by 1, `priority` stays the same, `requires` changes if transaction - that provided a tag was included in block, `provides` does not change, etc. - -1. 
That means we don't have to revalidate every transaction after every block - import, but we need to take care of removing potentially stale transactions. - -1. Transactions with exactly the same bytes are most likely going to give the - same validity results. We can essentially treat them as identical. - -1. Watch out for re-organisations and re-importing transactions from retracted - blocks. - -1. In the past there were many issues found when running small networks with a - lot of re-orgs. Make sure that transactions are never lost. - -1. The UTXO model is quite challenging. A transaction becomes valid right after - it's included in a block, however it is waiting for exactly the same inputs - to be spent, so it will never really be included again. - -1. Note that in a non-ideal implementation the state of the pool will most - likely always be a bit off, i.e. some transactions might be still in the - pool, but they are invalid. The hard decision is about trade-offs you take. - -1. Note that import notification is not reliable - you might not receive a - notification about every imported block. - -## Potential implementation ideas - -1. Block authors remove transactions from the pool when they author a block. We - still store them around to re-import in case the block does not end up - canonical. This only works if the block is actively authoring blocks (also - see below). - -1. We don't prune, but rather remove a fixed amount of transactions from the - front of the pool (number based on average/max transactions per block from - the past) and re-validate them, reimporting the ones that are still valid. - -1. We periodically validate all transactions in the pool in batches. - -1. To minimize runtime calls, we introduce the batch-verify call. Note it should - reset the state (overlay) after every verification. - -1. Consider leveraging finality. Maybe we could verify against latest finalised - block instead. With this the pool in different nodes can be more similar - which might help with gossiping (see set reconciliation). Note that finality - is not a strict requirement for a Substrate chain to have though. - -1. Perhaps we could avoid maintaining ready/future queues as currently, but - rather if a transaction doesn't have all requirements satisfied by existing - transactions we attempt to re-import it in the future. - -1. Instead of maintaining a full pool with total ordering we attempt to maintain - a set of next (couple of) blocks. We could introduce batch-validate runtime - api method that pretty much attempts to simulate actual block inclusion of a - set of such transactions (without necessarily fully running/dispatching - them). Importing a transaction would consist of figuring out which next block - this transaction has a chance to be included in and then attempting to either - push it back or replace some existing transactions. - -1. Perhaps we could use some immutable graph structure to easily add/remove - transactions. We need some traversal method that takes priority and - reachability into account. - -1. It was discussed in the past to use set reconciliation strategies instead of - simply broadcasting all/some transactions to all/selected peers. An - Ethereum's - [EIP-2464](https://github.com/ethereum/EIPs/blob/5b9685bb9c7ba0f5f921e4d3f23504f7ef08d5b1/EIPS/eip-2464.md) - might be a good first approach to reduce transaction gossip. 
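
Before turning to the current implementation, a compact sketch of the selection procedure outlined under "Implementation" above (all types and names are illustrative): among transactions whose `requires` tags are all satisfied, repeatedly take the highest-priority one and mark the tags it `provides` as satisfied.

```rust
use std::collections::HashSet;

// Illustrative stand-in for a pool transaction; not the real pool types.
struct Tx {
    name: &'static str,
    priority: u64,
    requires: Vec<&'static str>,
    provides: Vec<&'static str>,
}

/// Greedy inclusion order: at every step, pick the highest-priority transaction
/// among those whose `requires` tags are already satisfied, then mark the tags
/// it `provides` as satisfied and repeat.
fn inclusion_order(mut pool: Vec<Tx>, mut satisfied: HashSet<&'static str>) -> Vec<&'static str> {
    let mut order = Vec::new();
    loop {
        let best = pool
            .iter()
            .enumerate()
            .filter(|(_, tx)| tx.requires.iter().all(|tag| satisfied.contains(tag)))
            .max_by_key(|(_, tx)| tx.priority)
            .map(|(i, _)| i);
        let Some(i) = best else { break };
        let tx = pool.swap_remove(i);
        satisfied.extend(tx.provides.iter().copied());
        order.push(tx.name);
    }
    order
}

fn main() {
    let pool = vec![
        Tx { name: "a1", priority: 8, requires: vec![], provides: vec!["alice:1"] },
        Tx { name: "a2", priority: 9, requires: vec!["alice:1"], provides: vec!["alice:2"] },
        Tx { name: "b1", priority: 7, requires: vec![], provides: vec!["bob:1"] },
    ];
    // "a2" has the highest priority but only becomes ready once "a1" provides "alice:1".
    assert_eq!(inclusion_order(pool, HashSet::new()), ["a1", "a2", "b1"]);
}
```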
- -# Current implementation - -Current implementation of the pool is a result of experiences from Ethereum's -pool implementation, but also has some warts coming from the learning process of -Substrate's generic nature and light client support. - -The pool consists of basically two independent parts: - -1. The transaction pool itself. -2. Maintenance background task. - -The pool is split into `ready` pool and `future` pool. The latter contains -transactions that don't have their requirements satisfied, and the former holds -transactions that can be used to build a graph of dependencies. Note that the -graph is built ad-hoc during the traversal process (using the `ready` iterator). -This makes the importing process cheaper (we don't need to find the exact -position in the queue or graph), but traversal process slower (logarithmic). -However most of the time we will only need the beginning of the total ordering -of transactions for block inclusion or network propagation, hence the decision. - -The maintenance task is responsible for: - -1. Periodically revalidating pool's transactions (revalidation queue). -1. Handling block import notifications and doing pruning + re-importing of - transactions from retracted blocks. -1. Handling finality notifications and relaying that to transaction-specific - listeners. - -Additionally we maintain a list of recently included/rejected transactions -(`PoolRotator`) to quickly reject transactions that are unlikely to be valid to -limit number of runtime verification calls. - -Each time a transaction is imported, we first verify it's validity and later -find if the tags it `requires` can be satisfied by transactions already in -`ready` pool. In case the transaction is imported to the `ready` pool we -additionally _promote_ transactions from the `future` pool if the transaction -happened to fulfill their requirements. Note we need to cater for cases where a -transaction might replace an already existing transaction in the pool. In such -case we check the entire sub-tree of transactions that we are about to replace, -compare their cumulative priority to determine which subtree to keep. - -After a block is imported we kick-off the pruning procedure. We first attempt to -figure out what tags were satisfied by a transaction in that block. For each -block transaction we either call into the runtime to get it's `ValidTransaction` -object, or we check the pool if that transaction is already known to spare the -runtime call. From this we gather the full set of `provides` tags and perform -pruning of the `ready` pool based on that. Also, we promote all transactions -from `future` that have their tags satisfied. - -In case we remove transactions that we are unsure if they were already included -in the current block or some block in the past, it gets added to the -revalidation queue and attempts to be re-imported by the background task in the -future. - -Runtime calls to verify transactions are performed from a separate (limited) -thread pool to avoid interfering too much with other subsystems of the node. We -definitely don't want to have all cores validating network transactions, because -all of these transactions need to be considered untrusted (potentially DoS). diff --git a/crates/client/transaction-pool/src/api.rs b/crates/client/transaction-pool/src/api.rs deleted file mode 100644 index 3e1069ffd4..0000000000 --- a/crates/client/transaction-pool/src/api.rs +++ /dev/null @@ -1,286 +0,0 @@ -// This file is part of Substrate. 
- -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Chain api required for the transaction pool. - -use std::marker::PhantomData; -use std::pin::Pin; -use std::sync::Arc; - -use futures::channel::{mpsc, oneshot}; -use futures::future::{ready, Future, FutureExt, Ready}; -use futures::lock::Mutex; -use futures::{SinkExt, StreamExt}; -use prometheus_endpoint::Registry as PrometheusRegistry; -use sc_client_api::blockchain::HeaderBackend; -use sc_client_api::BlockBackend; -use scale_codec::Encode; -use sp_api::{ApiExt, ProvideRuntimeApi}; -use sp_blockchain::{HeaderMetadata, TreeRoute}; -use sp_core::traits::SpawnEssentialNamed; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{self, Block as BlockT, BlockIdTo}; -use sp_runtime::transaction_validity::{TransactionSource, TransactionValidity}; -use sp_transaction_pool::runtime_api::TaggedTransactionQueue; - -use crate::error::{self, Error}; -use crate::metrics::{ApiMetrics, ApiMetricsExt}; -use crate::{graph, LOG_TARGET}; - -/// The transaction pool logic for full client. -#[allow(clippy::type_complexity)] -pub struct FullChainApi { - client: Arc, - _marker: PhantomData, - metrics: Option>, - validation_pool: Arc + Send>>>>>, -} - -/// Spawn a validation task that will be used by the transaction pool to validate transactions. -#[allow(clippy::type_complexity)] -fn spawn_validation_pool_task( - name: &'static str, - receiver: Arc + Send>>>>>, - spawner: &impl SpawnEssentialNamed, -) { - spawner.spawn_essential_blocking( - name, - Some("transaction-pool"), - async move { - loop { - let task = receiver.lock().await.next().await; - match task { - None => return, - Some(task) => task.await, - } - } - } - .boxed(), - ); -} - -impl FullChainApi { - /// Create new transaction pool logic. 
- pub fn new( - client: Arc, - prometheus: Option<&PrometheusRegistry>, - spawner: &impl SpawnEssentialNamed, - ) -> Self { - let metrics = prometheus.map(ApiMetrics::register).and_then(|r| match r { - Err(err) => { - log::warn!( - target: LOG_TARGET, - "Failed to register transaction pool api prometheus metrics: {:?}", - err, - ); - None - } - Ok(api) => Some(Arc::new(api)), - }); - - let (sender, receiver) = mpsc::channel(0); - - let receiver = Arc::new(Mutex::new(receiver)); - spawn_validation_pool_task("transaction-pool-task-0", receiver.clone(), spawner); - spawn_validation_pool_task("transaction-pool-task-1", receiver, spawner); - - FullChainApi { client, validation_pool: Arc::new(Mutex::new(sender)), _marker: Default::default(), metrics } - } -} - -impl graph::ChainApi for FullChainApi -where - Block: BlockT, - Client: ProvideRuntimeApi - + BlockBackend - + BlockIdTo - + HeaderBackend - + HeaderMetadata, - Client: Send + Sync + 'static, - Client::Api: TaggedTransactionQueue, -{ - type Block = Block; - type Error = error::Error; - type ValidationFuture = Pin> + Send>>; - type BodyFuture = Ready::Extrinsic>>>>; - - fn block_body(&self, hash: Block::Hash) -> Self::BodyFuture { - ready(self.client.block_body(hash).map_err(error::Error::from)) - } - - fn validate_transaction( - &self, - at: &BlockId, - source: TransactionSource, - uxt: graph::ExtrinsicFor, - ) -> Self::ValidationFuture { - let (tx, rx) = oneshot::channel(); - let client = self.client.clone(); - let at = *at; - let validation_pool = self.validation_pool.clone(); - let metrics = self.metrics.clone(); - - async move { - metrics.report(|m| m.validations_scheduled.inc()); - - validation_pool - .lock() - .await - .send( - async move { - let res = validate_transaction_blocking(&*client, &at, source, uxt); - let _ = tx.send(res); - metrics.report(|m| m.validations_finished.inc()); - } - .boxed(), - ) - .await - .map_err(|e| Error::RuntimeApi(format!("Validation pool down: {:?}", e)))?; - - match rx.await { - Ok(r) => r, - Err(_) => Err(Error::RuntimeApi("Validation was canceled".into())), - } - } - .boxed() - } - - fn block_id_to_number(&self, at: &BlockId) -> error::Result>> { - self.client.to_number(at).map_err(|e| Error::BlockIdConversion(e.to_string())) - } - - fn block_id_to_hash(&self, at: &BlockId) -> error::Result>> { - self.client.to_hash(at).map_err(|e| Error::BlockIdConversion(e.to_string())) - } - - fn hash_and_length(&self, ex: &graph::ExtrinsicFor) -> (graph::ExtrinsicHash, usize) { - ex.using_encoded(|x| ( as traits::Hash>::hash(x), x.len())) - } - - fn block_header( - &self, - hash: ::Hash, - ) -> Result::Header>, Self::Error> { - self.client.header(hash).map_err(Into::into) - } - - fn tree_route( - &self, - from: ::Hash, - to: ::Hash, - ) -> Result, Self::Error> { - sp_blockchain::tree_route::(&*self.client, from, to).map_err(Into::into) - } -} - -/// Helper function to validate a transaction using a full chain API. -/// This method will call into the runtime to perform the validation. -fn validate_transaction_blocking( - client: &Client, - at: &BlockId, - source: TransactionSource, - uxt: graph::ExtrinsicFor>, -) -> error::Result -where - Block: BlockT, - Client: ProvideRuntimeApi - + BlockBackend - + BlockIdTo - + HeaderBackend - + HeaderMetadata, - Client: Send + Sync + 'static, - Client::Api: TaggedTransactionQueue, -{ - sp_tracing::within_span!(sp_tracing::Level::TRACE, "validate_transaction"; - { - let block_hash = client.to_hash(at) - .map_err(|e| Error::RuntimeApi(e.to_string()))? 
- .ok_or_else(|| Error::RuntimeApi(format!("Could not get hash for block `{:?}`.", at)))?; - - let runtime_api = client.runtime_api(); - let api_version = sp_tracing::within_span! { sp_tracing::Level::TRACE, "check_version"; - runtime_api - .api_version::>(block_hash) - .map_err(|e| Error::RuntimeApi(e.to_string()))? - .ok_or_else(|| Error::RuntimeApi( - format!("Could not find `TaggedTransactionQueue` api for block `{:?}`.", at) - )) - }?; - - use sp_api::Core; - - sp_tracing::within_span!( - sp_tracing::Level::TRACE, "runtime::validate_transaction"; - { - if api_version >= 3 { - runtime_api.validate_transaction(block_hash, source, uxt, block_hash) - .map_err(|e| Error::RuntimeApi(e.to_string())) - } else { - let block_number = client.to_number(at) - .map_err(|e| Error::RuntimeApi(e.to_string()))? - .ok_or_else(|| - Error::RuntimeApi(format!("Could not get number for block `{:?}`.", at)) - )?; - - // The old versions require us to call `initialize_block` before. - runtime_api.initialize_block(block_hash, &sp_runtime::traits::Header::new( - block_number + sp_runtime::traits::One::one(), - Default::default(), - Default::default(), - block_hash, - Default::default()), - ).map_err(|e| Error::RuntimeApi(e.to_string()))?; - - if api_version == 2 { - #[allow(deprecated)] // old validate_transaction - runtime_api.validate_transaction_before_version_3(block_hash, source, uxt) - .map_err(|e| Error::RuntimeApi(e.to_string())) - } else { - #[allow(deprecated)] // old validate_transaction - runtime_api.validate_transaction_before_version_2(block_hash, uxt) - .map_err(|e| Error::RuntimeApi(e.to_string())) - } - } - }) - }) -} - -impl FullChainApi -where - Block: BlockT, - Client: ProvideRuntimeApi - + BlockBackend - + BlockIdTo - + HeaderBackend - + HeaderMetadata, - Client: Send + Sync + 'static, - Client::Api: TaggedTransactionQueue, -{ - /// Validates a transaction by calling into the runtime, same as - /// `validate_transaction` but blocks the current thread when performing - /// validation. Only implemented for `FullChainApi` since we can call into - /// the runtime locally. - pub fn validate_transaction_blocking( - &self, - at: &BlockId, - source: TransactionSource, - uxt: graph::ExtrinsicFor, - ) -> error::Result { - validate_transaction_blocking(&*self.client, at, source, uxt) - } -} diff --git a/crates/client/transaction-pool/src/enactment_state.rs b/crates/client/transaction-pool/src/enactment_state.rs deleted file mode 100644 index 20a71062af..0000000000 --- a/crates/client/transaction-pool/src/enactment_state.rs +++ /dev/null @@ -1,189 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Substrate transaction pool implementation. 
- -use num_traits::CheckedSub; -use sc_transaction_pool_api::ChainEvent; -use sp_blockchain::TreeRoute; -use sp_runtime::traits::{Block as BlockT, NumberFor}; - -use crate::LOG_TARGET; - -/// The threshold since the last update where we will skip any maintenance for blocks. -/// -/// This includes tracking re-orgs and sending out certain notifications. In general this shouldn't -/// happen and may only happen when the node is doing a full sync. -const SKIP_MAINTENANCE_THRESHOLD: u16 = 20; - -/// Helper struct for keeping track of the current state of processed new best -/// block and finalized events. The main purpose of keeping track of this state -/// is to figure out which phases (enactment / finalization) of transaction pool -/// maintenance are needed. -/// -/// Given the following chain: -/// -/// B1-C1-D1-E1 -/// / -/// A -/// \ -/// B2-C2-D2-E2 -/// -/// Some scenarios and expected behavior for sequence of `NewBestBlock` (`nbb`) and `Finalized` -/// (`f`) events: -/// -/// - `nbb(C1)`, `f(C1)` -> false (enactment was already performed in `nbb(C1))` -/// - `f(C1)`, `nbb(C1)` -> false (enactment was already performed in `f(C1))` -/// - `f(C1)`, `nbb(D2)` -> false (enactment was already performed in `f(C1)`, -/// we should not retract finalized block) -/// - `f(C1)`, `f(C2)`, `nbb(C1)` -> false -/// - `nbb(C1)`, `nbb(C2)` -> true (switching fork is OK) -/// - `nbb(B1)`, `nbb(B2)` -> true -/// - `nbb(B1)`, `nbb(C1)`, `f(C1)` -> false (enactment was already performed in `nbb(B1)`) -/// - `nbb(C1)`, `f(B1)` -> false (enactment was already performed in `nbb(B2)`) -pub struct EnactmentState -where - Block: BlockT, -{ - recent_best_block: Block::Hash, - recent_finalized_block: Block::Hash, -} - -/// Enactment action that should be performed after processing the `ChainEvent` -#[derive(Debug)] -pub enum EnactmentAction { - /// Both phases of maintenance shall be skipped - Skip, - /// Both phases of maintenance shall be performed - HandleEnactment(TreeRoute), - /// Enactment phase of maintenance shall be skipped - HandleFinalization, -} - -impl EnactmentState -where - Block: BlockT, -{ - /// Returns a new `EnactmentState` initialized with the given parameters. - pub fn new(recent_best_block: Block::Hash, recent_finalized_block: Block::Hash) -> Self { - EnactmentState { recent_best_block, recent_finalized_block } - } - - /// Returns the recently finalized block. - pub fn recent_finalized_block(&self) -> Block::Hash { - self.recent_finalized_block - } - - /// Updates the state according to the given `ChainEvent`, returning - /// `Some(tree_route)` with a tree route including the blocks that need to - /// be enacted/retracted. If no enactment is needed then `None` is returned. - pub fn update( - &mut self, - event: &ChainEvent, - tree_route: &TreeRouteF, - hash_to_number: &BlockNumberF, - ) -> Result, String> - where - TreeRouteF: Fn(Block::Hash, Block::Hash) -> Result, String>, - BlockNumberF: Fn(Block::Hash) -> Result>, String>, - { - let (new_hash, current_hash, finalized) = match event { - ChainEvent::NewBestBlock { hash, .. } => (*hash, self.recent_best_block, false), - ChainEvent::Finalized { hash, .. 
} => (*hash, self.recent_finalized_block, true), - }; - - // do not proceed with txpool maintain if block distance is to high - let skip_maintenance = match (hash_to_number(new_hash), hash_to_number(current_hash)) { - (Ok(Some(new)), Ok(Some(current))) => new.checked_sub(¤t) > Some(SKIP_MAINTENANCE_THRESHOLD.into()), - _ => true, - }; - - if skip_maintenance { - log::debug!(target: LOG_TARGET, "skip maintain: tree_route would be too long"); - self.force_update(event); - return Ok(EnactmentAction::Skip); - } - - // block was already finalized - if self.recent_finalized_block == new_hash { - log::debug!(target: LOG_TARGET, "handle_enactment: block already finalized"); - return Ok(EnactmentAction::Skip); - } - - // compute actual tree route from best_block to notified block, and use - // it instead of tree_route provided with event - let tree_route = tree_route(self.recent_best_block, new_hash)?; - - log::debug!( - target: LOG_TARGET, - "resolve hash:{:?} finalized:{:?} tree_route:{:?} best_block:{:?} finalized_block:{:?}", - new_hash, - finalized, - tree_route, - self.recent_best_block, - self.recent_finalized_block - ); - - // check if recently finalized block is on retracted path. this could be - // happening if we first received a finalization event and then a new - // best event for some old stale best head. - if tree_route.retracted().iter().any(|x| x.hash == self.recent_finalized_block) { - log::debug!( - target: LOG_TARGET, - "Recently finalized block {} would be retracted by ChainEvent {}, skipping", - self.recent_finalized_block, - new_hash - ); - return Ok(EnactmentAction::Skip); - } - - if finalized { - self.recent_finalized_block = new_hash; - - // if there are no enacted blocks in best_block -> hash tree_route, - // it means that block being finalized was already enacted (this - // case also covers best_block == new_hash), recent_best_block - // remains valid. - if tree_route.enacted().is_empty() { - log::trace!(target: LOG_TARGET, "handle_enactment: no newly enacted blocks since recent best block"); - return Ok(EnactmentAction::HandleFinalization); - } - - // otherwise enacted finalized block becomes best block... - } - - self.recent_best_block = new_hash; - - Ok(EnactmentAction::HandleEnactment(tree_route)) - } - - /// Forces update of the state according to the given `ChainEvent`. Intended to be used as a - /// fallback when tree_route cannot be computed. - pub fn force_update(&mut self, event: &ChainEvent) { - match event { - ChainEvent::NewBestBlock { hash, .. } => self.recent_best_block = *hash, - ChainEvent::Finalized { hash, .. } => self.recent_finalized_block = *hash, - }; - log::debug!( - target: LOG_TARGET, - "forced update: {:?}, {:?}", - self.recent_best_block, - self.recent_finalized_block, - ); - } -} diff --git a/crates/client/transaction-pool/src/error.rs b/crates/client/transaction-pool/src/error.rs deleted file mode 100644 index a11d692702..0000000000 --- a/crates/client/transaction-pool/src/error.rs +++ /dev/null @@ -1,50 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Transaction pool error. - -use sc_transaction_pool_api::error::Error as TxPoolError; - -/// Transaction pool result. -pub type Result = std::result::Result; - -/// Transaction pool error type. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error("Transaction pool error: {0}")] - Pool(#[from] TxPoolError), - - #[error("Blockchain error: {0}")] - Blockchain(#[from] sp_blockchain::Error), - - #[error("Block conversion error: {0}")] - BlockIdConversion(String), - - #[error("Runtime error: {0}")] - RuntimeApi(String), -} - -impl sc_transaction_pool_api::error::IntoPoolError for Error { - fn into_pool_error(self) -> std::result::Result { - match self { - Error::Pool(e) => Ok(e), - e => Err(e), - } - } -} diff --git a/crates/client/transaction-pool/src/graph/base_pool.rs b/crates/client/transaction-pool/src/graph/base_pool.rs deleted file mode 100644 index b24a72ca7b..0000000000 --- a/crates/client/transaction-pool/src/graph/base_pool.rs +++ /dev/null @@ -1,932 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! A basic version of the dependency graph. -//! -//! For a more full-featured pool, have a look at the `pool` module. - -use std::cmp::Ordering; -use std::collections::HashSet; -use std::sync::Arc; -use std::{fmt, hash}; - -use log::{debug, trace, warn}; -use sc_transaction_pool_api::{error, InPoolTransaction, PoolStatus}; -use serde::Serialize; -use sp_core::hexdisplay::HexDisplay; -use sp_runtime::traits::Member; -use sp_runtime::transaction_validity::{ - TransactionLongevity as Longevity, TransactionPriority as Priority, TransactionSource as Source, - TransactionTag as Tag, -}; - -use super::future::{FutureTransactions, WaitingTransaction}; -use super::ready::{BestIterator, ReadyTransactions, TransactionRef}; -use crate::LOG_TARGET; - -/// Successful import result. -#[derive(Debug, PartialEq, Eq)] -pub enum Imported { - /// Transaction was successfully imported to Ready queue. - Ready { - /// Hash of transaction that was successfully imported. - hash: Hash, - /// Transactions that got promoted from the Future queue. - promoted: Vec, - /// Transactions that failed to be promoted from the Future queue and are now discarded. - failed: Vec, - /// Transactions removed from the Ready pool (replaced). - removed: Vec>>, - }, - /// Transaction was successfully imported to Future queue. - Future { - /// Hash of transaction that was successfully imported. 
- hash: Hash, - }, -} - -impl Imported { - /// Returns the hash of imported transaction. - pub fn hash(&self) -> &Hash { - use self::Imported::*; - match *self { - Ready { ref hash, .. } => hash, - Future { ref hash, .. } => hash, - } - } -} - -/// Status of pruning the queue. -#[derive(Debug)] -pub struct PruneStatus { - /// A list of imports that satisfying the tag triggered. - pub promoted: Vec>, - /// A list of transactions that failed to be promoted and now are discarded. - pub failed: Vec, - /// A list of transactions that got pruned from the ready queue. - pub pruned: Vec>>, -} - -/// Immutable transaction -#[cfg_attr(test, derive(Clone))] -#[derive(PartialEq, Eq)] -pub struct Transaction { - /// Raw extrinsic representing that transaction. - pub data: Extrinsic, - /// Number of bytes encoding of the transaction requires. - pub bytes: usize, - /// Transaction hash (unique) - pub hash: Hash, - /// Transaction priority (higher = better) - pub priority: Priority, - /// At which block the transaction becomes invalid? - pub valid_till: Longevity, - /// Tags required by the transaction. - pub requires: Vec, - /// Tags that this transaction provides. - pub provides: Vec, - /// Should that transaction be propagated. - pub propagate: bool, - /// Source of that transaction. - pub source: Source, -} - -impl AsRef for Transaction { - fn as_ref(&self) -> &Extrinsic { - &self.data - } -} - -impl InPoolTransaction for Transaction { - type Transaction = Extrinsic; - type Hash = Hash; - - fn data(&self) -> &Extrinsic { - &self.data - } - - fn hash(&self) -> &Hash { - &self.hash - } - - fn priority(&self) -> &Priority { - &self.priority - } - - fn longevity(&self) -> &Longevity { - &self.valid_till - } - - fn requires(&self) -> &[Tag] { - &self.requires - } - - fn provides(&self) -> &[Tag] { - &self.provides - } - - fn is_propagable(&self) -> bool { - self.propagate - } -} - -impl Transaction { - /// Explicit transaction clone. - /// - /// Transaction should be cloned only if absolutely necessary && we want - /// every reason to be commented. That's why we `Transaction` is not `Clone`, - /// but there's explicit `duplicate` method. - pub fn duplicate(&self) -> Self { - Self { - data: self.data.clone(), - bytes: self.bytes, - hash: self.hash.clone(), - priority: self.priority, - source: self.source, - valid_till: self.valid_till, - requires: self.requires.clone(), - provides: self.provides.clone(), - propagate: self.propagate, - } - } -} - -impl fmt::Debug for Transaction -where - Hash: fmt::Debug, - Extrinsic: fmt::Debug, -{ - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let join_tags = - |tags: &[Tag]| tags.iter().map(|tag| HexDisplay::from(tag).to_string()).collect::>().join(", "); - - write!(fmt, "Transaction {{ ")?; - write!(fmt, "hash: {:?}, ", &self.hash)?; - write!(fmt, "priority: {:?}, ", &self.priority)?; - write!(fmt, "valid_till: {:?}, ", &self.valid_till)?; - write!(fmt, "bytes: {:?}, ", &self.bytes)?; - write!(fmt, "propagate: {:?}, ", &self.propagate)?; - write!(fmt, "source: {:?}, ", &self.source)?; - write!(fmt, "requires: [{}], ", join_tags(&self.requires))?; - write!(fmt, "provides: [{}], ", join_tags(&self.provides))?; - write!(fmt, "data: {:?}", &self.data)?; - write!(fmt, "}}")?; - Ok(()) - } -} - -/// Store last pruned tags for given number of invocations. -const RECENTLY_PRUNED_TAGS: usize = 2; - -/// Transaction pool. -/// -/// Builds a dependency graph for all transactions in the pool and returns -/// the ones that are currently ready to be executed. 
-/// -/// General note: -/// If function returns some transactions it usually means that importing them -/// as-is for the second time will fail or produce unwanted results. -/// Most likely it is required to revalidate them and recompute set of -/// required tags. -#[derive(Debug)] -pub struct BasePool { - reject_future_transactions: bool, - future: FutureTransactions, - ready: ReadyTransactions, - /// Store recently pruned tags (for last two invocations). - /// - /// This is used to make sure we don't accidentally put - /// transactions to future in case they were just stuck in verification. - recently_pruned: [HashSet; RECENTLY_PRUNED_TAGS], - recently_pruned_index: usize, -} - -impl Default for BasePool { - fn default() -> Self { - Self::new(false) - } -} - -impl BasePool { - /// Create new pool given reject_future_transactions flag. - pub fn new(reject_future_transactions: bool) -> Self { - Self { - reject_future_transactions, - future: Default::default(), - ready: Default::default(), - recently_pruned: Default::default(), - recently_pruned_index: 0, - } - } - - /// Temporary enables future transactions, runs closure and then restores - /// `reject_future_transactions` flag back to previous value. - /// - /// The closure accepts the mutable reference to the pool and original value - /// of the `reject_future_transactions` flag. - pub(crate) fn with_futures_enabled(&mut self, closure: impl FnOnce(&mut Self, bool) -> T) -> T { - let previous = self.reject_future_transactions; - self.reject_future_transactions = false; - let return_value = closure(self, previous); - self.reject_future_transactions = previous; - return_value - } - - /// Returns if the transaction for the given hash is already imported. - pub fn is_imported(&self, tx_hash: &Hash) -> bool { - self.future.contains(tx_hash) || self.ready.contains(tx_hash) - } - - /// Imports transaction to the pool. - /// - /// The pool consists of two parts: Future and Ready. - /// The former contains transactions that require some tags that are not yet provided by - /// other transactions in the pool. - /// The latter contains transactions that have all the requirements satisfied and are - /// ready to be included in the block. - pub fn import(&mut self, tx: Transaction) -> error::Result> { - if self.is_imported(&tx.hash) { - return Err(error::Error::AlreadyImported(Box::new(tx.hash))); - } - - let tx = WaitingTransaction::new(tx, self.ready.provided_tags(), &self.recently_pruned); - trace!(target: LOG_TARGET, "[{:?}] {:?}", tx.transaction.hash, tx); - debug!( - target: LOG_TARGET, - "[{:?}] Importing to {}", - tx.transaction.hash, - if tx.is_ready() { "ready" } else { "future" } - ); - - // If all tags are not satisfied import to future. - if !tx.is_ready() { - if self.reject_future_transactions { - return Err(error::Error::RejectedFutureTransaction); - } - - let hash = tx.transaction.hash.clone(); - self.future.import(tx); - return Ok(Imported::Future { hash }); - } - - self.import_to_ready(tx) - } - - /// Imports transaction to ready queue. - /// - /// NOTE the transaction has to have all requirements satisfied. 
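As a concrete illustration of the Ready/Future split described in the doc comment above, here is a minimal standalone sketch of the placement decision, assuming simplified `u64` hashes and `Vec<u8>` tags; `Tx`, `Placement` and `place` are illustrative names, not the deleted module's API:

```rust
use std::collections::{HashMap, HashSet};

type Tag = Vec<u8>;
type Hash = u64;

/// Simplified transaction: only the fields the placement decision needs.
struct Tx {
    hash: Hash,
    requires: Vec<Tag>,
    provides: Vec<Tag>,
}

enum Placement {
    Ready,
    Future { missing: HashSet<Tag> },
}

/// Decide where an incoming transaction goes, given the tags already
/// provided by the Ready queue (tag -> providing transaction hash).
fn place(tx: &Tx, provided: &HashMap<Tag, Hash>) -> Placement {
    let missing: HashSet<Tag> =
        tx.requires.iter().filter(|t| !provided.contains_key(*t)).cloned().collect();
    if missing.is_empty() { Placement::Ready } else { Placement::Future { missing } }
}

fn main() {
    let mut provided = HashMap::new();
    // Nothing provides tag [0] yet, so a transaction requiring it waits in Future.
    let tx1 = Tx { hash: 1, requires: vec![vec![0]], provides: vec![vec![1]] };
    assert!(matches!(place(&tx1, &provided), Placement::Future { .. }));

    // Once some ready transaction provides tag [0], the same transaction is Ready.
    provided.insert(vec![0], 2);
    assert!(matches!(place(&tx1, &provided), Placement::Ready));
}
```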
- fn import_to_ready(&mut self, tx: WaitingTransaction) -> error::Result> { - let hash = tx.transaction.hash.clone(); - let mut promoted = vec![]; - let mut failed = vec![]; - let mut removed = vec![]; - - let mut first = true; - let mut to_import = vec![tx]; - - // take first transaction from the list - while let Some(tx) = to_import.pop() { - // find transactions in Future that it unlocks - to_import.append(&mut self.future.satisfy_tags(&tx.transaction.provides)); - - // import this transaction - let current_hash = tx.transaction.hash.clone(); - match self.ready.import(tx) { - Ok(mut replaced) => { - if !first { - promoted.push(current_hash); - } - // The transactions were removed from the ready pool. We might attempt to - // re-import them. - removed.append(&mut replaced); - } - // transaction failed to be imported. - Err(e) => { - if first { - debug!(target: LOG_TARGET, "[{:?}] Error importing: {:?}", current_hash, e); - return Err(e); - } else { - failed.push(current_hash); - } - } - } - first = false; - } - - // An edge case when importing transaction caused - // some future transactions to be imported and that - // future transactions pushed out current transaction. - // This means that there is a cycle and the transactions should - // be moved back to future, since we can't resolve it. - if removed.iter().any(|tx| tx.hash == hash) { - // We still need to remove all transactions that we promoted - // since they depend on each other and will never get to the best iterator. - self.ready.remove_subtree(&promoted); - - debug!(target: LOG_TARGET, "[{:?}] Cycle detected, bailing.", hash); - return Err(error::Error::CycleDetected); - } - - Ok(Imported::Ready { hash, promoted, failed, removed }) - } - - /// Returns an iterator over ready transactions in the pool. - pub fn ready(&self) -> BestIterator { - self.ready.get() - } - - /// Returns an iterator over future transactions in the pool. - pub fn futures(&self) -> impl Iterator> { - self.future.all() - } - - /// Returns pool transactions given list of hashes. - /// - /// Includes both ready and future pool. For every hash in the `hashes` - /// iterator an `Option` is produced (so the resulting `Vec` always have the same length). - pub fn by_hashes(&self, hashes: &[Hash]) -> Vec>>> { - let ready = self.ready.by_hashes(hashes); - let future = self.future.by_hashes(hashes); - - ready.into_iter().zip(future).map(|(a, b)| a.or(b)).collect() - } - - /// Returns pool transaction by hash. - pub fn ready_by_hash(&self, hash: &Hash) -> Option>> { - self.ready.by_hash(hash) - } - - /// Makes sure that the transactions in the queues stay within provided limits. - /// - /// Removes and returns worst transactions from the queues and all transactions that depend on - /// them. Technically the worst transaction should be evaluated by computing the entire pending - /// set. We use a simplified approach to remove transactions with the lowest priority first or - /// those that occupy the pool for the longest time in case priority is the same. 
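A small sketch of the eviction rule spelled out above (lowest priority first, and on a priority tie the transaction that has occupied the pool the longest), using a made-up `Entry` type with only the fields the rule looks at:

```rust
/// Simplified view of a ready transaction for limit enforcement.
#[derive(Debug, Clone)]
struct Entry {
    hash: u64,
    priority: u64,
    insertion_id: u64, // smaller = inserted earlier, i.e. longer in the pool
}

/// Pick the entry to evict first: lowest priority, oldest on a tie.
fn worst(entries: &[Entry]) -> Option<Entry> {
    entries.iter().cloned().fold(None, |worst, current| match worst {
        None => Some(current),
        Some(w) => Some(
            if (current.priority, current.insertion_id) < (w.priority, w.insertion_id) {
                current
            } else {
                w
            },
        ),
    })
}

fn main() {
    let entries = vec![
        Entry { hash: 1, priority: 5, insertion_id: 10 },
        Entry { hash: 2, priority: 1, insertion_id: 12 }, // lowest priority, but newer
        Entry { hash: 3, priority: 1, insertion_id: 11 }, // lowest priority and oldest
    ];
    // hash 3 is evicted first: it ties on priority and has been in the pool longer.
    assert_eq!(worst(&entries).unwrap().hash, 3);
}
```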
- pub fn enforce_limits(&mut self, ready: &Limit, future: &Limit) -> Vec>> { - let mut removed = vec![]; - - while ready.is_exceeded(self.ready.len(), self.ready.bytes()) { - // find the worst transaction - let worst = self.ready.fold::, _>(|worst, current| { - let transaction = ¤t.transaction; - worst - .map(|worst| { - // Here we don't use `TransactionRef`'s ordering implementation because - // while it prefers priority like need here, it also prefers older - // transactions for inclusion purposes and limit enforcement needs to prefer - // newer transactions instead and drop the older ones. - match worst.transaction.priority.cmp(&transaction.transaction.priority) { - Ordering::Less => worst, - Ordering::Equal => { - if worst.insertion_id > transaction.insertion_id { - transaction.clone() - } else { - worst - } - } - Ordering::Greater => transaction.clone(), - } - }) - .or_else(|| Some(transaction.clone())) - }); - - if let Some(worst) = worst { - removed.append(&mut self.remove_subtree(&[worst.transaction.hash.clone()])) - } else { - break; - } - } - - while future.is_exceeded(self.future.len(), self.future.bytes()) { - // find the worst transaction - let worst = self.future.fold(|worst, current| match worst { - None => Some(current.clone()), - Some(ref tx) if tx.imported_at > current.imported_at => Some(current.clone()), - other => other, - }); - - if let Some(worst) = worst { - removed.append(&mut self.remove_subtree(&[worst.transaction.hash.clone()])) - } else { - break; - } - } - - removed - } - - /// Removes all transactions represented by the hashes and all other transactions - /// that depend on them. - /// - /// Returns a list of actually removed transactions. - /// NOTE some transactions might still be valid, but were just removed because - /// they were part of a chain, you may attempt to re-import them later. - /// NOTE If you want to remove ready transactions that were already used - /// and you don't want them to be stored in the pool use `prune_tags` method. - pub fn remove_subtree(&mut self, hashes: &[Hash]) -> Vec>> { - let mut removed = self.ready.remove_subtree(hashes); - removed.extend(self.future.remove(hashes)); - removed - } - - /// Removes and returns all transactions from the future queue. - pub fn clear_future(&mut self) -> Vec>> { - self.future.clear() - } - - /// Prunes transactions that provide given list of tags. - /// - /// This will cause all transactions that provide these tags to be removed from the pool, - /// but unlike `remove_subtree`, dependent transactions are not touched. - /// Additional transactions from future queue might be promoted to ready if you satisfy tags - /// that the pool didn't previously know about. 
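The pruning bookkeeping described above relies on remembering the tags pruned in the last couple of invocations, so that a transaction validated against a slightly stale view is not parked in the Future queue by mistake. A rough sketch of that two-slot ring; `RecentlyPruned` is a hypothetical stand-in for the fields the deleted `BasePool` keeps inline:

```rust
use std::collections::HashSet;

type Tag = Vec<u8>;

/// Tags pruned during the last two pruning rounds.
struct RecentlyPruned {
    slots: [HashSet<Tag>; 2],
    index: usize,
}

impl RecentlyPruned {
    fn new() -> Self {
        Self { slots: [HashSet::new(), HashSet::new()], index: 0 }
    }

    /// Start a new pruning round: advance the ring and clear the reused slot.
    fn begin_round(&mut self) -> &mut HashSet<Tag> {
        let i = self.index;
        self.index = (i + 1) % 2;
        let slot = &mut self.slots[i];
        slot.clear();
        slot
    }

    /// Was this tag pruned in one of the last two rounds?
    fn contains(&self, tag: &Tag) -> bool {
        self.slots.iter().any(|s| s.contains(tag))
    }
}

fn main() {
    let mut recent = RecentlyPruned::new();
    recent.begin_round().insert(vec![1]);
    recent.begin_round().insert(vec![2]);
    assert!(recent.contains(&vec![1]) && recent.contains(&vec![2]));
    // A third round reuses the oldest slot, so tag [1] is forgotten.
    recent.begin_round().insert(vec![3]);
    assert!(!recent.contains(&vec![1]));
    assert!(recent.contains(&vec![2]) && recent.contains(&vec![3]));
}
```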
- pub fn prune_tags(&mut self, tags: impl IntoIterator) -> PruneStatus { - let mut to_import = vec![]; - let mut pruned = vec![]; - let recently_pruned = &mut self.recently_pruned[self.recently_pruned_index]; - self.recently_pruned_index = (self.recently_pruned_index + 1) % RECENTLY_PRUNED_TAGS; - recently_pruned.clear(); - - for tag in tags { - // make sure to promote any future transactions that could be unlocked - to_import.append(&mut self.future.satisfy_tags(std::iter::once(&tag))); - // and actually prune transactions in ready queue - pruned.append(&mut self.ready.prune_tags(tag.clone())); - // store the tags for next submission - recently_pruned.insert(tag); - } - - let mut promoted = vec![]; - let mut failed = vec![]; - for tx in to_import { - let hash = tx.transaction.hash.clone(); - match self.import_to_ready(tx) { - Ok(res) => promoted.push(res), - Err(e) => { - warn!(target: LOG_TARGET, "[{:?}] Failed to promote during pruning: {:?}", hash, e,); - failed.push(hash) - } - } - } - - PruneStatus { pruned, failed, promoted } - } - - /// Get pool status. - pub fn status(&self) -> PoolStatus { - PoolStatus { - ready: self.ready.len(), - ready_bytes: self.ready.bytes(), - future: self.future.len(), - future_bytes: self.future.bytes(), - } - } -} - -/// Queue limits -#[derive(Debug, Clone)] -pub struct Limit { - /// Maximal number of transactions in the queue. - pub count: usize, - /// Maximal size of encodings of all transactions in the queue. - pub total_bytes: usize, -} - -impl Limit { - /// Returns true if any of the provided values exceeds the limit. - pub fn is_exceeded(&self, count: usize, bytes: usize) -> bool { - self.count < count || self.total_bytes < bytes - } -} - -#[cfg(test)] -mod tests { - use super::*; - - type Hash = u64; - - fn pool() -> BasePool> { - BasePool::default() - } - - const DEFAULT_TX: Transaction> = Transaction { - data: vec![], - bytes: 1, - hash: 1u64, - priority: 5u64, - valid_till: 64u64, - requires: vec![], - provides: vec![], - propagate: true, - source: Source::External, - }; - - #[test] - fn should_import_transaction_to_ready() { - // given - let mut pool = pool(); - - // when - pool.import(Transaction { data: vec![1u8], provides: vec![vec![1]], ..DEFAULT_TX }).unwrap(); - - // then - assert_eq!(pool.ready().count(), 1); - assert_eq!(pool.ready.len(), 1); - } - - #[test] - fn should_not_import_same_transaction_twice() { - // given - let mut pool = pool(); - - // when - pool.import(Transaction { data: vec![1u8], provides: vec![vec![1]], ..DEFAULT_TX }).unwrap(); - pool.import(Transaction { data: vec![1u8], provides: vec![vec![1]], ..DEFAULT_TX }).unwrap_err(); - - // then - assert_eq!(pool.ready().count(), 1); - assert_eq!(pool.ready.len(), 1); - } - - #[test] - fn should_import_transaction_to_future_and_promote_it_later() { - // given - let mut pool = pool(); - - // when - pool.import(Transaction { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], ..DEFAULT_TX }) - .unwrap(); - assert_eq!(pool.ready().count(), 0); - assert_eq!(pool.ready.len(), 0); - pool.import(Transaction { data: vec![2u8], hash: 2, provides: vec![vec![0]], ..DEFAULT_TX }).unwrap(); - - // then - assert_eq!(pool.ready().count(), 2); - assert_eq!(pool.ready.len(), 2); - } - - #[test] - fn should_promote_a_subgraph() { - // given - let mut pool = pool(); - - // when - pool.import(Transaction { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], ..DEFAULT_TX }) - .unwrap(); - pool.import(Transaction { data: vec![3u8], hash: 3, requires: 
vec![vec![2]], ..DEFAULT_TX }).unwrap(); - pool.import(Transaction { - data: vec![2u8], - hash: 2, - requires: vec![vec![1]], - provides: vec![vec![3], vec![2]], - ..DEFAULT_TX - }) - .unwrap(); - pool.import(Transaction { - data: vec![4u8], - hash: 4, - priority: 1_000u64, - requires: vec![vec![3], vec![4]], - ..DEFAULT_TX - }) - .unwrap(); - assert_eq!(pool.ready().count(), 0); - assert_eq!(pool.ready.len(), 0); - - let res = pool - .import(Transaction { data: vec![5u8], hash: 5, provides: vec![vec![0], vec![4]], ..DEFAULT_TX }) - .unwrap(); - - // then - let mut it = pool.ready().map(|tx| tx.data[0]); - - assert_eq!(it.next(), Some(5)); - assert_eq!(it.next(), Some(1)); - assert_eq!(it.next(), Some(2)); - assert_eq!(it.next(), Some(4)); - assert_eq!(it.next(), Some(3)); - assert_eq!(it.next(), None); - assert_eq!(res, Imported::Ready { hash: 5, promoted: vec![1, 2, 3, 4], failed: vec![], removed: vec![] }); - } - - #[test] - fn should_handle_a_cycle() { - // given - let mut pool = pool(); - pool.import(Transaction { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], ..DEFAULT_TX }) - .unwrap(); - pool.import(Transaction { - data: vec![3u8], - hash: 3, - requires: vec![vec![1]], - provides: vec![vec![2]], - ..DEFAULT_TX - }) - .unwrap(); - assert_eq!(pool.ready().count(), 0); - assert_eq!(pool.ready.len(), 0); - - // when - pool.import(Transaction { - data: vec![2u8], - hash: 2, - requires: vec![vec![2]], - provides: vec![vec![0]], - ..DEFAULT_TX - }) - .unwrap(); - - // then - { - let mut it = pool.ready().map(|tx| tx.data[0]); - assert_eq!(it.next(), None); - } - // all transactions occupy the Future queue - it's fine - assert_eq!(pool.future.len(), 3); - - // let's close the cycle with one additional transaction - let res = pool - .import(Transaction { data: vec![4u8], hash: 4, priority: 50u64, provides: vec![vec![0]], ..DEFAULT_TX }) - .unwrap(); - let mut it = pool.ready().map(|tx| tx.data[0]); - assert_eq!(it.next(), Some(4)); - assert_eq!(it.next(), Some(1)); - assert_eq!(it.next(), Some(3)); - assert_eq!(it.next(), None); - assert_eq!(res, Imported::Ready { hash: 4, promoted: vec![1, 3], failed: vec![2], removed: vec![] }); - assert_eq!(pool.future.len(), 0); - } - - #[test] - fn should_handle_a_cycle_with_low_priority() { - // given - let mut pool = pool(); - pool.import(Transaction { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], ..DEFAULT_TX }) - .unwrap(); - pool.import(Transaction { - data: vec![3u8], - hash: 3, - requires: vec![vec![1]], - provides: vec![vec![2]], - ..DEFAULT_TX - }) - .unwrap(); - assert_eq!(pool.ready().count(), 0); - assert_eq!(pool.ready.len(), 0); - - // when - pool.import(Transaction { - data: vec![2u8], - hash: 2, - requires: vec![vec![2]], - provides: vec![vec![0]], - ..DEFAULT_TX - }) - .unwrap(); - - // then - { - let mut it = pool.ready().map(|tx| tx.data[0]); - assert_eq!(it.next(), None); - } - // all transactions occupy the Future queue - it's fine - assert_eq!(pool.future.len(), 3); - - // let's close the cycle with one additional transaction - let err = pool - .import(Transaction { - data: vec![4u8], - hash: 4, - priority: 1u64, // lower priority than Tx(2) - provides: vec![vec![0]], - ..DEFAULT_TX - }) - .unwrap_err(); - let mut it = pool.ready().map(|tx| tx.data[0]); - assert_eq!(it.next(), None); - assert_eq!(pool.ready.len(), 0); - assert_eq!(pool.future.len(), 0); - if let error::Error::CycleDetected = err { - } else { - unreachable!("Invalid error kind: {:?}", err); - } - } - - #[test] - fn 
should_remove_invalid_transactions() { - // given - let mut pool = pool(); - pool.import(Transaction { data: vec![5u8], hash: 5, provides: vec![vec![0], vec![4]], ..DEFAULT_TX }).unwrap(); - pool.import(Transaction { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], ..DEFAULT_TX }) - .unwrap(); - pool.import(Transaction { data: vec![3u8], hash: 3, requires: vec![vec![2]], ..DEFAULT_TX }).unwrap(); - pool.import(Transaction { - data: vec![2u8], - hash: 2, - requires: vec![vec![1]], - provides: vec![vec![3], vec![2]], - ..DEFAULT_TX - }) - .unwrap(); - pool.import(Transaction { - data: vec![4u8], - hash: 4, - priority: 1_000u64, - requires: vec![vec![3], vec![4]], - ..DEFAULT_TX - }) - .unwrap(); - // future - pool.import(Transaction { - data: vec![6u8], - hash: 6, - priority: 1_000u64, - requires: vec![vec![11]], - ..DEFAULT_TX - }) - .unwrap(); - assert_eq!(pool.ready().count(), 5); - assert_eq!(pool.future.len(), 1); - - // when - pool.remove_subtree(&[6, 1]); - - // then - assert_eq!(pool.ready().count(), 1); - assert_eq!(pool.future.len(), 0); - } - - #[test] - fn should_prune_ready_transactions() { - // given - let mut pool = pool(); - // future (waiting for 0) - pool.import(Transaction { - data: vec![5u8], - hash: 5, - requires: vec![vec![0]], - provides: vec![vec![100]], - ..DEFAULT_TX - }) - .unwrap(); - // ready - pool.import(Transaction { data: vec![1u8], provides: vec![vec![1]], ..DEFAULT_TX }).unwrap(); - pool.import(Transaction { - data: vec![2u8], - hash: 2, - requires: vec![vec![2]], - provides: vec![vec![3]], - ..DEFAULT_TX - }) - .unwrap(); - pool.import(Transaction { - data: vec![3u8], - hash: 3, - requires: vec![vec![1]], - provides: vec![vec![2]], - ..DEFAULT_TX - }) - .unwrap(); - pool.import(Transaction { - data: vec![4u8], - hash: 4, - priority: 1_000u64, - requires: vec![vec![3], vec![2]], - provides: vec![vec![4]], - ..DEFAULT_TX - }) - .unwrap(); - - assert_eq!(pool.ready().count(), 4); - assert_eq!(pool.future.len(), 1); - - // when - let result = pool.prune_tags(vec![vec![0], vec![2]]); - - // then - assert_eq!(result.pruned.len(), 2); - assert_eq!(result.failed.len(), 0); - assert_eq!(result.promoted[0], Imported::Ready { hash: 5, promoted: vec![], failed: vec![], removed: vec![] }); - assert_eq!(result.promoted.len(), 1); - assert_eq!(pool.future.len(), 0); - assert_eq!(pool.ready.len(), 3); - assert_eq!(pool.ready().count(), 3); - } - - #[test] - fn transaction_debug() { - assert_eq!( - format!( - "{:?}", - Transaction { - data: vec![4u8], - hash: 4, - priority: 1_000u64, - requires: vec![vec![3], vec![2]], - provides: vec![vec![4]], - ..DEFAULT_TX - } - ), - "Transaction { hash: 4, priority: 1000, valid_till: 64, bytes: 1, propagate: true, source: \ - TransactionSource::External, requires: [03, 02], provides: [04], data: [4]}" - .to_owned() - ); - } - - #[test] - fn transaction_propagation() { - assert!( - Transaction { - data: vec![4u8], - hash: 4, - priority: 1_000u64, - requires: vec![vec![3], vec![2]], - provides: vec![vec![4]], - ..DEFAULT_TX - } - .is_propagable(), - ); - - assert!( - !Transaction { - data: vec![4u8], - hash: 4, - priority: 1_000u64, - requires: vec![vec![3], vec![2]], - provides: vec![vec![4]], - propagate: false, - ..DEFAULT_TX - } - .is_propagable(), - ); - } - - #[test] - fn should_reject_future_transactions() { - // given - let mut pool = pool(); - - // when - pool.reject_future_transactions = true; - - // then - let err = pool.import(Transaction { data: vec![5u8], hash: 5, requires: vec![vec![0]], ..DEFAULT_TX }); 
- - if let Err(error::Error::RejectedFutureTransaction) = err { - } else { - unreachable!("Invalid error kind: {:?}", err); - } - } - - #[test] - fn should_clear_future_queue() { - // given - let mut pool = pool(); - - // when - pool.import(Transaction { data: vec![5u8], hash: 5, requires: vec![vec![0]], ..DEFAULT_TX }).unwrap(); - - // then - assert_eq!(pool.future.len(), 1); - - // and then when - assert_eq!(pool.clear_future().len(), 1); - - // then - assert_eq!(pool.future.len(), 0); - } - - #[test] - fn should_accept_future_transactions_when_explicitly_asked_to() { - // given - let mut pool = pool(); - pool.reject_future_transactions = true; - - // when - let flag_value = pool.with_futures_enabled(|pool, flag| { - pool.import(Transaction { data: vec![5u8], hash: 5, requires: vec![vec![0]], ..DEFAULT_TX }).unwrap(); - - flag - }); - - // then - assert!(flag_value); - assert!(pool.reject_future_transactions); - assert_eq!(pool.future.len(), 1); - } -} diff --git a/crates/client/transaction-pool/src/graph/future.rs b/crates/client/transaction-pool/src/graph/future.rs deleted file mode 100644 index 0d699e8003..0000000000 --- a/crates/client/transaction-pool/src/graph/future.rs +++ /dev/null @@ -1,236 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -use std::collections::{HashMap, HashSet}; -use std::sync::Arc; -use std::time::Instant; -use std::{fmt, hash}; - -use sp_core::hexdisplay::HexDisplay; -use sp_runtime::transaction_validity::TransactionTag as Tag; - -use super::base_pool::Transaction; - -/// Transaction with partially satisfied dependencies. -pub struct WaitingTransaction { - /// Transaction details. - pub transaction: Arc>, - /// Tags that are required and have not been satisfied yet by other transactions in the pool. - pub missing_tags: HashSet, - /// Time of import to the Future Queue. - pub imported_at: Instant, -} - -impl fmt::Debug for WaitingTransaction { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "WaitingTransaction {{ ")?; - write!(fmt, "imported_at: {:?}, ", self.imported_at)?; - write!(fmt, "transaction: {:?}, ", self.transaction)?; - write!( - fmt, - "missing_tags: {{{}}}", - self.missing_tags.iter().map(|tag| HexDisplay::from(tag).to_string()).collect::>().join(", "), - )?; - write!(fmt, "}}") - } -} - -impl Clone for WaitingTransaction { - fn clone(&self) -> Self { - Self { - transaction: self.transaction.clone(), - missing_tags: self.missing_tags.clone(), - imported_at: self.imported_at, - } - } -} - -impl WaitingTransaction { - /// Creates a new `WaitingTransaction`. - /// - /// Computes the set of missing tags based on the requirements and tags that - /// are provided by all transactions in the ready queue. 
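A compact sketch of the missing-tag computation the comment above describes, with concrete `Vec<u8>` tags and a plain `HashMap`/`HashSet` view instead of the module's generic types:

```rust
use std::collections::{HashMap, HashSet};

type Tag = Vec<u8>;

/// Compute which required tags are still missing: a tag counts as satisfied
/// if some ready transaction provides it *or* it was pruned very recently.
fn missing_tags(
    requires: &[Tag],
    provided: &HashMap<Tag, u64>,     // tag -> hash of the providing ready tx
    recently_pruned: &[HashSet<Tag>], // the last few pruning rounds
) -> HashSet<Tag> {
    requires
        .iter()
        .filter(|tag| {
            let satisfied = provided.contains_key(*tag)
                || recently_pruned.iter().any(|round| round.contains(*tag));
            !satisfied
        })
        .cloned()
        .collect()
}

fn main() {
    let provided = HashMap::from([(vec![1u8], 7u64)]);
    let pruned = [HashSet::from([vec![2u8]])];
    // Tag [1] is provided, tag [2] was just pruned, tag [3] is genuinely missing.
    let missing = missing_tags(&[vec![1], vec![2], vec![3]], &provided, &pruned);
    assert_eq!(missing, HashSet::from([vec![3u8]]));
}
```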
- pub fn new( - transaction: Transaction, - provided: &HashMap, - recently_pruned: &[HashSet], - ) -> Self { - let missing_tags = transaction - .requires - .iter() - .filter(|tag| { - // is true if the tag is already satisfied either via transaction in the pool - // or one that was recently included. - let is_provided = provided.contains_key(&**tag) || recently_pruned.iter().any(|x| x.contains(&**tag)); - !is_provided - }) - .cloned() - .collect(); - - Self { transaction: Arc::new(transaction), missing_tags, imported_at: Instant::now() } - } - - /// Marks the tag as satisfied. - pub fn satisfy_tag(&mut self, tag: &Tag) { - self.missing_tags.remove(tag); - } - - /// Returns true if transaction has all requirements satisfied. - pub fn is_ready(&self) -> bool { - self.missing_tags.is_empty() - } -} - -/// A pool of transactions that are not yet ready to be included in the block. -/// -/// Contains transactions that are still awaiting for some other transactions that -/// could provide a tag that they require. -#[derive(Debug)] -pub struct FutureTransactions { - /// tags that are not yet provided by any transaction and we await for them - wanted_tags: HashMap>, - /// Transactions waiting for a particular other transaction - waiting: HashMap>, -} - -impl Default for FutureTransactions { - fn default() -> Self { - Self { wanted_tags: Default::default(), waiting: Default::default() } - } -} - -const WAITING_PROOF: &str = r"# -In import we always insert to `waiting` if we push to `wanted_tags`; -when removing from `waiting` we always clear `wanted_tags`; -every hash from `wanted_tags` is always present in `waiting`; -qed -#"; - -impl FutureTransactions { - /// Import transaction to Future queue. - /// - /// Only transactions that don't have all their tags satisfied should occupy - /// the Future queue. - /// As soon as required tags are provided by some other transactions that are ready - /// we should remove the transactions from here and move them to the Ready queue. - pub fn import(&mut self, tx: WaitingTransaction) { - assert!(!tx.is_ready(), "Transaction is ready."); - assert!(!self.waiting.contains_key(&tx.transaction.hash), "Transaction is already imported."); - - // Add all tags that are missing - for tag in &tx.missing_tags { - let entry = self.wanted_tags.entry(tag.clone()).or_default(); - entry.insert(tx.transaction.hash.clone()); - } - - // Add the transaction to a by-hash waiting map - self.waiting.insert(tx.transaction.hash.clone(), tx); - } - - /// Returns true if given hash is part of the queue. - pub fn contains(&self, hash: &Hash) -> bool { - self.waiting.contains_key(hash) - } - - /// Returns a list of known transactions - pub fn by_hashes(&self, hashes: &[Hash]) -> Vec>>> { - hashes.iter().map(|h| self.waiting.get(h).map(|x| x.transaction.clone())).collect() - } - - /// Satisfies provided tags in transactions that are waiting for them. - /// - /// Returns (and removes) transactions that became ready after their last tag got - /// satisfied and now we can remove them from Future and move to Ready queue. 
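To make the promotion path above concrete, a simplified sketch of a Future-queue index keyed by wanted tag; `FutureIndex` and `satisfy_tag` are illustrative names, and the real queue also stores the full waiting transactions rather than bare hashes:

```rust
use std::collections::{HashMap, HashSet};

type Tag = Vec<u8>;
type Hash = u64;

/// Minimal Future-queue index: which waiting transactions want which tag,
/// and which tags each waiting transaction is still missing.
struct FutureIndex {
    wanted_tags: HashMap<Tag, HashSet<Hash>>,
    missing: HashMap<Hash, HashSet<Tag>>,
}

impl FutureIndex {
    /// A tag just became available: strike it from every waiter and return
    /// the transactions that now have nothing left to wait for.
    fn satisfy_tag(&mut self, tag: &Tag) -> Vec<Hash> {
        let mut became_ready = Vec::new();
        if let Some(waiters) = self.wanted_tags.remove(tag) {
            for hash in waiters {
                if let Some(tags) = self.missing.get_mut(&hash) {
                    tags.remove(tag);
                    if tags.is_empty() {
                        self.missing.remove(&hash);
                        became_ready.push(hash);
                    }
                }
            }
        }
        became_ready
    }
}

fn main() {
    let mut index = FutureIndex {
        wanted_tags: HashMap::from([
            (vec![0u8], HashSet::from([1u64, 2u64])),
            (vec![9u8], HashSet::from([2u64])),
        ]),
        missing: HashMap::from([
            (1u64, HashSet::from([vec![0u8]])),            // only waits for [0]
            (2u64, HashSet::from([vec![0u8], vec![9u8]])), // still needs [9] afterwards
        ]),
    };
    let ready = index.satisfy_tag(&vec![0u8]);
    assert_eq!(ready, vec![1]); // tx 1 promoted, tx 2 keeps waiting
    assert!(index.missing.contains_key(&2));
}
```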
- pub fn satisfy_tags>( - &mut self, - tags: impl IntoIterator, - ) -> Vec> { - let mut became_ready = vec![]; - - for tag in tags { - if let Some(hashes) = self.wanted_tags.remove(tag.as_ref()) { - for hash in hashes { - let is_ready = { - let tx = self.waiting.get_mut(&hash).expect(WAITING_PROOF); - tx.satisfy_tag(tag.as_ref()); - tx.is_ready() - }; - - if is_ready { - let tx = self.waiting.remove(&hash).expect(WAITING_PROOF); - became_ready.push(tx); - } - } - } - } - - became_ready - } - - /// Removes transactions for given list of hashes. - /// - /// Returns a list of actually removed transactions. - pub fn remove(&mut self, hashes: &[Hash]) -> Vec>> { - let mut removed = vec![]; - for hash in hashes { - if let Some(waiting_tx) = self.waiting.remove(hash) { - // remove from wanted_tags as well - for tag in waiting_tx.missing_tags { - let remove = if let Some(wanted) = self.wanted_tags.get_mut(&tag) { - wanted.remove(hash); - wanted.is_empty() - } else { - false - }; - if remove { - self.wanted_tags.remove(&tag); - } - } - // add to result - removed.push(waiting_tx.transaction) - } - } - removed - } - - /// Fold a list of future transactions to compute a single value. - pub fn fold, &WaitingTransaction) -> Option>(&mut self, f: F) -> Option { - self.waiting.values().fold(None, f) - } - - /// Returns iterator over all future transactions - pub fn all(&self) -> impl Iterator> { - self.waiting.values().map(|waiting| &*waiting.transaction) - } - - /// Removes and returns all future transactions. - pub fn clear(&mut self) -> Vec>> { - self.wanted_tags.clear(); - self.waiting.drain().map(|(_, tx)| tx.transaction).collect() - } - - /// Returns number of transactions in the Future queue. - pub fn len(&self) -> usize { - self.waiting.len() - } - - /// Returns sum of encoding lengths of all transactions in this queue. - pub fn bytes(&self) -> usize { - self.waiting.values().fold(0, |acc, tx| acc + tx.transaction.bytes) - } -} diff --git a/crates/client/transaction-pool/src/graph/listener.rs b/crates/client/transaction-pool/src/graph/listener.rs deleted file mode 100644 index 6970e7557b..0000000000 --- a/crates/client/transaction-pool/src/graph/listener.rs +++ /dev/null @@ -1,145 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -use std::collections::HashMap; -use std::fmt::Debug; -use std::hash; - -use linked_hash_map::LinkedHashMap; -use log::{debug, trace}; -use serde::Serialize; -use sp_runtime::traits; - -use super::{watcher, BlockHash, ChainApi, ExtrinsicHash}; -use crate::LOG_TARGET; - -/// Extrinsic pool default listener. -pub struct Listener { - watchers: HashMap>>, - finality_watchers: LinkedHashMap, Vec>, -} - -/// Maximum number of blocks awaiting finality at any time. 
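A sketch of the bounded "awaiting finality" behaviour this constant controls: once more blocks are pending than the cap allows, the oldest ones are dropped, which is where the deleted listener fires `finality_timeout`. The `VecDeque` and the cap of 3 are simplifications for the example; the real code keys a `LinkedHashMap` by block hash and uses 512:

```rust
use std::collections::VecDeque;

type BlockHash = u64;
type TxHash = u64;

/// Cap used for the sketch; the real constant in the deleted file is 512.
const MAX_FINALITY_WATCHERS: usize = 3;

/// Blocks whose transactions were included but not yet finalized, oldest first.
struct FinalityWatchers {
    blocks: VecDeque<(BlockHash, Vec<TxHash>)>,
}

impl FinalityWatchers {
    /// Record that `tx` was included in `block`; return blocks that gave up
    /// waiting for finality because the cap was exceeded.
    fn record_in_block(&mut self, block: BlockHash, tx: TxHash) -> Vec<BlockHash> {
        let needs_new_entry = match self.blocks.back_mut() {
            Some((b, txs)) if *b == block => {
                txs.push(tx);
                false
            }
            _ => true,
        };
        if needs_new_entry {
            self.blocks.push_back((block, vec![tx]));
        }
        // Enforce the cap: the oldest blocks time out.
        let mut timed_out = Vec::new();
        while self.blocks.len() > MAX_FINALITY_WATCHERS {
            if let Some((old_block, _txs)) = self.blocks.pop_front() {
                timed_out.push(old_block);
            }
        }
        timed_out
    }
}

fn main() {
    let mut watchers = FinalityWatchers { blocks: VecDeque::new() };
    for block in 1..=3u64 {
        assert!(watchers.record_in_block(block, block * 10).is_empty());
    }
    // A fourth block pushes the oldest one past the cap.
    assert_eq!(watchers.record_in_block(4, 40), vec![1]);
}
```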
-const MAX_FINALITY_WATCHERS: usize = 512; - -impl Default for Listener { - fn default() -> Self { - Self { watchers: Default::default(), finality_watchers: Default::default() } - } -} - -impl Listener { - fn fire(&mut self, hash: &H, fun: F) - where - F: FnOnce(&mut watcher::Sender>), - { - let clean = if let Some(h) = self.watchers.get_mut(hash) { - fun(h); - h.is_done() - } else { - false - }; - - if clean { - self.watchers.remove(hash); - } - } - - /// Creates a new watcher for given verified extrinsic. - /// - /// The watcher can be used to subscribe to life-cycle events of that extrinsic. - pub fn create_watcher(&mut self, hash: H) -> watcher::Watcher> { - let sender = self.watchers.entry(hash.clone()).or_default(); - sender.new_watcher(hash) - } - - /// Notify the listeners about extrinsic broadcast. - pub fn broadcasted(&mut self, hash: &H, peers: Vec) { - trace!(target: LOG_TARGET, "[{:?}] Broadcasted", hash); - self.fire(hash, |watcher| watcher.broadcast(peers)); - } - - /// New transaction was added to the ready pool or promoted from the future pool. - pub fn ready(&mut self, tx: &H, old: Option<&H>) { - trace!(target: LOG_TARGET, "[{:?}] Ready (replaced with {:?})", tx, old); - self.fire(tx, |watcher| watcher.ready()); - if let Some(old) = old { - self.fire(old, |watcher| watcher.usurped(tx.clone())); - } - } - - /// New transaction was added to the future pool. - pub fn future(&mut self, tx: &H) { - trace!(target: LOG_TARGET, "[{:?}] Future", tx); - self.fire(tx, |watcher| watcher.future()); - } - - /// Transaction was dropped from the pool because of the limit. - pub fn dropped(&mut self, tx: &H, by: Option<&H>) { - trace!(target: LOG_TARGET, "[{:?}] Dropped (replaced with {:?})", tx, by); - self.fire(tx, |watcher| match by { - Some(t) => watcher.usurped(t.clone()), - None => watcher.dropped(), - }) - } - - /// Transaction was removed as invalid. - pub fn invalid(&mut self, tx: &H) { - debug!(target: LOG_TARGET, "[{:?}] Extrinsic invalid", tx); - self.fire(tx, |watcher| watcher.invalid()); - } - - /// Transaction was pruned from the pool. - pub fn pruned(&mut self, block_hash: BlockHash, tx: &H) { - debug!(target: LOG_TARGET, "[{:?}] Pruned at {:?}", tx, block_hash); - // Get the transactions included in the given block hash. - let txs = self.finality_watchers.entry(block_hash).or_default(); - txs.push(tx.clone()); - // Current transaction is the last one included. - let tx_index = txs.len() - 1; - - self.fire(tx, |watcher| watcher.in_block(block_hash, tx_index)); - - while self.finality_watchers.len() > MAX_FINALITY_WATCHERS { - if let Some((hash, txs)) = self.finality_watchers.pop_front() { - for tx in txs { - self.fire(&tx, |watcher| watcher.finality_timeout(hash)); - } - } - } - } - - /// The block this transaction was included in has been retracted. 
- pub fn retracted(&mut self, block_hash: BlockHash) { - if let Some(hashes) = self.finality_watchers.remove(&block_hash) { - for hash in hashes { - self.fire(&hash, |watcher| watcher.retracted(block_hash)) - } - } - } - - /// Notify all watchers that transactions have been finalized - pub fn finalized(&mut self, block_hash: BlockHash) { - if let Some(hashes) = self.finality_watchers.remove(&block_hash) { - for (tx_index, hash) in hashes.into_iter().enumerate() { - log::debug!(target: LOG_TARGET, "[{:?}] Sent finalization event (block {:?})", hash, block_hash,); - self.fire(&hash, |watcher| watcher.finalized(block_hash, tx_index)) - } - } - } -} diff --git a/crates/client/transaction-pool/src/graph/mod.rs b/crates/client/transaction-pool/src/graph/mod.rs deleted file mode 100644 index f6d2f123fd..0000000000 --- a/crates/client/transaction-pool/src/graph/mod.rs +++ /dev/null @@ -1,45 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Generic Transaction Pool -//! -//! The pool is based on dependency graph between transactions -//! and their priority. -//! The pool is able to return an iterator that traverses transaction -//! graph in the correct order taking into account priorities and dependencies. - -#![warn(missing_docs)] -#![warn(unused_extern_crates)] - -mod future; -mod listener; -mod pool; -mod ready; -mod rotator; -mod tracked_map; -mod validated_pool; - -pub mod base_pool; -pub mod watcher; - -pub use validated_pool::{IsValidator, ValidatedTransaction}; - -pub use self::base_pool::Transaction; -pub use self::pool::{ - BlockHash, ChainApi, EventStream, ExtrinsicFor, ExtrinsicHash, NumberFor, Options, Pool, TransactionFor, -}; diff --git a/crates/client/transaction-pool/src/graph/pool.rs b/crates/client/transaction-pool/src/graph/pool.rs deleted file mode 100644 index e34b54439d..0000000000 --- a/crates/client/transaction-pool/src/graph/pool.rs +++ /dev/null @@ -1,440 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . 
- -use std::collections::HashMap; -use std::sync::Arc; -use std::time::{Duration, Instant}; - -use futures::channel::mpsc::Receiver; -use futures::Future; -use sc_transaction_pool::{Options as ScOptions, PoolLimit as ScPoolLimit}; -use sc_transaction_pool_api::error; -use sp_blockchain::TreeRoute; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{self, Block as BlockT, SaturatedConversion}; -use sp_runtime::transaction_validity::{ - TransactionSource, TransactionTag as Tag, TransactionValidity, TransactionValidityError, -}; - -use super::base_pool as base; -use super::validated_pool::{IsValidator, ValidatedPool, ValidatedTransaction}; -use super::watcher::Watcher; -use crate::LOG_TARGET; - -/// Modification notification event stream type; -pub type EventStream = Receiver; - -/// Block hash type for a pool. -pub type BlockHash = <::Block as traits::Block>::Hash; -/// Extrinsic hash type for a pool. -pub type ExtrinsicHash = <::Block as traits::Block>::Hash; -/// Extrinsic type for a pool. -pub type ExtrinsicFor = <::Block as traits::Block>::Extrinsic; -/// Block number type for the ChainApi -pub type NumberFor = traits::NumberFor<::Block>; -/// A type of transaction stored in the pool -pub type TransactionFor = Arc, ExtrinsicFor>>; -/// A type of validated transaction stored in the pool. -pub type ValidatedTransactionFor = ValidatedTransaction, ExtrinsicFor, ::Error>; - -/// Concrete extrinsic validation and query logic. -pub trait ChainApi: Send + Sync { - /// Block type. - type Block: BlockT; - /// Error type. - type Error: From + error::IntoPoolError; - /// Validate transaction future. - type ValidationFuture: Future> + Send + Unpin; - /// Body future (since block body might be remote) - type BodyFuture: Future::Extrinsic>>, Self::Error>> - + Unpin - + Send - + 'static; - - /// Verify extrinsic at given block. - fn validate_transaction( - &self, - at: &BlockId, - source: TransactionSource, - uxt: ExtrinsicFor, - ) -> Self::ValidationFuture; - - /// Returns a block number given the block id. - fn block_id_to_number(&self, at: &BlockId) -> Result>, Self::Error>; - - /// Returns a block hash given the block id. - fn block_id_to_hash(&self, at: &BlockId) - -> Result::Hash>, Self::Error>; - - /// Returns hash and encoding length of the extrinsic. - fn hash_and_length(&self, uxt: &ExtrinsicFor) -> (ExtrinsicHash, usize); - - /// Returns a block body given the block. - fn block_body(&self, at: ::Hash) -> Self::BodyFuture; - - /// Returns a block header given the block id. - fn block_header( - &self, - at: ::Hash, - ) -> Result::Header>, Self::Error>; - - /// Compute a tree-route between two blocks. See [`TreeRoute`] for more details. - fn tree_route( - &self, - from: ::Hash, - to: ::Hash, - ) -> Result, Self::Error>; -} - -/// Pool configuration options. -#[derive(Debug, Clone)] -pub struct Options { - /// Ready queue limits. - pub ready: base::Limit, - /// Future queue limits. - pub future: base::Limit, - /// Reject future transactions. - pub reject_future_transactions: bool, - /// How long the extrinsic is banned for. - pub ban_time: Duration, -} - -impl Default for Options { - fn default() -> Self { - Self { - ready: base::Limit { count: 8192, total_bytes: 20 * 1024 * 1024 }, - future: base::Limit { count: 512, total_bytes: 1024 * 1024 }, - reject_future_transactions: false, - ban_time: Duration::from_secs(60 * 30), - } - } -} - -// CONVERSIONS FROM Substrate Client types to our types - -/// Convert from Substrate Client's `PoolOptions` to our `Options`. 
-impl From for Options { - fn from(opts: ScOptions) -> Self { - Self { - ready: base::Limit::from(opts.ready), - future: base::Limit::from(opts.future), - reject_future_transactions: opts.reject_future_transactions, - ban_time: opts.ban_time, - } - } -} - -/// Convert from Substrate Client's `PoolLimit` to our `base::Limit`. -impl From for base::Limit { - fn from(value: ScPoolLimit) -> Self { - Self { count: value.count, total_bytes: value.total_bytes } - } -} - -/// Should we check that the transaction is banned -/// in the pool, before we verify it? -#[derive(Copy, Clone)] -enum CheckBannedBeforeVerify { - Yes, - No, -} - -/// Extrinsics pool that performs validation. -pub struct Pool { - validated_pool: Arc>, -} - -impl Pool { - /// Create a new transaction pool. - pub fn new(options: Options, is_validator: IsValidator, api: Arc) -> Self { - Self { validated_pool: Arc::new(ValidatedPool::new(options, is_validator, api)) } - } - - /// Imports a bunch of unverified extrinsics to the pool - pub async fn submit_at( - &self, - at: &BlockId, - source: TransactionSource, - xts: impl IntoIterator>, - ) -> Result, B::Error>>, B::Error> { - let xts = xts.into_iter().map(|xt| (source, xt)); - let validated_transactions = self.verify(at, xts, CheckBannedBeforeVerify::Yes).await?; - Ok(self.validated_pool.submit(validated_transactions.into_values())) - } - - /// Resubmit the given extrinsics to the pool. - /// - /// This does not check if a transaction is banned, before we verify it again. - pub async fn resubmit_at( - &self, - at: &BlockId, - source: TransactionSource, - xts: impl IntoIterator>, - ) -> Result, B::Error>>, B::Error> { - let xts = xts.into_iter().map(|xt| (source, xt)); - let validated_transactions = self.verify(at, xts, CheckBannedBeforeVerify::No).await?; - Ok(self.validated_pool.submit(validated_transactions.into_values())) - } - - /// Imports one unverified extrinsic to the pool - pub async fn submit_one( - &self, - at: &BlockId, - source: TransactionSource, - xt: ExtrinsicFor, - ) -> Result, B::Error> { - let res = self.submit_at(at, source, std::iter::once(xt)).await?.pop(); - res.expect("One extrinsic passed; one result returned; qed") - } - - /// Import a single extrinsic and starts to watch its progress in the pool. - pub async fn submit_and_watch( - &self, - at: &BlockId, - source: TransactionSource, - xt: ExtrinsicFor, - ) -> Result, ExtrinsicHash>, B::Error> { - let block_number = self.resolve_block_number(at)?; - let (_, tx) = self.verify_one(at, block_number, source, xt, CheckBannedBeforeVerify::Yes).await; - self.validated_pool.submit_and_watch(tx) - } - - /// Resubmit some transaction that were validated elsewhere. - pub fn resubmit(&self, revalidated_transactions: HashMap, ValidatedTransactionFor>) { - let now = Instant::now(); - self.validated_pool.resubmit(revalidated_transactions); - log::debug!( - target: LOG_TARGET, - "Resubmitted. Took {} ms. Status: {:?}", - now.elapsed().as_millis(), - self.validated_pool.status() - ); - } - - /// Prunes known ready transactions. - /// - /// Used to clear the pool from transactions that were part of recently imported block. - /// The main difference from the `prune` is that we do not revalidate any transactions - /// and ignore unknown passed hashes. 
- pub fn prune_known(&self, at: &BlockId, hashes: &[ExtrinsicHash]) -> Result<(), B::Error> { - // Get details of all extrinsics that are already in the pool - let in_pool_tags = self.validated_pool.extrinsics_tags(hashes).into_iter().flatten().flatten(); - - // Prune all transactions that provide given tags - let prune_status = self.validated_pool.prune_tags(in_pool_tags)?; - let pruned_transactions = hashes.iter().cloned().chain(prune_status.pruned.iter().map(|tx| tx.hash)); - self.validated_pool.fire_pruned(at, pruned_transactions) - } - - /// Prunes ready transactions. - /// - /// Used to clear the pool from transactions that were part of recently imported block. - /// To perform pruning we need the tags that each extrinsic provides and to avoid calling - /// into runtime too often we first lookup all extrinsics that are in the pool and get - /// their provided tags from there. Otherwise we query the runtime at the `parent` block. - pub async fn prune( - &self, - at: &BlockId, - parent: &BlockId, - extrinsics: &[ExtrinsicFor], - ) -> Result<(), B::Error> { - log::debug!(target: LOG_TARGET, "Starting pruning of block {:?} (extrinsics: {})", at, extrinsics.len()); - // Get details of all extrinsics that are already in the pool - let in_pool_hashes = extrinsics.iter().map(|extrinsic| self.hash_of(extrinsic)).collect::>(); - let in_pool_tags = self.validated_pool.extrinsics_tags(&in_pool_hashes); - - // Zip the ones from the pool with the full list (we get pairs `(Extrinsic, - // Option>)`) - let all = extrinsics.iter().zip(in_pool_tags.into_iter()); - - let mut future_tags = Vec::new(); - for (extrinsic, in_pool_tags) in all { - match in_pool_tags { - // reuse the tags for extrinsics that were found in the pool - Some(tags) => future_tags.extend(tags), - // if it's not found in the pool query the runtime at parent block - // to get validity info and tags that the extrinsic provides. - None => { - // Avoid validating block txs if the pool is empty - if !self.validated_pool.status().is_empty() { - let validity = self - .validated_pool - .api() - .validate_transaction(parent, TransactionSource::InBlock, extrinsic.clone()) - .await; - - if let Ok(Ok(validity)) = validity { - future_tags.extend(validity.provides); - } - } else { - log::trace!(target: LOG_TARGET, "txpool is empty, skipping validation for block {at:?}",); - } - } - } - } - - self.prune_tags(at, future_tags, in_pool_hashes).await - } - - /// Prunes ready transactions that provide given list of tags. - /// - /// Given tags are assumed to be always provided now, so all transactions - /// in the Future Queue that require that particular tag (and have other - /// requirements satisfied) are promoted to Ready Queue. - /// - /// Moreover for each provided tag we remove transactions in the pool that: - /// 1. Provide that tag directly - /// 2. Are a dependency of pruned transaction. - /// - /// Returns transactions that have been removed from the pool and must be reverified - /// before reinserting to the pool. - /// - /// By removing predecessor transactions as well we might actually end up - /// pruning too much, so all removed transactions are reverified against - /// the runtime (`validate_transaction`) to make sure they are invalid. - /// - /// However we avoid revalidating transactions that are contained within - /// the second parameter of `known_imported_hashes`. These transactions - /// (if pruned) are not revalidated and become temporarily banned to - /// prevent importing them in the (near) future. 
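The temporary-ban behaviour described above can be pictured as splitting the pruned set into "just included, do not revalidate" and "worth revalidating". In the deleted code the split is enforced by banning the hashes before verification rather than by an explicit partition, so the sketch below only shows the intent, with hypothetical names:

```rust
use std::collections::HashSet;

type Hash = u64;

/// Split the transactions removed by pruning into the ones we temporarily ban
/// (they were just included in a block, so re-importing them soon is pointless)
/// and the ones worth revalidating and possibly resubmitting.
fn partition_pruned(
    pruned: Vec<Hash>,
    known_imported: &HashSet<Hash>,
) -> (Vec<Hash>, Vec<Hash>) {
    pruned.into_iter().partition(|h| known_imported.contains(h))
}

fn main() {
    let known_imported: HashSet<Hash> = HashSet::from([1, 2]);
    let pruned = vec![1, 2, 3, 4]; // 3 and 4 were pruned as providers/predecessors
    let (banned, revalidate) = partition_pruned(pruned, &known_imported);
    assert_eq!(banned, vec![1, 2]);
    assert_eq!(revalidate, vec![3, 4]);
}
```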
- pub async fn prune_tags( - &self, - at: &BlockId, - tags: impl IntoIterator, - known_imported_hashes: impl IntoIterator> + Clone, - ) -> Result<(), B::Error> { - log::debug!(target: LOG_TARGET, "Pruning at {:?}", at); - // Prune all transactions that provide given tags - let prune_status = self.validated_pool.prune_tags(tags)?; - - // Make sure that we don't revalidate extrinsics that were part of the recently - // imported block. This is especially important for UTXO-like chains cause the - // inputs are pruned so such transaction would go to future again. - self.validated_pool.ban(&Instant::now(), known_imported_hashes.clone().into_iter()); - - // Try to re-validate pruned transactions since some of them might be still valid. - // note that `known_imported_hashes` will be rejected here due to temporary ban. - let pruned_hashes = prune_status.pruned.iter().map(|tx| tx.hash).collect::>(); - let pruned_transactions = prune_status.pruned.into_iter().map(|tx| (tx.source, tx.data.clone())); - - let reverified_transactions = self.verify(at, pruned_transactions, CheckBannedBeforeVerify::Yes).await?; - - log::trace!(target: LOG_TARGET, "Pruning at {:?}. Resubmitting transactions.", at); - // And finally - submit reverified transactions back to the pool - - self.validated_pool.resubmit_pruned( - at, - known_imported_hashes, - pruned_hashes, - reverified_transactions.into_values().collect(), - ) - } - - /// Returns transaction hash - pub fn hash_of(&self, xt: &ExtrinsicFor) -> ExtrinsicHash { - self.validated_pool.api().hash_and_length(xt).0 - } - - /// Resolves block number by id. - fn resolve_block_number(&self, at: &BlockId) -> Result, B::Error> { - self.validated_pool - .api() - .block_id_to_number(at) - .and_then(|number| number.ok_or_else(|| error::Error::InvalidBlockId(format!("{:?}", at)).into())) - } - - /// Returns future that validates a bunch of transactions at given block. - async fn verify( - &self, - at: &BlockId, - xts: impl IntoIterator)>, - check: CheckBannedBeforeVerify, - ) -> Result, ValidatedTransactionFor>, B::Error> { - // we need a block number to compute tx validity - let block_number = self.resolve_block_number(at)?; - - let res = futures::future::join_all( - xts.into_iter().map(|(source, xt)| self.verify_one(at, block_number, source, xt, check)), - ) - .await - .into_iter() - .collect::>(); - - Ok(res) - } - - /// Returns future that validates single transaction at given block. 
- async fn verify_one( - &self, - block_id: &BlockId, - block_number: NumberFor, - source: TransactionSource, - xt: ExtrinsicFor, - check: CheckBannedBeforeVerify, - ) -> (ExtrinsicHash, ValidatedTransactionFor) { - let (hash, bytes) = self.validated_pool.api().hash_and_length(&xt); - - let ignore_banned = matches!(check, CheckBannedBeforeVerify::No); - if let Err(err) = self.validated_pool.check_is_known(&hash, ignore_banned) { - return (hash, ValidatedTransaction::Invalid(hash, err)); - } - - let validation_result = self.validated_pool.api().validate_transaction(block_id, source, xt.clone()).await; - - let status = match validation_result { - Ok(status) => status, - Err(e) => return (hash, ValidatedTransaction::Invalid(hash, e)), - }; - - let validity = match status { - Ok(validity) => { - if validity.provides.is_empty() { - ValidatedTransaction::Invalid(hash, error::Error::NoTagsProvided.into()) - } else { - ValidatedTransaction::valid_at( - block_number.saturated_into::(), - hash, - source, - xt, - bytes, - validity, - ) - } - } - Err(TransactionValidityError::Invalid(e)) => { - ValidatedTransaction::Invalid(hash, error::Error::InvalidTransaction(e).into()) - } - Err(TransactionValidityError::Unknown(e)) => { - ValidatedTransaction::Unknown(hash, error::Error::UnknownTransaction(e).into()) - } - }; - - (hash, validity) - } - - /// get a reference to the underlying validated pool. - pub fn validated_pool(&self) -> &ValidatedPool { - &self.validated_pool - } -} - -impl Clone for Pool { - fn clone(&self) -> Self { - Self { validated_pool: self.validated_pool.clone() } - } -} diff --git a/crates/client/transaction-pool/src/graph/ready.rs b/crates/client/transaction-pool/src/graph/ready.rs deleted file mode 100644 index fa8dda64ff..0000000000 --- a/crates/client/transaction-pool/src/graph/ready.rs +++ /dev/null @@ -1,753 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -use std::collections::{BTreeSet, HashMap, HashSet}; -use std::sync::Arc; -use std::{cmp, hash}; - -use log::{debug, trace}; -use sc_transaction_pool_api::error; -use serde::Serialize; -use sp_runtime::traits::Member; -use sp_runtime::transaction_validity::TransactionTag as Tag; - -use super::base_pool::Transaction; -use super::future::WaitingTransaction; -use super::tracked_map::{self, TrackedMap}; -use crate::LOG_TARGET; - -type ArcTransaction = Arc>; -type ArcTransactions = Vec>; - -/// An in-pool transaction reference. -/// -/// Should be cheap to clone. -#[derive(Debug)] -pub struct TransactionRef { - /// The actual transaction data. - pub transaction: Arc>, - /// Unique id when transaction was inserted into the pool. 
- pub insertion_id: u64, -} - -impl Clone for TransactionRef { - fn clone(&self) -> Self { - Self { transaction: self.transaction.clone(), insertion_id: self.insertion_id } - } -} - -impl Ord for TransactionRef { - fn cmp(&self, other: &Self) -> cmp::Ordering { - self.transaction - .priority - .cmp(&other.transaction.priority) - .then_with(|| other.transaction.valid_till.cmp(&self.transaction.valid_till)) - .then_with(|| other.insertion_id.cmp(&self.insertion_id)) - } -} - -impl PartialOrd for TransactionRef { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl PartialEq for TransactionRef { - fn eq(&self, other: &Self) -> bool { - self.cmp(other) == cmp::Ordering::Equal - } -} -impl Eq for TransactionRef {} - -#[derive(Debug)] -pub struct ReadyTx { - /// A reference to a transaction - pub transaction: TransactionRef, - /// A list of transactions that get unlocked by this one - pub unlocks: Vec, - /// How many required tags are provided inherently - /// - /// Some transactions might be already pruned from the queue, - /// so when we compute ready set we may consider this transactions ready earlier. - pub requires_offset: usize, -} - -impl Clone for ReadyTx { - fn clone(&self) -> Self { - Self { - transaction: self.transaction.clone(), - unlocks: self.unlocks.clone(), - requires_offset: self.requires_offset, - } - } -} - -const HASH_READY: &str = r#" -Every time transaction is imported its hash is placed in `ready` map and tags in `provided_tags`; -Every time transaction is removed from the queue we remove the hash from `ready` map and from `provided_tags`; -Hence every hash retrieved from `provided_tags` is always present in `ready`; -qed -"#; - -/// Validated transactions that are block ready with all their dependencies met. -#[derive(Debug)] -pub struct ReadyTransactions { - /// Next free insertion id (used to indicate when a transaction was inserted into the pool). - insertion_id: u64, - /// tags that are provided by Ready transactions - /// (only a single transaction can provide a specific tag) - provided_tags: HashMap, - /// Transactions that are ready (i.e. don't have any requirements external to the pool) - ready: TrackedMap>, - /// Best transactions that are ready to be included to the block without any other previous - /// transaction. - best: BTreeSet>, -} - -impl tracked_map::Size for ReadyTx { - fn size(&self) -> usize { - self.transaction.transaction.bytes - } -} - -impl Default for ReadyTransactions { - fn default() -> Self { - Self { - insertion_id: Default::default(), - provided_tags: Default::default(), - ready: Default::default(), - best: Default::default(), - } - } -} - -impl ReadyTransactions { - /// Borrows a map of tags that are provided by transactions in this queue. - pub fn provided_tags(&self) -> &HashMap { - &self.provided_tags - } - - /// Returns an iterator of ready transactions. - /// - /// Transactions are returned in order: - /// 1. First by the dependencies: - /// - never return transaction that requires a tag, which was not provided by one of the - /// previously - /// returned transactions - /// 2. Then by priority: - /// - If there are two transactions with all requirements satisfied the one with higher priority - /// goes first. - /// 3. Then by the ttl that's left - /// - transactions that are valid for a shorter time go first - /// 4. 
Lastly we sort by the time in the queue - /// - transactions that are longer in the queue go first - /// - /// The iterator is providing a way to report transactions that the receiver considers invalid. - /// In such case the entire subgraph of transactions that depend on the reported one will be - /// skipped. - pub fn get(&self) -> BestIterator { - BestIterator { - all: self.ready.clone_map(), - best: self.best.clone(), - awaiting: Default::default(), - invalid: Default::default(), - } - } - - /// Imports transactions to the pool of ready transactions. - /// - /// The transaction needs to have all tags satisfied (be ready) by transactions - /// that are in this queue. - /// Returns transactions that were replaced by the one imported. - pub fn import(&mut self, tx: WaitingTransaction) -> error::Result>>> { - assert!(tx.is_ready(), "Only ready transactions can be imported. Missing: {:?}", tx.missing_tags); - assert!(!self.ready.read().contains_key(&tx.transaction.hash), "Transaction is already imported."); - - self.insertion_id += 1; - let insertion_id = self.insertion_id; - let hash = tx.transaction.hash.clone(); - let transaction = tx.transaction; - - let (replaced, unlocks) = self.replace_previous(&transaction)?; - - let mut goes_to_best = true; - let mut ready = self.ready.write(); - let mut requires_offset = 0; - // Add links to transactions that unlock the current one - for tag in &transaction.requires { - // Check if the transaction that satisfies the tag is still in the queue. - if let Some(other) = self.provided_tags.get(tag) { - let tx = ready.get_mut(other).expect(HASH_READY); - tx.unlocks.push(hash.clone()); - // this transaction depends on some other, so it doesn't go to best directly. - goes_to_best = false; - } else { - requires_offset += 1; - } - } - - // update provided_tags - // call to replace_previous guarantees that we will be overwriting - // only entries that have been removed. - for tag in &transaction.provides { - self.provided_tags.insert(tag.clone(), hash.clone()); - } - - let transaction = TransactionRef { insertion_id, transaction }; - - // insert to best if it doesn't require any other transaction to be included before it - if goes_to_best { - self.best.insert(transaction.clone()); - } - - // insert to Ready - ready.insert(hash, ReadyTx { transaction, unlocks, requires_offset }); - - Ok(replaced) - } - - /// Fold a list of ready transactions to compute a single value. - pub fn fold, &ReadyTx) -> Option>(&mut self, f: F) -> Option { - self.ready.read().values().fold(None, f) - } - - /// Returns true if given transaction is part of the queue. - pub fn contains(&self, hash: &Hash) -> bool { - self.ready.read().contains_key(hash) - } - - /// Retrieve transaction by hash - pub fn by_hash(&self, hash: &Hash) -> Option>> { - self.by_hashes(&[hash.clone()]).into_iter().next().unwrap_or(None) - } - - /// Retrieve transactions by hash - pub fn by_hashes(&self, hashes: &[Hash]) -> Vec>>> { - let ready = self.ready.read(); - hashes.iter().map(|hash| ready.get(hash).map(|x| x.transaction.transaction.clone())).collect() - } - - /// Removes a subtree of transactions from the ready pool. - /// - /// NOTE removing a transaction will also cause a removal of all transactions that depend on - /// that one (i.e. the entire subgraph that this transaction is a start of will be removed). - /// All removed transactions are returned. 
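For readers skimming the deleted graph code: `import` above decides whether a newly ready transaction can be served immediately by checking each tag in `requires` against `provided_tags`; a tag with no in-pool provider only bumps `requires_offset`, while a matched tag links the new transaction into the provider's `unlocks` list and keeps it out of `best`. A minimal, self-contained sketch of that tag bookkeeping, using a hypothetical `SimpleTx` with plain `String` tags rather than the pool's generic types:

use std::collections::{HashMap, HashSet};

// Hypothetical, simplified transaction: `requires`/`provides` are plain string tags.
struct SimpleTx {
    hash: u64,
    requires: Vec<String>,
    provides: Vec<String>,
}

/// Decide whether `tx` can go straight to the "best" (ready) set, mirroring the
/// tag bookkeeping in `ReadyTransactions::import` above: a transaction is ready
/// immediately only if none of its required tags is provided by a transaction
/// that is still sitting in the ready queue.
fn goes_to_best(tx: &SimpleTx, provided_tags: &HashMap<String, u64>) -> (bool, usize) {
    let mut requires_offset = 0; // tags satisfied "inherently" (their provider is already gone)
    let mut depends_on: HashSet<u64> = HashSet::new();
    for tag in &tx.requires {
        match provided_tags.get(tag) {
            Some(provider) => { depends_on.insert(*provider); } // will be unlocked by `provider`
            None => requires_offset += 1,
        }
    }
    (depends_on.is_empty(), requires_offset)
}

fn main() {
    let mut provided_tags = HashMap::new();
    provided_tags.insert("nonce:1".to_string(), 1u64); // tx 1 provides "nonce:1"

    let tx2 = SimpleTx { hash: 2, requires: vec!["nonce:1".into()], provides: vec!["nonce:2".into()] };
    let (best, offset) = goes_to_best(&tx2, &provided_tags);
    assert!(!best && offset == 0); // tx 2 waits until tx 1 is included

    // Register tx 2's own tags so later transactions can depend on it.
    for tag in &tx2.provides {
        provided_tags.insert(tag.clone(), tx2.hash);
    }
}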
- pub fn remove_subtree(&mut self, hashes: &[Hash]) -> Vec>> { - let to_remove = hashes.to_vec(); - self.remove_subtree_with_tag_filter(to_remove, None) - } - - /// Removes a subtrees of transactions trees starting from roots given in `to_remove`. - /// - /// We proceed with a particular branch only if there is at least one provided tag - /// that is not part of `provides_tag_filter`. I.e. the filter contains tags - /// that will stay in the pool, so that we can early exit and avoid descending. - fn remove_subtree_with_tag_filter( - &mut self, - mut to_remove: Vec, - provides_tag_filter: Option>, - ) -> Vec>> { - let mut removed = vec![]; - let mut ready = self.ready.write(); - while let Some(hash) = to_remove.pop() { - if let Some(mut tx) = ready.remove(&hash) { - let invalidated = - tx.transaction.transaction.provides.iter().filter(|tag| { - provides_tag_filter.as_ref().map(|filter| !filter.contains(&**tag)).unwrap_or(true) - }); - - let mut removed_some_tags = false; - // remove entries from provided_tags - for tag in invalidated { - removed_some_tags = true; - self.provided_tags.remove(tag); - } - - // remove from unlocks - for tag in &tx.transaction.transaction.requires { - if let Some(hash) = self.provided_tags.get(tag) { - if let Some(tx) = ready.get_mut(hash) { - remove_item(&mut tx.unlocks, hash); - } - } - } - - // remove from best - self.best.remove(&tx.transaction); - - if removed_some_tags { - // remove all transactions that the current one unlocks - to_remove.append(&mut tx.unlocks); - } - - // add to removed - trace!(target: LOG_TARGET, "[{:?}] Removed as part of the subtree.", hash); - removed.push(tx.transaction.transaction); - } - } - - removed - } - - /// Removes transactions that provide given tag. - /// - /// All transactions that lead to a transaction, which provides this tag - /// are going to be removed from the queue, but no other transactions are touched - - /// i.e. all other subgraphs starting from given tag are still considered valid & ready. - pub fn prune_tags(&mut self, tag: Tag) -> Vec>> { - let mut removed = vec![]; - let mut to_remove = vec![tag]; - - while let Some(tag) = to_remove.pop() { - let res = self.provided_tags.remove(&tag).and_then(|hash| self.ready.write().remove(&hash)); - - if let Some(tx) = res { - let unlocks = tx.unlocks; - - // Make sure we remove it from best txs - self.best.remove(&tx.transaction); - - let tx = tx.transaction.transaction; - - // prune previous transactions as well - { - let hash = &tx.hash; - let mut ready = self.ready.write(); - let mut find_previous = |tag| -> Option> { - let prev_hash = self.provided_tags.get(tag)?; - let tx2 = ready.get_mut(prev_hash)?; - remove_item(&mut tx2.unlocks, hash); - // We eagerly prune previous transactions as well. - // But it might not always be good. - // Possible edge case: - // - tx provides two tags - // - the second tag enables some subgraph we don't know of yet - // - we will prune the transaction - // - when we learn about the subgraph it will go to future - // - we will have to wait for re-propagation of that transaction - // Alternatively the caller may attempt to re-import these transactions. 
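`remove_subtree_with_tag_filter` above is a plain work-list traversal over the `unlocks` edges: removing a transaction queues everything it would have unlocked, unless every tag it provides is kept by the filter. A standalone sketch of the traversal (tag filtering omitted, hypothetical `Entry` type):

use std::collections::HashMap;

// Hypothetical ready-queue entry: the hashes this transaction unlocks.
struct Entry {
    unlocks: Vec<u64>,
}

/// Remove `roots` and, transitively, everything they unlock, mirroring the
/// work-list style of `remove_subtree_with_tag_filter` above.
fn remove_subtree(ready: &mut HashMap<u64, Entry>, roots: &[u64]) -> Vec<u64> {
    let mut to_remove: Vec<u64> = roots.to_vec();
    let mut removed = Vec::new();
    while let Some(hash) = to_remove.pop() {
        if let Some(mut entry) = ready.remove(&hash) {
            // Everything this transaction unlocked now has an unsatisfied dependency.
            to_remove.append(&mut entry.unlocks);
            removed.push(hash);
        }
    }
    removed
}

fn main() {
    let mut ready = HashMap::new();
    ready.insert(1, Entry { unlocks: vec![2] });
    ready.insert(2, Entry { unlocks: vec![3] });
    ready.insert(3, Entry { unlocks: vec![] });
    ready.insert(4, Entry { unlocks: vec![] }); // unrelated, stays in the pool

    let removed = remove_subtree(&mut ready, &[1]);
    assert_eq!(removed, vec![1, 2, 3]);
    assert!(ready.contains_key(&4));
}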
- if tx2.unlocks.is_empty() { Some(tx2.transaction.transaction.provides.clone()) } else { None } - }; - - // find previous transactions - for tag in &tx.requires { - if let Some(mut tags_to_remove) = find_previous(tag) { - to_remove.append(&mut tags_to_remove); - } - } - } - - // add the transactions that just got unlocked to `best` - for hash in unlocks { - if let Some(tx) = self.ready.write().get_mut(&hash) { - tx.requires_offset += 1; - // this transaction is ready - if tx.requires_offset == tx.transaction.transaction.requires.len() { - self.best.insert(tx.transaction.clone()); - } - } - } - - // we also need to remove all other tags that this transaction provides, - // but since all the hard work is done, we only clear the provided_tag -> hash - // mapping. - let current_tag = &tag; - for tag in &tx.provides { - let removed = self.provided_tags.remove(tag); - assert_eq!( - removed.as_ref(), - if current_tag == tag { None } else { Some(&tx.hash) }, - "The pool contains exactly one transaction providing given tag; the removed transaction - claims to provide that tag, so it has to be mapped to it's hash; qed" - ); - } - - removed.push(tx); - } - } - - removed - } - - /// Checks if the transaction is providing the same tags as other transactions. - /// - /// In case that's true it determines if the priority of transactions that - /// we are about to replace is lower than the priority of the replacement transaction. - /// We remove/replace old transactions in case they have lower priority. - /// - /// In case replacement is successful returns a list of removed transactions - /// and a list of hashes that are still in pool and gets unlocked by the new transaction. - fn replace_previous( - &mut self, - tx: &Transaction, - ) -> error::Result<(ArcTransactions, Vec)> { - let (to_remove, unlocks) = { - // check if we are replacing a transaction - let replace_hashes = - tx.provides.iter().filter_map(|tag| self.provided_tags.get(tag)).collect::>(); - - // early exit if we are not replacing anything. - if replace_hashes.is_empty() { - return Ok((vec![], vec![])); - } - - // now check if collective priority is lower than the replacement transaction. - let old_priority = { - let ready = self.ready.read(); - replace_hashes - .iter() - .filter_map(|hash| ready.get(hash)) - .fold(0u64, |total, tx| total.saturating_add(tx.transaction.transaction.priority)) - }; - - // bail - the transaction has too low priority to replace the old ones - if old_priority >= tx.priority { - return Err(error::Error::TooLowPriority { old: old_priority, new: tx.priority }); - } - - // construct a list of unlocked transactions - let unlocks = { - let ready = self.ready.read(); - replace_hashes.iter().filter_map(|hash| ready.get(hash)).fold(vec![], |mut list, tx| { - list.extend(tx.unlocks.iter().cloned()); - list - }) - }; - - (replace_hashes.into_iter().cloned().collect::>(), unlocks) - }; - - let new_provides = tx.provides.iter().cloned().collect::>(); - let removed = self.remove_subtree_with_tag_filter(to_remove, Some(new_provides)); - - Ok((removed, unlocks)) - } - - /// Returns number of transactions in this queue. - pub fn len(&self) -> usize { - self.ready.len() - } - - /// Returns sum of encoding lengths of all transactions in this queue. - pub fn bytes(&self) -> usize { - self.ready.bytes() - } -} - -/// Iterator of ready transactions ordered by priority. 
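The iterator declared below consumes a `BTreeSet` ordered by the `Ord` impl on `TransactionRef` shown earlier: higher priority wins, then the shorter remaining longevity, then the earlier insertion id. A compact sketch of an equivalent comparator over a hypothetical `Ref` type, taking the greatest element the way `BestIterator::next` does:

use std::cmp::Ordering;
use std::collections::BTreeSet;

// Hypothetical stand-in for TransactionRef.
#[derive(Debug, Clone)]
struct Ref {
    priority: u64,
    valid_till: u64,
    insertion_id: u64,
    label: &'static str,
}

impl Ord for Ref {
    fn cmp(&self, other: &Self) -> Ordering {
        self.priority
            .cmp(&other.priority)                                   // higher priority is "greater"
            .then_with(|| other.valid_till.cmp(&self.valid_till))   // shorter longevity is "greater"
            .then_with(|| other.insertion_id.cmp(&self.insertion_id)) // earlier insertion is "greater"
    }
}

impl PartialOrd for Ref {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl PartialEq for Ref {
    fn eq(&self, other: &Self) -> bool {
        self.cmp(other) == Ordering::Equal
    }
}
impl Eq for Ref {}

fn main() {
    let mut best = BTreeSet::new();
    best.insert(Ref { priority: 1, valid_till: 10, insertion_id: 3, label: "old, low prio" });
    best.insert(Ref { priority: 5, valid_till: 10, insertion_id: 2, label: "high prio" });
    best.insert(Ref { priority: 5, valid_till: 4, insertion_id: 1, label: "high prio, expiring" });

    // Like BestIterator::next, take the "greatest" element first.
    let first = best.iter().next_back().unwrap();
    assert_eq!(first.label, "high prio, expiring");
}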
-pub struct BestIterator { - all: HashMap>, - awaiting: HashMap)>, - best: BTreeSet>, - invalid: HashSet, -} - -impl BestIterator { - /// Depending on number of satisfied requirements insert given ref - /// either to awaiting set or to best set. - fn best_or_awaiting(&mut self, satisfied: usize, tx_ref: TransactionRef) { - if satisfied >= tx_ref.transaction.requires.len() { - // If we have satisfied all deps insert to best - self.best.insert(tx_ref); - } else { - // otherwise we're still awaiting for some deps - self.awaiting.insert(tx_ref.transaction.hash.clone(), (satisfied, tx_ref)); - } - } -} - -impl sc_transaction_pool_api::ReadyTransactions for BestIterator { - fn report_invalid(&mut self, tx: &Self::Item) { - BestIterator::report_invalid(self, tx) - } -} - -impl BestIterator { - /// Report given transaction as invalid. - /// - /// As a consequence, all values that depend on the invalid one will be skipped. - /// When given transaction is not in the pool it has no effect. - /// When invoked on a fully drained iterator it has no effect either. - pub fn report_invalid(&mut self, tx: &Arc>) { - if let Some(to_report) = self.all.get(&tx.hash) { - debug!( - target: LOG_TARGET, - "[{:?}] Reported as invalid. Will skip sub-chains while iterating.", - to_report.transaction.transaction.hash - ); - for hash in &to_report.unlocks { - self.invalid.insert(hash.clone()); - } - } - } -} - -impl Iterator for BestIterator { - type Item = Arc>; - - fn next(&mut self) -> Option { - loop { - let best = self.best.iter().next_back()?.clone(); - let best = self.best.take(&best)?; - let hash = &best.transaction.hash; - - // Check if the transaction was marked invalid. - if self.invalid.contains(hash) { - debug!(target: LOG_TARGET, "[{:?}] Skipping invalid child transaction while iterating.", hash,); - continue; - } - - let ready = match self.all.get(hash).cloned() { - Some(ready) => ready, - // The transaction is not in all, maybe it was removed in the meantime? - None => continue, - }; - - // Insert transactions that just got unlocked. 
- for hash in &ready.unlocks { - // first check local awaiting transactions - let res = if let Some((mut satisfied, tx_ref)) = self.awaiting.remove(hash) { - satisfied += 1; - Some((satisfied, tx_ref)) - // then get from the pool - } else { - self.all.get(hash).map(|next| (next.requires_offset + 1, next.transaction.clone())) - }; - if let Some((satisfied, tx_ref)) = res { - self.best_or_awaiting(satisfied, tx_ref) - } - } - - return Some(best.transaction); - } - } -} - -// See: https://github.com/rust-lang/rust/issues/40062 -fn remove_item(vec: &mut Vec, item: &T) { - if let Some(idx) = vec.iter().position(|i| i == item) { - vec.swap_remove(idx); - } -} - -#[cfg(test)] -mod tests { - use sp_runtime::transaction_validity::TransactionSource as Source; - - use super::*; - - fn tx(id: u8) -> Transaction> { - Transaction { - data: vec![id], - bytes: 1, - hash: id as u64, - priority: 1, - valid_till: 2, - requires: vec![vec![1], vec![2]], - provides: vec![vec![3], vec![4]], - propagate: true, - source: Source::External, - } - } - - fn import( - ready: &mut ReadyTransactions, - tx: Transaction, - ) -> error::Result>>> { - let x = WaitingTransaction::new(tx, ready.provided_tags(), &[]); - ready.import(x) - } - - #[test] - fn should_replace_transaction_that_provides_the_same_tag() { - // given - let mut ready = ReadyTransactions::default(); - let mut tx1 = tx(1); - tx1.requires.clear(); - let mut tx2 = tx(2); - tx2.requires.clear(); - tx2.provides = vec![vec![3]]; - let mut tx3 = tx(3); - tx3.requires.clear(); - tx3.provides = vec![vec![4]]; - - // when - import(&mut ready, tx2).unwrap(); - import(&mut ready, tx3).unwrap(); - assert_eq!(ready.get().count(), 2); - - // too low priority - import(&mut ready, tx1.clone()).unwrap_err(); - - tx1.priority = 10; - import(&mut ready, tx1).unwrap(); - - // then - assert_eq!(ready.get().count(), 1); - } - - #[test] - fn should_replace_multiple_transactions_correctly() { - // given - let mut ready = ReadyTransactions::default(); - let mut tx0 = tx(0); - tx0.requires = vec![]; - tx0.provides = vec![vec![0]]; - let mut tx1 = tx(1); - tx1.requires = vec![]; - tx1.provides = vec![vec![1]]; - let mut tx2 = tx(2); - tx2.requires = vec![vec![0], vec![1]]; - tx2.provides = vec![vec![2], vec![3]]; - let mut tx3 = tx(3); - tx3.requires = vec![vec![2]]; - tx3.provides = vec![vec![4]]; - let mut tx4 = tx(4); - tx4.requires = vec![vec![3]]; - tx4.provides = vec![vec![5]]; - // replacement - let mut tx2_2 = tx(5); - tx2_2.requires = vec![vec![0], vec![1]]; - tx2_2.provides = vec![vec![2]]; - tx2_2.priority = 10; - - for tx in vec![tx0, tx1, tx2, tx3, tx4] { - import(&mut ready, tx).unwrap(); - } - assert_eq!(ready.get().count(), 5); - - // when - import(&mut ready, tx2_2).unwrap(); - - // then - assert_eq!(ready.get().count(), 3); - } - - /// Populate the pool, with a graph that looks like so: - /// - /// tx1 -> tx2 \ - /// -> -> tx3 - /// -> tx4 -> tx5 -> tx6 - /// -> tx7 - fn populate_pool(ready: &mut ReadyTransactions>) { - let mut tx1 = tx(1); - tx1.requires.clear(); - let mut tx2 = tx(2); - tx2.requires = tx1.provides.clone(); - tx2.provides = vec![vec![106]]; - let mut tx3 = tx(3); - tx3.requires = vec![tx1.provides[0].clone(), vec![106]]; - tx3.provides = vec![]; - let mut tx4 = tx(4); - tx4.requires = vec![tx1.provides[0].clone()]; - tx4.provides = vec![vec![107]]; - let mut tx5 = tx(5); - tx5.requires = vec![tx4.provides[0].clone()]; - tx5.provides = vec![vec![108]]; - let mut tx6 = tx(6); - tx6.requires = vec![tx5.provides[0].clone()]; - tx6.provides = vec![]; - 
let tx7 = Transaction { - data: vec![7], - bytes: 1, - hash: 7, - priority: 1, - valid_till: u64::MAX, // use the max here for testing. - requires: vec![tx1.provides[0].clone()], - provides: vec![], - propagate: true, - source: Source::External, - }; - - // when - for tx in vec![tx1, tx2, tx3, tx7, tx4, tx5, tx6] { - import(ready, tx).unwrap(); - } - - assert_eq!(ready.best.len(), 1); - } - - #[test] - fn should_return_best_transactions_in_correct_order() { - // given - let mut ready = ReadyTransactions::default(); - populate_pool(&mut ready); - - // when - let mut it = ready.get().map(|tx| tx.data[0]); - - // then - assert_eq!(it.next(), Some(1)); - assert_eq!(it.next(), Some(2)); - assert_eq!(it.next(), Some(3)); - assert_eq!(it.next(), Some(4)); - assert_eq!(it.next(), Some(5)); - assert_eq!(it.next(), Some(6)); - assert_eq!(it.next(), Some(7)); - assert_eq!(it.next(), None); - } - - #[test] - fn should_order_refs() { - let mut id = 1; - let mut with_priority = |priority, longevity| { - id += 1; - let mut tx = tx(id); - tx.priority = priority; - tx.valid_till = longevity; - tx - }; - // higher priority = better - assert!( - TransactionRef { transaction: Arc::new(with_priority(3, 3)), insertion_id: 1 } - > TransactionRef { transaction: Arc::new(with_priority(2, 3)), insertion_id: 2 } - ); - // lower validity = better - assert!( - TransactionRef { transaction: Arc::new(with_priority(3, 2)), insertion_id: 1 } - > TransactionRef { transaction: Arc::new(with_priority(3, 3)), insertion_id: 2 } - ); - // lower insertion_id = better - assert!( - TransactionRef { transaction: Arc::new(with_priority(3, 3)), insertion_id: 1 } - > TransactionRef { transaction: Arc::new(with_priority(3, 3)), insertion_id: 2 } - ); - } - - #[test] - fn should_skip_invalid_transactions_while_iterating() { - // given - let mut ready = ReadyTransactions::default(); - populate_pool(&mut ready); - - // when - let mut it = ready.get(); - let data = |tx: &Arc>>| tx.data[0]; - - // then - assert_eq!(it.next().as_ref().map(data), Some(1)); - assert_eq!(it.next().as_ref().map(data), Some(2)); - assert_eq!(it.next().as_ref().map(data), Some(3)); - let tx4 = it.next(); - assert_eq!(tx4.as_ref().map(data), Some(4)); - // report 4 as invalid, which should skip 5 & 6. - it.report_invalid(&tx4.unwrap()); - assert_eq!(it.next().as_ref().map(data), Some(7)); - assert_eq!(it.next().as_ref().map(data), None); - } -} diff --git a/crates/client/transaction-pool/src/graph/rotator.rs b/crates/client/transaction-pool/src/graph/rotator.rs deleted file mode 100644 index 8887b1bc8a..0000000000 --- a/crates/client/transaction-pool/src/graph/rotator.rs +++ /dev/null @@ -1,212 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Rotate extrinsic inside the pool. -//! -//! 
Keeps only recent extrinsic and discard the ones kept for a significant amount of time. -//! Discarded extrinsics are banned so that they don't get re-imported again. - -use std::collections::HashMap; -use std::time::{Duration, Instant}; -use std::{hash, iter}; - -use parking_lot::RwLock; - -use super::base_pool::Transaction; - -/// Expected size of the banned extrinsics cache. -const EXPECTED_SIZE: usize = 2048; - -/// Pool rotator is responsible to only keep fresh extrinsics in the pool. -/// -/// Extrinsics that occupy the pool for too long are culled and temporarily banned from entering -/// the pool again. -pub struct PoolRotator { - /// How long the extrinsic is banned for. - ban_time: Duration, - /// Currently banned extrinsics. - banned_until: RwLock>, -} - -impl Default for PoolRotator { - fn default() -> Self { - Self { ban_time: Duration::from_secs(60 * 30), banned_until: Default::default() } - } -} - -impl PoolRotator { - /// New rotator instance with specified ban time. - pub fn new(ban_time: Duration) -> Self { - Self { ban_time, banned_until: Default::default() } - } - - /// Returns `true` if extrinsic hash is currently banned. - pub fn is_banned(&self, hash: &Hash) -> bool { - self.banned_until.read().contains_key(hash) - } - - /// Bans given set of hashes. - pub fn ban(&self, now: &Instant, hashes: impl IntoIterator) { - let mut banned = self.banned_until.write(); - - for hash in hashes { - banned.insert(hash, *now + self.ban_time); - } - - if banned.len() > 2 * EXPECTED_SIZE { - while banned.len() > EXPECTED_SIZE { - if let Some(key) = banned.keys().next().cloned() { - banned.remove(&key); - } - } - } - } - - /// Bans extrinsic if it's stale. - /// - /// Returns `true` if extrinsic is stale and got banned. - pub fn ban_if_stale(&self, now: &Instant, current_block: u64, xt: &Transaction) -> bool { - if xt.valid_till > current_block { - return false; - } - - self.ban(now, iter::once(xt.hash.clone())); - true - } - - /// Removes timed bans. 
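The rotator above bans an extrinsic once its `valid_till` block has been reached and drops the ban again after `ban_time`; `clear_timeouts` (whose body follows) simply retains the entries whose deadline is still in the future. A reduced, std-only sketch of that mechanism with hypothetical names and no generics:

use std::collections::HashMap;
use std::time::{Duration, Instant};

/// Minimal stand-in for PoolRotator: hash -> time until which the hash is banned.
struct Rotator {
    ban_time: Duration,
    banned_until: HashMap<u64, Instant>,
}

impl Rotator {
    fn new(ban_time: Duration) -> Self {
        Self { ban_time, banned_until: HashMap::new() }
    }

    fn is_banned(&self, hash: &u64) -> bool {
        self.banned_until.contains_key(hash)
    }

    /// Ban the transaction if it is stale, i.e. its `valid_till` block has passed.
    fn ban_if_stale(&mut self, now: Instant, current_block: u64, hash: u64, valid_till: u64) -> bool {
        if valid_till > current_block {
            return false;
        }
        self.banned_until.insert(hash, now + self.ban_time);
        true
    }

    /// Drop bans whose deadline has passed (the `clear_timeouts` step above).
    fn clear_timeouts(&mut self, now: Instant) {
        self.banned_until.retain(|_, until| *until >= now);
    }
}

fn main() {
    let mut rotator = Rotator::new(Duration::from_secs(30 * 60));
    let now = Instant::now();
    assert!(!rotator.ban_if_stale(now, 0, 5, 1)); // valid_till = 1 > block 0: still fresh
    assert!(rotator.ban_if_stale(now, 1, 5, 1));  // block 1 reached: stale, ban it
    assert!(rotator.is_banned(&5));
    rotator.clear_timeouts(now + Duration::from_secs(3600));
    assert!(!rotator.is_banned(&5));
}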
- pub fn clear_timeouts(&self, now: &Instant) { - let mut banned = self.banned_until.write(); - - banned.retain(|_, &mut v| v >= *now); - } -} - -#[cfg(test)] -mod tests { - use sp_runtime::transaction_validity::TransactionSource; - - use super::*; - - type Hash = u64; - type Ex = (); - - fn rotator() -> PoolRotator { - PoolRotator { ban_time: Duration::from_millis(10), ..Default::default() } - } - - fn tx() -> (Hash, Transaction) { - let hash = 5u64; - let tx = Transaction { - data: (), - bytes: 1, - hash, - priority: 5, - valid_till: 1, - requires: vec![], - provides: vec![], - propagate: true, - source: TransactionSource::External, - }; - - (hash, tx) - } - - #[test] - fn should_not_ban_if_not_stale() { - // given - let (hash, tx) = tx(); - let rotator = rotator(); - assert!(!rotator.is_banned(&hash)); - let now = Instant::now(); - let past_block = 0; - - // when - assert!(!rotator.ban_if_stale(&now, past_block, &tx)); - - // then - assert!(!rotator.is_banned(&hash)); - } - - #[test] - fn should_ban_stale_extrinsic() { - // given - let (hash, tx) = tx(); - let rotator = rotator(); - assert!(!rotator.is_banned(&hash)); - - // when - assert!(rotator.ban_if_stale(&Instant::now(), 1, &tx)); - - // then - assert!(rotator.is_banned(&hash)); - } - - #[test] - fn should_clear_banned() { - // given - let (hash, tx) = tx(); - let rotator = rotator(); - assert!(rotator.ban_if_stale(&Instant::now(), 1, &tx)); - assert!(rotator.is_banned(&hash)); - - // when - let future = Instant::now() + rotator.ban_time + rotator.ban_time; - rotator.clear_timeouts(&future); - - // then - assert!(!rotator.is_banned(&hash)); - } - - #[test] - fn should_garbage_collect() { - // given - fn tx_with(i: u64, valid_till: u64) -> Transaction { - let hash = i; - Transaction { - data: (), - bytes: 2, - hash, - priority: 5, - valid_till, - requires: vec![], - provides: vec![], - propagate: true, - source: TransactionSource::External, - } - } - - let rotator = rotator(); - - let now = Instant::now(); - let past_block = 0; - - // when - for i in 0..2 * EXPECTED_SIZE { - let tx = tx_with(i as u64, past_block); - assert!(rotator.ban_if_stale(&now, past_block, &tx)); - } - assert_eq!(rotator.banned_until.read().len(), 2 * EXPECTED_SIZE); - - // then - let tx = tx_with(2 * EXPECTED_SIZE as u64, past_block); - // trigger a garbage collection - assert!(rotator.ban_if_stale(&now, past_block, &tx)); - assert_eq!(rotator.banned_until.read().len(), EXPECTED_SIZE); - } -} diff --git a/crates/client/transaction-pool/src/graph/tracked_map.rs b/crates/client/transaction-pool/src/graph/tracked_map.rs deleted file mode 100644 index e7880522c0..0000000000 --- a/crates/client/transaction-pool/src/graph/tracked_map.rs +++ /dev/null @@ -1,167 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . 
- -use std::collections::HashMap; -use std::sync::atomic::{AtomicIsize, Ordering as AtomicOrdering}; -use std::sync::Arc; - -use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard}; - -/// Something that can report its size. -pub trait Size { - fn size(&self) -> usize; -} - -/// Map with size tracking. -/// -/// Size reported might be slightly off and only approximately true. -#[derive(Debug)] -pub struct TrackedMap { - index: Arc>>, - bytes: AtomicIsize, - length: AtomicIsize, -} - -impl Default for TrackedMap { - fn default() -> Self { - Self { index: Arc::new(HashMap::default().into()), bytes: 0.into(), length: 0.into() } - } -} - -impl TrackedMap { - /// Current tracked length of the content. - pub fn len(&self) -> usize { - std::cmp::max(self.length.load(AtomicOrdering::Relaxed), 0) as usize - } - - /// Current sum of content length. - pub fn bytes(&self) -> usize { - std::cmp::max(self.bytes.load(AtomicOrdering::Relaxed), 0) as usize - } - - /// Lock map for read. - pub fn read(&self) -> TrackedMapReadAccess { - TrackedMapReadAccess { inner_guard: self.index.read() } - } - - /// Lock map for write. - pub fn write(&self) -> TrackedMapWriteAccess { - TrackedMapWriteAccess { inner_guard: self.index.write(), bytes: &self.bytes, length: &self.length } - } -} - -impl TrackedMap { - /// Clone the inner map. - pub fn clone_map(&self) -> HashMap { - self.index.read().clone() - } -} - -pub struct TrackedMapReadAccess<'a, K, V> { - inner_guard: RwLockReadGuard<'a, HashMap>, -} - -impl<'a, K, V> TrackedMapReadAccess<'a, K, V> -where - K: Eq + std::hash::Hash, -{ - /// Returns true if map contains key. - pub fn contains_key(&self, key: &K) -> bool { - self.inner_guard.contains_key(key) - } - - /// Returns reference to the contained value by key, if exists. - pub fn get(&self, key: &K) -> Option<&V> { - self.inner_guard.get(key) - } - - /// Returns iterator over all values. - pub fn values(&self) -> std::collections::hash_map::Values { - self.inner_guard.values() - } -} - -pub struct TrackedMapWriteAccess<'a, K, V> { - bytes: &'a AtomicIsize, - length: &'a AtomicIsize, - inner_guard: RwLockWriteGuard<'a, HashMap>, -} - -impl<'a, K, V> TrackedMapWriteAccess<'a, K, V> -where - K: Eq + std::hash::Hash, - V: Size, -{ - /// Insert value and return previous (if any). - pub fn insert(&mut self, key: K, val: V) -> Option { - let new_bytes = val.size(); - self.bytes.fetch_add(new_bytes as isize, AtomicOrdering::Relaxed); - self.length.fetch_add(1, AtomicOrdering::Relaxed); - self.inner_guard.insert(key, val).map(|old_val| { - self.bytes.fetch_sub(old_val.size() as isize, AtomicOrdering::Relaxed); - self.length.fetch_sub(1, AtomicOrdering::Relaxed); - old_val - }) - } - - /// Remove value by key. - pub fn remove(&mut self, key: &K) -> Option { - let val = self.inner_guard.remove(key); - if let Some(size) = val.as_ref().map(Size::size) { - self.bytes.fetch_sub(size as isize, AtomicOrdering::Relaxed); - self.length.fetch_sub(1, AtomicOrdering::Relaxed); - } - val - } - - /// Returns mutable reference to the contained value by key, if exists. 
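`TrackedMap` above keeps approximate `len`/`bytes` totals in atomics, adjusting them on every insert, replace, and removal so that limit checks do not need to walk the map. A reduced, single-map sketch of the same accounting (hypothetical `SizeTrackedMap`, values sized by their byte length):

use std::collections::HashMap;
use std::sync::atomic::{AtomicIsize, Ordering};

/// Reduced sketch of TrackedMap's bookkeeping: running totals live in atomics
/// so `len`/`bytes` are cheap to read even while the map is being mutated.
struct SizeTrackedMap {
    inner: HashMap<u64, Vec<u8>>,
    bytes: AtomicIsize,
    length: AtomicIsize,
}

impl SizeTrackedMap {
    fn new() -> Self {
        Self { inner: HashMap::new(), bytes: AtomicIsize::new(0), length: AtomicIsize::new(0) }
    }

    fn insert(&mut self, key: u64, val: Vec<u8>) -> Option<Vec<u8>> {
        self.bytes.fetch_add(val.len() as isize, Ordering::Relaxed);
        self.length.fetch_add(1, Ordering::Relaxed);
        self.inner.insert(key, val).map(|old| {
            // Replacing an existing entry: undo the double-count.
            self.bytes.fetch_sub(old.len() as isize, Ordering::Relaxed);
            self.length.fetch_sub(1, Ordering::Relaxed);
            old
        })
    }

    fn remove(&mut self, key: &u64) -> Option<Vec<u8>> {
        let val = self.inner.remove(key);
        if let Some(v) = &val {
            self.bytes.fetch_sub(v.len() as isize, Ordering::Relaxed);
            self.length.fetch_sub(1, Ordering::Relaxed);
        }
        val
    }

    fn bytes(&self) -> usize {
        self.bytes.load(Ordering::Relaxed).max(0) as usize
    }

    fn len(&self) -> usize {
        self.length.load(Ordering::Relaxed).max(0) as usize
    }
}

fn main() {
    let mut map = SizeTrackedMap::new();
    map.insert(1, vec![0u8; 10]);
    map.insert(2, vec![0u8; 20]);
    assert_eq!((map.len(), map.bytes()), (2, 30));
    map.insert(2, vec![0u8; 5]); // replaces the 20-byte value
    assert_eq!((map.len(), map.bytes()), (2, 15));
    map.remove(&1);
    assert_eq!((map.len(), map.bytes()), (1, 5));
}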
- pub fn get_mut(&mut self, key: &K) -> Option<&mut V> { - self.inner_guard.get_mut(key) - } -} - -#[cfg(test)] -mod tests { - - use super::*; - - impl Size for i32 { - fn size(&self) -> usize { - *self as usize / 10 - } - } - - #[test] - fn basic() { - let map = TrackedMap::default(); - map.write().insert(5, 10); - map.write().insert(6, 20); - - assert_eq!(map.bytes(), 3); - assert_eq!(map.len(), 2); - - map.write().insert(6, 30); - - assert_eq!(map.bytes(), 4); - assert_eq!(map.len(), 2); - - map.write().remove(&6); - assert_eq!(map.bytes(), 1); - assert_eq!(map.len(), 1); - } -} diff --git a/crates/client/transaction-pool/src/graph/validated_pool.rs b/crates/client/transaction-pool/src/graph/validated_pool.rs deleted file mode 100644 index ae566cf15e..0000000000 --- a/crates/client/transaction-pool/src/graph/validated_pool.rs +++ /dev/null @@ -1,625 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -use std::collections::{HashMap, HashSet}; -use std::hash; -use std::sync::Arc; -use std::time::Instant; - -use futures::channel::mpsc::{channel, Sender}; -use parking_lot::{Mutex, RwLock}; -use sc_transaction_pool_api::{error, PoolStatus, ReadyTransactions}; -use serde::Serialize; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{self, SaturatedConversion}; -use sp_runtime::transaction_validity::{TransactionSource, TransactionTag as Tag, ValidTransaction}; - -use super::base_pool::{self as base, PruneStatus}; -use super::listener::Listener; -use super::pool::{BlockHash, ChainApi, EventStream, ExtrinsicFor, ExtrinsicHash, Options, TransactionFor}; -use super::rotator::PoolRotator; -use super::watcher::Watcher; -use crate::LOG_TARGET; - -/// Pre-validated transaction. Validated pool only accepts transactions wrapped in this enum. -#[derive(Debug)] -pub enum ValidatedTransaction { - /// Transaction that has been validated successfully. - Valid(base::Transaction), - /// Transaction that is invalid. - Invalid(Hash, Error), - /// Transaction which validity can't be determined. - /// - /// We're notifying watchers about failure, if 'unknown' transaction is submitted. - Unknown(Hash, Error), -} - -impl ValidatedTransaction { - /// Consume validity result, transaction data and produce ValidTransaction. - pub fn valid_at( - at: u64, - hash: Hash, - source: TransactionSource, - data: Ex, - bytes: usize, - validity: ValidTransaction, - ) -> Self { - Self::Valid(base::Transaction { - data, - bytes, - hash, - source, - priority: validity.priority, - requires: validity.requires, - provides: validity.provides, - propagate: validity.propagate, - valid_till: at.saturated_into::().saturating_add(validity.longevity), - }) - } -} - -/// A type of validated transaction stored in the pool. 
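`ValidatedTransaction::valid_at` above derives the pool-internal deadline from the validity result: `valid_till` is the validation block plus the declared longevity, saturating so that "immortal" transactions (longevity `u64::MAX`) never overflow. A tiny sketch of just that arithmetic over a hypothetical `Validity` struct:

/// Hypothetical subset of a validity result: only the longevity matters here.
struct Validity {
    longevity: u64,
}

/// Mirror of the `valid_till` computation in `ValidatedTransaction::valid_at`:
/// the transaction stays in the pool until block `at + longevity`, saturating
/// instead of overflowing for immortal transactions.
fn valid_till(at: u64, validity: &Validity) -> u64 {
    at.saturating_add(validity.longevity)
}

fn main() {
    let mortal = Validity { longevity: 64 };
    let immortal = Validity { longevity: u64::MAX };
    assert_eq!(valid_till(100, &mortal), 164);
    assert_eq!(valid_till(100, &immortal), u64::MAX);
}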
-pub type ValidatedTransactionFor = ValidatedTransaction, ExtrinsicFor, ::Error>; - -/// A closure that returns true if the local node is a validator that can author blocks. -pub struct IsValidator(Box bool + Send + Sync>); - -impl From for IsValidator { - fn from(is_validator: bool) -> Self { - Self(Box::new(move || is_validator)) - } -} - -impl From bool + Send + Sync>> for IsValidator { - fn from(is_validator: Box bool + Send + Sync>) -> Self { - Self(is_validator) - } -} - -/// Pool that deals with validated transactions. -pub struct ValidatedPool { - api: Arc, - is_validator: IsValidator, - options: Options, - listener: RwLock, B>>, - pool: RwLock, ExtrinsicFor>>, - import_notification_sinks: Mutex>>>, - rotator: PoolRotator>, -} - -impl ValidatedPool { - /// Create a new transaction pool. - pub fn new(options: Options, is_validator: IsValidator, api: Arc) -> Self { - let base_pool = base::BasePool::new(options.reject_future_transactions); - let ban_time = options.ban_time; - Self { - is_validator, - options, - listener: Default::default(), - api, - pool: RwLock::new(base_pool), - import_notification_sinks: Default::default(), - rotator: PoolRotator::new(ban_time), - } - } - - /// Bans given set of hashes. - pub fn ban(&self, now: &Instant, hashes: impl IntoIterator>) { - self.rotator.ban(now, hashes) - } - - /// Returns true if transaction with given hash is currently banned from the pool. - pub fn is_banned(&self, hash: &ExtrinsicHash) -> bool { - self.rotator.is_banned(hash) - } - - /// A fast check before doing any further processing of a transaction, like validation. - /// - /// If `ignore_banned` is `true`, it will not check if the transaction is banned. - /// - /// It checks if the transaction is already imported or banned. If so, it returns an error. - pub fn check_is_known(&self, tx_hash: &ExtrinsicHash, ignore_banned: bool) -> Result<(), B::Error> { - if !ignore_banned && self.is_banned(tx_hash) { - Err(error::Error::TemporarilyBanned.into()) - } else if self.pool.read().is_imported(tx_hash) { - Err(error::Error::AlreadyImported(Box::new(*tx_hash)).into()) - } else { - Ok(()) - } - } - - /// Imports a bunch of pre-validated transactions to the pool. - pub fn submit( - &self, - txs: impl IntoIterator>, - ) -> Vec, B::Error>> { - let results = txs.into_iter().map(|validated_tx| self.submit_one(validated_tx)).collect::>(); - - // only enforce limits if there is at least one imported transaction - let removed = if results.iter().any(|res| res.is_ok()) { self.enforce_limits() } else { Default::default() }; - - results - .into_iter() - .map(|res| match res { - Ok(ref hash) if removed.contains(hash) => Err(error::Error::ImmediatelyDropped.into()), - other => other, - }) - .collect() - } - - /// Submit single pre-validated transaction to the pool. - fn submit_one(&self, tx: ValidatedTransactionFor) -> Result, B::Error> { - match tx { - ValidatedTransaction::Valid(tx) => { - if !tx.propagate && !(self.is_validator.0)() { - return Err(error::Error::Unactionable.into()); - } - - let imported = self.pool.write().import(tx)?; - - if let base::Imported::Ready { ref hash, .. 
} = imported { - let sinks = &mut self.import_notification_sinks.lock(); - sinks.retain_mut(|sink| match sink.try_send(*hash) { - Ok(()) => true, - Err(e) => { - if e.is_full() { - log::warn!( - target: LOG_TARGET, - "[{:?}] Trying to notify an import but the channel is full", - hash, - ); - true - } else { - false - } - } - }); - } - - let mut listener = self.listener.write(); - fire_events(&mut *listener, &imported); - Ok(*imported.hash()) - } - ValidatedTransaction::Invalid(hash, err) => { - self.rotator.ban(&Instant::now(), std::iter::once(hash)); - Err(err) - } - ValidatedTransaction::Unknown(hash, err) => { - self.listener.write().invalid(&hash); - Err(err) - } - } - } - - fn enforce_limits(&self) -> HashSet> { - let status = self.pool.read().status(); - let ready_limit = &self.options.ready; - let future_limit = &self.options.future; - - log::debug!(target: LOG_TARGET, "Pool Status: {:?}", status); - if ready_limit.is_exceeded(status.ready, status.ready_bytes) - || future_limit.is_exceeded(status.future, status.future_bytes) - { - log::debug!( - target: LOG_TARGET, - "Enforcing limits ({}/{}kB ready, {}/{}kB future", - ready_limit.count, - ready_limit.total_bytes / 1024, - future_limit.count, - future_limit.total_bytes / 1024, - ); - - // clean up the pool - let removed = { - let mut pool = self.pool.write(); - let removed = - pool.enforce_limits(ready_limit, future_limit).into_iter().map(|x| x.hash).collect::>(); - // ban all removed transactions - self.rotator.ban(&Instant::now(), removed.iter().copied()); - removed - }; - if !removed.is_empty() { - log::debug!(target: LOG_TARGET, "Enforcing limits: {} dropped", removed.len()); - } - - // run notifications - let mut listener = self.listener.write(); - for h in &removed { - listener.dropped(h, None); - } - - removed - } else { - Default::default() - } - } - - /// Import a single extrinsic and starts to watch their progress in the pool. - pub fn submit_and_watch( - &self, - tx: ValidatedTransactionFor, - ) -> Result, ExtrinsicHash>, B::Error> { - match tx { - ValidatedTransaction::Valid(tx) => { - let hash = self.api.hash_and_length(&tx.data).0; - let watcher = self.listener.write().create_watcher(hash); - self.submit(std::iter::once(ValidatedTransaction::Valid(tx))) - .pop() - .expect("One extrinsic passed; one result returned; qed") - .map(|_| watcher) - } - ValidatedTransaction::Invalid(hash, err) => { - self.rotator.ban(&Instant::now(), std::iter::once(hash)); - Err(err) - } - ValidatedTransaction::Unknown(_, err) => Err(err), - } - } - - /// Resubmits revalidated transactions back to the pool. - /// - /// Removes and then submits passed transactions and all dependent transactions. - /// Transactions that are missing from the pool are not submitted. - pub fn resubmit(&self, mut updated_transactions: HashMap, ValidatedTransactionFor>) { - #[derive(Debug, Clone, Copy, PartialEq)] - enum Status { - Future, - Ready, - Failed, - Dropped, - } - - let (mut initial_statuses, final_statuses) = { - let mut pool = self.pool.write(); - - // remove all passed transactions from the ready/future queues - // (this may remove additional transactions as well) - // - // for every transaction that has an entry in the `updated_transactions`, - // we store updated validation result in txs_to_resubmit - // for every transaction that has no entry in the `updated_transactions`, - // we store last validation result (i.e. 
the pool entry) in txs_to_resubmit - let mut initial_statuses = HashMap::new(); - let mut txs_to_resubmit = Vec::with_capacity(updated_transactions.len()); - while !updated_transactions.is_empty() { - let hash = updated_transactions.keys().next().cloned().expect("transactions is not empty; qed"); - - // note we are not considering tx with hash invalid here - we just want - // to remove it along with dependent transactions and `remove_subtree()` - // does exactly what we need - let removed = pool.remove_subtree(&[hash]); - for removed_tx in removed { - let removed_hash = removed_tx.hash; - let updated_transaction = updated_transactions.remove(&removed_hash); - let tx_to_resubmit = if let Some(updated_tx) = updated_transaction { - updated_tx - } else { - // in most cases we'll end up in successful `try_unwrap`, but if not - // we still need to reinsert transaction back to the pool => duplicate call - let transaction = match Arc::try_unwrap(removed_tx) { - Ok(transaction) => transaction, - Err(transaction) => transaction.duplicate(), - }; - ValidatedTransaction::Valid(transaction) - }; - - initial_statuses.insert(removed_hash, Status::Ready); - txs_to_resubmit.push((removed_hash, tx_to_resubmit)); - } - // make sure to remove the hash even if it's not present in the pool any more. - updated_transactions.remove(&hash); - } - - // if we're rejecting future transactions, then insertion order matters here: - // if tx1 depends on tx2, then if tx1 is inserted before tx2, then it goes - // to the future queue and gets rejected immediately - // => let's temporary stop rejection and clear future queue before return - pool.with_futures_enabled(|pool, reject_future_transactions| { - // now resubmit all removed transactions back to the pool - let mut final_statuses = HashMap::new(); - for (hash, tx_to_resubmit) in txs_to_resubmit { - match tx_to_resubmit { - ValidatedTransaction::Valid(tx) => match pool.import(tx) { - Ok(imported) => match imported { - base::Imported::Ready { promoted, failed, removed, .. } => { - final_statuses.insert(hash, Status::Ready); - for hash in promoted { - final_statuses.insert(hash, Status::Ready); - } - for hash in failed { - final_statuses.insert(hash, Status::Failed); - } - for tx in removed { - final_statuses.insert(tx.hash, Status::Dropped); - } - } - base::Imported::Future { .. 
} => { - final_statuses.insert(hash, Status::Future); - } - }, - Err(err) => { - // we do not want to fail if single transaction import has failed - // nor we do want to propagate this error, because it could tx - // unknown to caller => let's just notify listeners (and issue debug - // message) - log::warn!( - target: LOG_TARGET, - "[{:?}] Removing invalid transaction from update: {}", - hash, - err, - ); - final_statuses.insert(hash, Status::Failed); - } - }, - ValidatedTransaction::Invalid(_, _) | ValidatedTransaction::Unknown(_, _) => { - final_statuses.insert(hash, Status::Failed); - } - } - } - - // if the pool is configured to reject future transactions, let's clear the future - // queue, updating final statuses as required - if reject_future_transactions { - for future_tx in pool.clear_future() { - final_statuses.insert(future_tx.hash, Status::Dropped); - } - } - - (initial_statuses, final_statuses) - }) - }; - - // and now let's notify listeners about status changes - let mut listener = self.listener.write(); - for (hash, final_status) in final_statuses { - let initial_status = initial_statuses.remove(&hash); - if initial_status.is_none() || Some(final_status) != initial_status { - match final_status { - Status::Future => listener.future(&hash), - Status::Ready => listener.ready(&hash, None), - Status::Dropped => listener.dropped(&hash, None), - Status::Failed => listener.invalid(&hash), - } - } - } - } - - /// For each extrinsic, returns tags that it provides (if known), or None (if it is unknown). - pub fn extrinsics_tags(&self, hashes: &[ExtrinsicHash]) -> Vec>> { - self.pool - .read() - .by_hashes(hashes) - .into_iter() - .map(|existing_in_pool| existing_in_pool.map(|transaction| transaction.provides.to_vec())) - .collect() - } - - /// Get ready transaction by hash - pub fn ready_by_hash(&self, hash: &ExtrinsicHash) -> Option> { - self.pool.read().ready_by_hash(hash) - } - - /// Prunes ready transactions that provide given list of tags. - pub fn prune_tags( - &self, - tags: impl IntoIterator, - ) -> Result, ExtrinsicFor>, B::Error> { - // Perform tag-based pruning in the base pool - let status = self.pool.write().prune_tags(tags); - // Notify event listeners of all transactions - // that were promoted to `Ready` or were dropped. - { - let mut listener = self.listener.write(); - for promoted in &status.promoted { - fire_events(&mut *listener, promoted); - } - for f in &status.failed { - listener.dropped(f, None); - } - } - - Ok(status) - } - - /// Resubmit transactions that have been revalidated after prune_tags call. - pub fn resubmit_pruned( - &self, - at: &BlockId, - known_imported_hashes: impl IntoIterator> + Clone, - pruned_hashes: Vec>, - pruned_xts: Vec>, - ) -> Result<(), B::Error> { - debug_assert_eq!(pruned_hashes.len(), pruned_xts.len()); - - // Resubmit pruned transactions - let results = self.submit(pruned_xts); - - // Collect the hashes of transactions that now became invalid (meaning that they are - // successfully pruned). - let hashes = results.into_iter().enumerate().filter_map(|(idx, r)| { - match r.map_err(error::IntoPoolError::into_pool_error) { - Err(Ok(error::Error::InvalidTransaction(_))) => Some(pruned_hashes[idx]), - _ => None, - } - }); - // Fire `pruned` notifications for collected hashes and make sure to include - // `known_imported_hashes` since they were just imported as part of the block. 
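`resubmit_pruned` above treats a pruned transaction whose resubmission now fails as invalid as successfully pruned, since that is exactly what happens once the transaction has made it into the block. A small sketch of that filtering step with a hypothetical `SubmitError` enum standing in for the pool's error type:

/// Hypothetical submission error, standing in for the pool's error type.
#[derive(Debug)]
enum SubmitError {
    InvalidTransaction,
    TemporarilyBanned,
}

/// Mirror of the filter in `resubmit_pruned` above: a pruned transaction whose
/// resubmission now fails as invalid is considered successfully pruned, so its
/// hash should get a `pruned` notification.
fn successfully_pruned(pruned_hashes: &[u64], results: &[Result<u64, SubmitError>]) -> Vec<u64> {
    results
        .iter()
        .enumerate()
        .filter_map(|(idx, res)| match res {
            Err(SubmitError::InvalidTransaction) => Some(pruned_hashes[idx]),
            _ => None,
        })
        .collect()
}

fn main() {
    let pruned_hashes = [7u64, 8, 9];
    let results: Vec<Result<u64, SubmitError>> = vec![
        Err(SubmitError::InvalidTransaction), // now invalid: it was included, fire `pruned`
        Ok(8),                                // re-imported: still pending
        Err(SubmitError::TemporarilyBanned),  // some other failure: not a prune signal
    ];
    assert_eq!(successfully_pruned(&pruned_hashes, &results), vec![7]);
}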
- let hashes = hashes.chain(known_imported_hashes); - self.fire_pruned(at, hashes)?; - - // perform regular cleanup of old transactions in the pool - // and update temporary bans. - self.clear_stale(at)?; - Ok(()) - } - - /// Fire notifications for pruned transactions. - pub fn fire_pruned( - &self, - at: &BlockId, - hashes: impl Iterator>, - ) -> Result<(), B::Error> { - let header_hash = - self.api.block_id_to_hash(at)?.ok_or_else(|| error::Error::InvalidBlockId(format!("{:?}", at)))?; - let mut listener = self.listener.write(); - let mut set = HashSet::with_capacity(hashes.size_hint().0); - for h in hashes { - // `hashes` has possibly duplicate hashes. - // we'd like to send out the `InBlock` notification only once. - if !set.contains(&h) { - listener.pruned(header_hash, &h); - set.insert(h); - } - } - Ok(()) - } - - /// Removes stale transactions from the pool. - /// - /// Stale transactions are transaction beyond their longevity period. - /// Note this function does not remove transactions that are already included in the chain. - /// See `prune_tags` if you want this. - pub fn clear_stale(&self, at: &BlockId) -> Result<(), B::Error> { - let block_number = self - .api - .block_id_to_number(at)? - .ok_or_else(|| error::Error::InvalidBlockId(format!("{:?}", at)))? - .saturated_into::(); - let now = Instant::now(); - let to_remove = { - self.ready() - .filter(|tx| self.rotator.ban_if_stale(&now, block_number, tx)) - .map(|tx| tx.hash) - .collect::>() - }; - let futures_to_remove: Vec> = { - let p = self.pool.read(); - let mut hashes = Vec::new(); - for tx in p.futures() { - if self.rotator.ban_if_stale(&now, block_number, tx) { - hashes.push(tx.hash); - } - } - hashes - }; - // removing old transactions - self.remove_invalid(&to_remove); - self.remove_invalid(&futures_to_remove); - // clear banned transactions timeouts - self.rotator.clear_timeouts(&now); - - Ok(()) - } - - /// Get api reference. - pub fn api(&self) -> &B { - &self.api - } - - /// Return an event stream of notifications for when transactions are imported to the pool. - /// - /// Consumers of this stream should use the `ready` method to actually get the - /// pending transactions in the right order. - pub fn import_notification_stream(&self) -> EventStream> { - const CHANNEL_BUFFER_SIZE: usize = 1024; - - let (sink, stream) = channel(CHANNEL_BUFFER_SIZE); - self.import_notification_sinks.lock().push(sink); - stream - } - - /// Invoked when extrinsics are broadcasted. - pub fn on_broadcasted(&self, propagated: HashMap, Vec>) { - let mut listener = self.listener.write(); - for (hash, peers) in propagated.into_iter() { - listener.broadcasted(&hash, peers); - } - } - - /// Remove a subtree of transactions from the pool and mark them invalid. - /// - /// The transactions passed as an argument will be additionally banned - /// to prevent them from entering the pool right away. - /// Note this is not the case for the dependent transactions - those may - /// still be valid so we want to be able to re-import them. - pub fn remove_invalid(&self, hashes: &[ExtrinsicHash]) -> Vec> { - // early exit in case there is no invalid transactions. 
- if hashes.is_empty() { - return vec![]; - } - - log::debug!(target: LOG_TARGET, "Removing invalid transactions: {:?}", hashes); - - // temporarily ban invalid transactions - self.rotator.ban(&Instant::now(), hashes.iter().cloned()); - - let invalid = self.pool.write().remove_subtree(hashes); - - log::debug!(target: LOG_TARGET, "Removed invalid transactions: {:?}", invalid); - - let mut listener = self.listener.write(); - for tx in &invalid { - listener.invalid(&tx.hash); - } - - invalid - } - - /// Get an iterator for ready transactions ordered by priority - pub fn ready(&self) -> impl ReadyTransactions> + Send { - self.pool.read().ready() - } - - /// Returns a Vec of hashes and extrinsics in the future pool. - pub fn futures(&self) -> Vec<(ExtrinsicHash, ExtrinsicFor)> { - self.pool.read().futures().map(|tx| (tx.hash, tx.data.clone())).collect() - } - - /// Returns pool status. - pub fn status(&self) -> PoolStatus { - self.pool.read().status() - } - - /// Notify all watchers that transactions in the block with hash have been finalized - pub async fn on_block_finalized(&self, block_hash: BlockHash) -> Result<(), B::Error> { - log::trace!(target: LOG_TARGET, "Attempting to notify watchers of finalization for {}", block_hash,); - self.listener.write().finalized(block_hash); - Ok(()) - } - - /// Notify the listener of retracted blocks - pub fn on_block_retracted(&self, block_hash: BlockHash) { - self.listener.write().retracted(block_hash) - } -} - -fn fire_events(listener: &mut Listener, imported: &base::Imported) -where - H: hash::Hash + Eq + traits::Member + Serialize, - B: ChainApi, -{ - match *imported { - base::Imported::Ready { ref promoted, ref failed, ref removed, ref hash } => { - listener.ready(hash, None); - failed.iter().for_each(|f| listener.invalid(f)); - removed.iter().for_each(|r| listener.dropped(&r.hash, Some(hash))); - promoted.iter().for_each(|p| listener.ready(p, None)); - } - base::Imported::Future { ref hash } => listener.future(hash), - } -} diff --git a/crates/client/transaction-pool/src/graph/watcher.rs b/crates/client/transaction-pool/src/graph/watcher.rs deleted file mode 100644 index 9f9374f63b..0000000000 --- a/crates/client/transaction-pool/src/graph/watcher.rs +++ /dev/null @@ -1,134 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Extrinsics status updates. - -use futures::Stream; -use sc_transaction_pool_api::TransactionStatus; -use sc_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; - -/// Extrinsic watcher. -/// -/// Represents a stream of status updates for a particular extrinsic. -#[derive(Debug)] -pub struct Watcher { - receiver: TracingUnboundedReceiver>, - /// transaction hash of watched extrinsic - hash: H, -} - -impl Watcher { - /// Returns the transaction hash. 
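The watcher machinery above is a one-sender, many-watchers status stream: each `Watcher` owns a receiver, and the `Sender` keeps one sink per watcher, pruning sinks whose receiver has gone away on every send. A reduced sketch using `std::sync::mpsc` in place of the tracing-unbounded channels and a hypothetical, trimmed-down status enum:

use std::sync::mpsc::{channel, Receiver, Sender as MpscSender};

/// Hypothetical, reduced status enum (the real one also carries peers, finality, etc.).
#[derive(Clone, Debug, PartialEq)]
enum Status {
    Ready,
    InBlock(u64),
    Dropped,
}

/// One sink per registered watcher; dead sinks are pruned on every send.
#[derive(Default)]
struct StatusSender {
    sinks: Vec<MpscSender<Status>>,
}

impl StatusSender {
    fn new_watcher(&mut self) -> Receiver<Status> {
        let (tx, rx) = channel();
        self.sinks.push(tx);
        rx
    }

    fn send(&mut self, status: Status) {
        // Keep only the sinks that still have a live receiver, like Sender::send above.
        self.sinks.retain(|sink| sink.send(status.clone()).is_ok());
    }
}

fn main() {
    let mut sender = StatusSender::default();
    let watcher = sender.new_watcher();

    sender.send(Status::Ready);
    sender.send(Status::InBlock(42));

    assert_eq!(watcher.recv().unwrap(), Status::Ready);
    assert_eq!(watcher.recv().unwrap(), Status::InBlock(42));

    drop(watcher);
    sender.send(Status::Dropped); // receiver gone: the sink is pruned
    assert!(sender.sinks.is_empty());
}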
- pub fn hash(&self) -> &H { - &self.hash - } - - /// Pipe the notifications to given sink. - /// - /// Make sure to drive the future to completion. - pub fn into_stream(self) -> impl Stream> { - self.receiver - } -} - -/// Sender part of the watcher. Exposed only for testing purposes. -#[derive(Debug)] -pub struct Sender { - receivers: Vec>>, - is_finalized: bool, -} - -impl Default for Sender { - fn default() -> Self { - Sender { receivers: Default::default(), is_finalized: false } - } -} - -impl Sender { - /// Add a new watcher to this sender object. - pub fn new_watcher(&mut self, hash: H) -> Watcher { - let (tx, receiver) = tracing_unbounded("mpsc_txpool_watcher", 100_000); - self.receivers.push(tx); - Watcher { receiver, hash } - } - - /// Transaction became ready. - pub fn ready(&mut self) { - self.send(TransactionStatus::Ready) - } - - /// Transaction was moved to future. - pub fn future(&mut self) { - self.send(TransactionStatus::Future) - } - - /// Some state change (perhaps another extrinsic was included) rendered this extrinsic invalid. - pub fn usurped(&mut self, hash: H) { - self.send(TransactionStatus::Usurped(hash)); - self.is_finalized = true; - } - - /// Extrinsic has been included in block with given hash. - pub fn in_block(&mut self, hash: BH, index: usize) { - self.send(TransactionStatus::InBlock((hash, index))); - } - - /// Extrinsic has been finalized by a finality gadget. - pub fn finalized(&mut self, hash: BH, index: usize) { - self.send(TransactionStatus::Finalized((hash, index))); - self.is_finalized = true; - } - - /// The block this extrinsic was included in has been retracted - pub fn finality_timeout(&mut self, hash: BH) { - self.send(TransactionStatus::FinalityTimeout(hash)); - self.is_finalized = true; - } - - /// The block this extrinsic was included in has been retracted - pub fn retracted(&mut self, hash: BH) { - self.send(TransactionStatus::Retracted(hash)); - } - - /// Extrinsic has been marked as invalid by the block builder. - pub fn invalid(&mut self) { - self.send(TransactionStatus::Invalid); - // we mark as finalized as there are no more notifications - self.is_finalized = true; - } - - /// Transaction has been dropped from the pool because of the limit. - pub fn dropped(&mut self) { - self.send(TransactionStatus::Dropped); - self.is_finalized = true; - } - - /// The extrinsic has been broadcast to the given peers. - pub fn broadcast(&mut self, peers: Vec) { - self.send(TransactionStatus::Broadcast(peers)) - } - - /// Returns true if the are no more listeners for this extrinsic or it was finalized. - pub fn is_done(&self) -> bool { - self.is_finalized || self.receivers.is_empty() - } - - fn send(&mut self, status: TransactionStatus) { - self.receivers.retain(|sender| sender.unbounded_send(status.clone()).is_ok()) - } -} diff --git a/crates/client/transaction-pool/src/lib.rs b/crates/client/transaction-pool/src/lib.rs deleted file mode 100644 index 455d1612da..0000000000 --- a/crates/client/transaction-pool/src/lib.rs +++ /dev/null @@ -1,727 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Substrate transaction pool implementation. -#![recursion_limit = "256"] -#![warn(missing_docs)] -#![warn(unused_extern_crates)] - -mod api; -mod enactment_state; -pub mod error; -mod graph; -mod metrics; -mod revalidation; - -use std::collections::{HashMap, HashSet}; -use std::pin::Pin; -use std::sync::Arc; -use std::time::Instant; - -use async_trait::async_trait; -use enactment_state::{EnactmentAction, EnactmentState}; -use futures::channel::oneshot; -use futures::future::{self, ready}; -use futures::prelude::*; -pub use graph::base_pool::Limit as PoolLimit; -pub use graph::{ChainApi, Options, Pool, Transaction, ValidatedTransaction}; -use graph::{ExtrinsicHash, IsValidator}; -use parking_lot::Mutex; -use prometheus_endpoint::Registry as PrometheusRegistry; -use sc_transaction_pool_api::error::Error as TxPoolError; -use sc_transaction_pool_api::{ - ChainEvent, ImportNotificationStream, MaintainedTransactionPool, PoolFuture, PoolStatus, ReadyTransactions, - TransactionFor, TransactionPool, TransactionSource, TransactionStatusStreamFor, TxHash, -}; -use sp_blockchain::{HashAndNumber, TreeRoute}; -use sp_core::traits::SpawnEssentialNamed; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{AtLeast32Bit, Block as BlockT, Extrinsic, Header as HeaderT, NumberFor, Zero}; - -pub use crate::api::FullChainApi; -use crate::metrics::MetricsLink as PrometheusMetrics; - -pub(crate) const LOG_TARGET: &str = "txpool"; - -type BoxedReadyIterator = - Box>> + Send>; - -type ReadyIteratorFor = BoxedReadyIterator, graph::ExtrinsicFor>; - -type PolledIterator = Pin> + Send>>; - -/// A transaction pool for a full node. -pub type FullPool = BasicPool, Block>; - -/// Basic implementation of transaction pool that can be customized by providing PoolApi. -pub struct BasicPool -where - Block: BlockT, - PoolApi: graph::ChainApi, -{ - pool: Arc>, - api: Arc, - revalidation_strategy: Arc>>>, - revalidation_queue: Arc>, - ready_poll: Arc, Block>>>, - metrics: PrometheusMetrics, - enactment_state: Arc>>, -} - -struct ReadyPoll { - updated_at: NumberFor, - pollers: Vec<(NumberFor, oneshot::Sender)>, -} - -impl Default for ReadyPoll { - fn default() -> Self { - Self { updated_at: NumberFor::::zero(), pollers: Default::default() } - } -} - -impl ReadyPoll { - fn new(best_block_number: NumberFor) -> Self { - Self { updated_at: best_block_number, pollers: Default::default() } - } - - fn trigger(&mut self, number: NumberFor, iterator_factory: impl Fn() -> T) { - self.updated_at = number; - - let mut idx = 0; - while idx < self.pollers.len() { - if self.pollers[idx].0 <= number { - let poller_sender = self.pollers.swap_remove(idx); - log::debug!(target: LOG_TARGET, "Sending ready signal at block {}", number); - let _ = poller_sender.1.send(iterator_factory()); - } else { - idx += 1; - } - } - } - - fn add(&mut self, number: NumberFor) -> oneshot::Receiver { - let (sender, receiver) = oneshot::channel(); - self.pollers.push((number, sender)); - receiver - } - - fn updated_at(&self) -> NumberFor { - self.updated_at - } -} - -/// Type of revalidation. -pub enum RevalidationType { - /// Light revalidation type. 
- /// - /// During maintenance, transaction pool makes periodic revalidation - /// of all transactions depending on number of blocks or time passed. - /// Also this kind of revalidation does not resubmit transactions from - /// retracted blocks, since it is too expensive. - Light, - - /// Full revalidation type. - /// - /// During maintenance, transaction pool revalidates some fixed amount of - /// transactions from the pool of valid transactions. - Full, -} - -impl BasicPool -where - Block: BlockT, - PoolApi: graph::ChainApi + 'static, -{ - /// Create new basic transaction pool with provided api, for tests. - pub fn new_test( - pool_api: Arc, - best_block_hash: Block::Hash, - finalized_hash: Block::Hash, - ) -> (Self, Pin + Send>>) { - let pool = Arc::new(graph::Pool::new(Default::default(), true.into(), pool_api.clone())); - let (revalidation_queue, background_task) = - revalidation::RevalidationQueue::new_background(pool_api.clone(), pool.clone()); - ( - Self { - api: pool_api, - pool, - revalidation_queue: Arc::new(revalidation_queue), - revalidation_strategy: Arc::new(Mutex::new(RevalidationStrategy::Always)), - ready_poll: Default::default(), - metrics: Default::default(), - enactment_state: Arc::new(Mutex::new(EnactmentState::new(best_block_hash, finalized_hash))), - }, - background_task, - ) - } - - /// Create new basic transaction pool with provided api and custom - /// revalidation type. - #[allow(clippy::too_many_arguments)] - pub fn with_revalidation_type( - options: graph::Options, - is_validator: IsValidator, - pool_api: Arc, - prometheus: Option<&PrometheusRegistry>, - revalidation_type: RevalidationType, - spawner: impl SpawnEssentialNamed, - best_block_number: NumberFor, - best_block_hash: Block::Hash, - finalized_hash: Block::Hash, - ) -> Self { - let pool = Arc::new(graph::Pool::new(options, is_validator, pool_api.clone())); - let (revalidation_queue, background_task) = match revalidation_type { - RevalidationType::Light => (revalidation::RevalidationQueue::new(pool_api.clone(), pool.clone()), None), - RevalidationType::Full => { - let (queue, background) = - revalidation::RevalidationQueue::new_background(pool_api.clone(), pool.clone()); - (queue, Some(background)) - } - }; - - if let Some(background_task) = background_task { - spawner.spawn_essential("txpool-background", Some("transaction-pool"), background_task); - } - - Self { - api: pool_api, - pool, - revalidation_queue: Arc::new(revalidation_queue), - revalidation_strategy: Arc::new(Mutex::new(match revalidation_type { - RevalidationType::Light => RevalidationStrategy::Light(RevalidationStatus::NotScheduled), - RevalidationType::Full => RevalidationStrategy::Always, - })), - ready_poll: Arc::new(Mutex::new(ReadyPoll::new(best_block_number))), - metrics: PrometheusMetrics::new(prometheus), - enactment_state: Arc::new(Mutex::new(EnactmentState::new(best_block_hash, finalized_hash))), - } - } - - /// Gets shared reference to the underlying pool. 
- pub fn pool(&self) -> &Arc> { - &self.pool - } - - /// Get access to the underlying api - pub fn api(&self) -> &PoolApi { - &self.api - } -} - -impl TransactionPool for BasicPool -where - Block: BlockT, - PoolApi: 'static + graph::ChainApi, -{ - type Block = PoolApi::Block; - type Hash = graph::ExtrinsicHash; - type InPoolTransaction = graph::base_pool::Transaction, TransactionFor>; - type Error = PoolApi::Error; - - fn submit_at( - &self, - at: &BlockId, - source: TransactionSource, - xts: Vec>, - ) -> PoolFuture, Self::Error>>, Self::Error> { - let pool = self.pool.clone(); - let at = *at; - - self.metrics.report(|metrics| metrics.submitted_transactions.inc_by(xts.len() as u64)); - - async move { pool.submit_at(&at, source, xts).await }.boxed() - } - - fn submit_one( - &self, - at: &BlockId, - source: TransactionSource, - xt: TransactionFor, - ) -> PoolFuture, Self::Error> { - let pool = self.pool.clone(); - let at = *at; - - self.metrics.report(|metrics| metrics.submitted_transactions.inc()); - - async move { pool.submit_one(&at, source, xt).await }.boxed() - } - - fn submit_and_watch( - &self, - at: &BlockId, - source: TransactionSource, - xt: TransactionFor, - ) -> PoolFuture>>, Self::Error> { - let at = *at; - let pool = self.pool.clone(); - - self.metrics.report(|metrics| metrics.submitted_transactions.inc()); - - async move { - let watcher = pool.submit_and_watch(&at, source, xt).await?; - - Ok(watcher.into_stream().boxed()) - } - .boxed() - } - - fn remove_invalid(&self, hashes: &[TxHash]) -> Vec> { - let removed = self.pool.validated_pool().remove_invalid(hashes); - self.metrics.report(|metrics| metrics.validations_invalid.inc_by(removed.len() as u64)); - removed - } - - fn status(&self) -> PoolStatus { - self.pool.validated_pool().status() - } - - fn import_notification_stream(&self) -> ImportNotificationStream> { - self.pool.validated_pool().import_notification_stream() - } - - fn hash_of(&self, xt: &TransactionFor) -> TxHash { - self.pool.hash_of(xt) - } - - fn on_broadcasted(&self, propagations: HashMap, Vec>) { - self.pool.validated_pool().on_broadcasted(propagations) - } - - fn ready_transaction(&self, hash: &TxHash) -> Option> { - self.pool.validated_pool().ready_by_hash(hash) - } - - fn ready_at(&self, at: NumberFor) -> PolledIterator { - let status = self.status(); - // If there are no transactions in the pool, it is fine to return early. - // - // There could be transaction being added because of some re-org happening at the relevant - // block, but this is relative unlikely. 
- if status.ready == 0 && status.future == 0 { - return async { Box::new(std::iter::empty()) as Box<_> }.boxed(); - } - - if self.ready_poll.lock().updated_at() >= at { - log::trace!(target: LOG_TARGET, "Transaction pool already processed block #{}", at); - let iterator: ReadyIteratorFor = Box::new(self.pool.validated_pool().ready()); - return async move { iterator }.boxed(); - } - - self.ready_poll - .lock() - .add(at) - .map(|received| { - received.unwrap_or_else(|e| { - log::warn!("Error receiving pending set: {:?}", e); - Box::new(std::iter::empty()) - }) - }) - .boxed() - } - - fn ready(&self) -> ReadyIteratorFor { - Box::new(self.pool.validated_pool().ready()) - } -} - -impl FullPool -where - Block: BlockT, - Client: sp_api::ProvideRuntimeApi - + sc_client_api::BlockBackend - + sc_client_api::blockchain::HeaderBackend - + sp_runtime::traits::BlockIdTo - + sc_client_api::ExecutorProvider - + sc_client_api::UsageProvider - + sp_blockchain::HeaderMetadata - + Send - + Sync - + 'static, - Client::Api: sp_transaction_pool::runtime_api::TaggedTransactionQueue, -{ - /// Create new basic transaction pool for a full node with the provided api. - pub fn new_full( - options: graph::Options, - is_validator: IsValidator, - prometheus: Option<&PrometheusRegistry>, - spawner: impl SpawnEssentialNamed, - client: Arc, - ) -> Arc { - let pool_api = Arc::new(FullChainApi::new(client.clone(), prometheus, &spawner)); - let pool = Arc::new(Self::with_revalidation_type( - options, - is_validator, - pool_api, - prometheus, - RevalidationType::Full, - spawner, - client.usage_info().chain.best_number, - client.usage_info().chain.best_hash, - client.usage_info().chain.finalized_hash, - )); - - // make transaction pool available for off-chain runtime calls. - client.execution_extensions().register_transaction_pool(&pool); - - pool - } -} - -impl sc_transaction_pool_api::LocalTransactionPool for BasicPool, Block> -where - Block: BlockT, - Client: sp_api::ProvideRuntimeApi - + sc_client_api::BlockBackend - + sc_client_api::blockchain::HeaderBackend - + sp_runtime::traits::BlockIdTo - + sp_blockchain::HeaderMetadata, - Client: Send + Sync + 'static, - Client::Api: sp_transaction_pool::runtime_api::TaggedTransactionQueue, -{ - type Block = Block; - type Hash = graph::ExtrinsicHash>; - type Error = as graph::ChainApi>::Error; - - fn submit_local( - &self, - at: &BlockId, - xt: sc_transaction_pool_api::LocalTransactionFor, - ) -> Result { - use sp_runtime::traits::SaturatedConversion; - use sp_runtime::transaction_validity::TransactionValidityError; - - let validity = - self.api.validate_transaction_blocking(at, TransactionSource::Local, xt.clone())?.map_err(|e| { - Self::Error::Pool(match e { - TransactionValidityError::Invalid(i) => TxPoolError::InvalidTransaction(i), - TransactionValidityError::Unknown(u) => TxPoolError::UnknownTransaction(u), - }) - })?; - - let (hash, bytes) = self.pool.validated_pool().api().hash_and_length(&xt); - let block_number = - self.api.block_id_to_number(at)?.ok_or_else(|| error::Error::BlockIdConversion(format!("{:?}", at)))?; - - let validated = ValidatedTransaction::valid_at( - block_number.saturated_into::(), - hash, - TransactionSource::Local, - xt, - bytes, - validity, - ); - - self.pool.validated_pool().submit(vec![validated]).remove(0) - } -} - -#[cfg_attr(test, derive(Debug))] -enum RevalidationStatus { - /// The revalidation has never been completed. - NotScheduled, - /// The revalidation is scheduled. - Scheduled(Option, Option), - /// The revalidation is in progress. 
- InProgress, -} - -enum RevalidationStrategy { - Always, - Light(RevalidationStatus), -} - -struct RevalidationAction { - revalidate: bool, - resubmit: bool, -} - -impl RevalidationStrategy { - pub fn clear(&mut self) { - if let Self::Light(status) = self { - status.clear() - } - } - - pub fn next( - &mut self, - block: N, - revalidate_time_period: Option, - revalidate_block_period: Option, - ) -> RevalidationAction { - match self { - Self::Light(status) => RevalidationAction { - revalidate: status.next_required(block, revalidate_time_period, revalidate_block_period), - resubmit: false, - }, - Self::Always => RevalidationAction { revalidate: true, resubmit: true }, - } - } -} - -impl RevalidationStatus { - /// Called when revalidation is completed. - pub fn clear(&mut self) { - *self = Self::NotScheduled; - } - - /// Returns true if revalidation is required. - pub fn next_required( - &mut self, - block: N, - revalidate_time_period: Option, - revalidate_block_period: Option, - ) -> bool { - match *self { - Self::NotScheduled => { - *self = Self::Scheduled( - revalidate_time_period.map(|period| Instant::now() + period), - revalidate_block_period.map(|period| block + period), - ); - false - } - Self::Scheduled(revalidate_at_time, revalidate_at_block) => { - let is_required = revalidate_at_time.map(|at| Instant::now() >= at).unwrap_or(false) - || revalidate_at_block.map(|at| block >= at).unwrap_or(false); - if is_required { - *self = Self::InProgress; - } - is_required - } - Self::InProgress => false, - } - } -} - -/// Prune the known txs for the given block. -async fn prune_known_txs_for_block>( - block_hash: Block::Hash, - api: &Api, - pool: &graph::Pool, -) -> Vec> { - let extrinsics = api - .block_body(block_hash) - .await - .unwrap_or_else(|e| { - log::warn!("Prune known transactions: error request: {}", e); - None - }) - .unwrap_or_default(); - - let hashes = extrinsics.iter().map(|tx| pool.hash_of(tx)).collect::>(); - - log::trace!(target: LOG_TARGET, "Pruning transactions: {:?}", hashes); - - let header = match api.block_header(block_hash) { - Ok(Some(h)) => h, - Ok(None) => { - log::debug!(target: LOG_TARGET, "Could not find header for {:?}.", block_hash); - return hashes; - } - Err(e) => { - log::debug!(target: LOG_TARGET, "Error retrieving header for {:?}: {}", block_hash, e); - return hashes; - } - }; - - if let Err(e) = pool.prune(&BlockId::Hash(block_hash), &BlockId::hash(*header.parent_hash()), &extrinsics).await { - log::error!("Cannot prune known in the pool: {}", e); - } - - hashes -} - -impl BasicPool -where - Block: BlockT, - PoolApi: 'static + graph::ChainApi, -{ - /// Handles enactment and retraction of blocks, prunes stale transactions - /// (that have already been enacted) and resubmits transactions that were - /// retracted. - async fn handle_enactment(&self, tree_route: TreeRoute) { - log::trace!(target: LOG_TARGET, "handle_enactment tree_route: {tree_route:?}"); - let pool = self.pool.clone(); - let api = self.api.clone(); - - let (hash, block_number) = match tree_route.last() { - Some(HashAndNumber { hash, number }) => (hash, number), - None => { - log::warn!(target: LOG_TARGET, "Skipping ChainEvent - no last block in tree route {:?}", tree_route,); - return; - } - }; - - let next_action = self.revalidation_strategy.lock().next( - *block_number, - Some(std::time::Duration::from_secs(60)), - Some(20u32.into()), - ); - - // We keep track of everything we prune so that later we won't add - // transactions with those hashes from the retracted blocks. 
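// RevalidationStatus::next_required above is a small arm-then-fire gate: the first call only
// schedules revalidation (by time period and block period), later calls report it as due once
// either threshold is crossed, and InProgress suppresses re-triggering. A condensed sketch of
// that state machine, assuming plain u64 block numbers and a concrete Duration in place of the
// generic N and the Option-wrapped periods (Schedule is an illustrative name):
use std::time::{Duration, Instant};

enum Schedule {
    NotScheduled,
    Scheduled { at_time: Instant, at_block: u64 },
    InProgress,
}

impl Schedule {
    fn next_required(&mut self, block: u64, time_period: Duration, block_period: u64) -> bool {
        match *self {
            Schedule::NotScheduled => {
                // First call arms the schedule and reports "not yet".
                *self = Schedule::Scheduled {
                    at_time: Instant::now() + time_period,
                    at_block: block + block_period,
                };
                false
            }
            Schedule::Scheduled { at_time, at_block } => {
                let due = Instant::now() >= at_time || block >= at_block;
                if due {
                    *self = Schedule::InProgress;
                }
                due
            }
            // A round is already in flight; the original resets this state via clear().
            Schedule::InProgress => false,
        }
    }
}

fn main() {
    let mut schedule = Schedule::NotScheduled;
    // Mirrors the 60-second / 20-block periods passed by handle_enactment above.
    assert!(!schedule.next_required(100, Duration::from_secs(60), 20));
    assert!(!schedule.next_required(110, Duration::from_secs(60), 20));
    // 20 blocks later the block period has elapsed, so revalidation is due.
    assert!(schedule.next_required(120, Duration::from_secs(60), 20));
}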
- let mut pruned_log = HashSet::>::new(); - - // If there is a tree route, we use this to prune known tx based on the enacted - // blocks. Before pruning enacted transactions, we inform the listeners about - // retracted blocks and their transactions. This order is important, because - // if we enact and retract the same transaction at the same time, we want to - // send first the retract and than the prune event. - for retracted in tree_route.retracted() { - // notify txs awaiting finality that it has been retracted - pool.validated_pool().on_block_retracted(retracted.hash); - } - - future::join_all(tree_route.enacted().iter().map(|h| prune_known_txs_for_block(h.hash, &*api, &*pool))) - .await - .into_iter() - .for_each(|enacted_log| { - pruned_log.extend(enacted_log); - }); - - self.metrics.report(|metrics| metrics.block_transactions_pruned.inc_by(pruned_log.len() as u64)); - - if next_action.resubmit { - let mut resubmit_transactions = Vec::new(); - - for retracted in tree_route.retracted() { - let hash = retracted.hash; - - let block_transactions = api - .block_body(hash) - .await - .unwrap_or_else(|e| { - log::warn!("Failed to fetch block body: {}", e); - None - }) - .unwrap_or_default() - .into_iter() - .filter(|tx| tx.is_signed().unwrap_or(true)); - - let mut resubmitted_to_report = 0; - - resubmit_transactions.extend(block_transactions.into_iter().filter(|tx| { - let tx_hash = pool.hash_of(tx); - let contains = pruned_log.contains(&tx_hash); - - // need to count all transactions, not just filtered, here - resubmitted_to_report += 1; - - if !contains { - log::debug!( - target: LOG_TARGET, - "[{:?}]: Resubmitting from retracted block {:?}", - tx_hash, - hash, - ); - } - !contains - })); - - self.metrics.report(|metrics| metrics.block_transactions_resubmitted.inc_by(resubmitted_to_report)); - } - - if let Err(e) = pool - .resubmit_at( - &BlockId::Hash(*hash), - // These transactions are coming from retracted blocks, we should - // simply consider them external. - TransactionSource::External, - resubmit_transactions, - ) - .await - { - log::debug!(target: LOG_TARGET, "[{:?}] Error re-submitting transactions: {}", hash, e,) - } - } - - let extra_pool = pool.clone(); - // After #5200 lands, this arguably might be moved to the - // handler of "all blocks notification". 
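// The retract-and-resubmit logic above reduces to a set difference: every hash pruned from an
// enacted block is remembered in pruned_log, and only retracted transactions outside that set
// are resubmitted (as TransactionSource::External). A stand-alone sketch of that filter,
// assuming u64 hashes in place of ExtrinsicHash<PoolApi>:
use std::collections::HashSet;

fn resubmission_candidates(
    enacted_txs: &[Vec<u64>],   // tx hashes per enacted block
    retracted_txs: &[Vec<u64>], // tx hashes per retracted block
) -> Vec<u64> {
    // Everything included on the newly enacted branch is pruned...
    let pruned_log: HashSet<u64> = enacted_txs.iter().flatten().copied().collect();

    // ...and only retracted transactions that were not re-included get resubmitted.
    retracted_txs
        .iter()
        .flatten()
        .copied()
        .filter(|hash| !pruned_log.contains(hash))
        .collect()
}

fn main() {
    // Tx 1 made it into both branches, tx 2 only into the retracted one.
    let enacted = vec![vec![1, 3]];
    let retracted = vec![vec![1, 2]];
    assert_eq!(resubmission_candidates(&enacted, &retracted), vec![2]);
}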
- self.ready_poll.lock().trigger(*block_number, move || Box::new(extra_pool.validated_pool().ready())); - - if next_action.revalidate { - let hashes = pool.validated_pool().ready().map(|tx| tx.hash).collect(); - self.revalidation_queue.revalidate_later(*block_number, hashes).await; - - self.revalidation_strategy.lock().clear(); - } - } -} - -#[async_trait] -impl MaintainedTransactionPool for BasicPool -where - Block: BlockT, - PoolApi: 'static + graph::ChainApi, -{ - async fn maintain(&self, event: ChainEvent) { - let prev_finalized_block = self.enactment_state.lock().recent_finalized_block(); - let compute_tree_route = |from, to| -> Result, String> { - match self.api.tree_route(from, to) { - Ok(tree_route) => Ok(tree_route), - Err(e) => Err(format!("Error occurred while computing tree_route from {from:?} to {to:?}: {e}")), - } - }; - let block_id_to_number = |hash| self.api.block_id_to_number(&BlockId::Hash(hash)).map_err(|e| format!("{}", e)); - - let result = self.enactment_state.lock().update(&event, &compute_tree_route, &block_id_to_number); - - match result { - Err(msg) => { - log::debug!(target: LOG_TARGET, "{msg}"); - self.enactment_state.lock().force_update(&event); - } - Ok(EnactmentAction::Skip) => return, - Ok(EnactmentAction::HandleFinalization) => {} - Ok(EnactmentAction::HandleEnactment(tree_route)) => { - self.handle_enactment(tree_route).await; - } - }; - - if let ChainEvent::Finalized { hash, tree_route } = event { - log::trace!( - target: LOG_TARGET, - "on-finalized enacted: {tree_route:?}, previously finalized: {prev_finalized_block:?}", - ); - - for hash in tree_route.iter().chain(std::iter::once(&hash)) { - if let Err(e) = self.pool.validated_pool().on_block_finalized(*hash).await { - log::warn!( - target: LOG_TARGET, - "Error occurred while attempting to notify watchers about finalization {}: {}", - hash, - e - ) - } - } - } - } -} - -/// Inform the transaction pool about imported and finalized blocks. -pub async fn notification_future(client: Arc, txpool: Arc) -where - Block: BlockT, - Client: sc_client_api::BlockchainEvents, - Pool: MaintainedTransactionPool, -{ - let import_stream = client.import_notification_stream().filter_map(|n| ready(n.try_into().ok())).fuse(); - let finality_stream = client.finality_notification_stream().map(Into::into).fuse(); - - futures::stream::select(import_stream, finality_stream).for_each(|evt| txpool.maintain(evt)).await -} diff --git a/crates/client/transaction-pool/src/metrics.rs b/crates/client/transaction-pool/src/metrics.rs deleted file mode 100644 index 1b5baef356..0000000000 --- a/crates/client/transaction-pool/src/metrics.rs +++ /dev/null @@ -1,126 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Transaction pool Prometheus metrics. 
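// notification_future above does nothing more than merge the block-import and finality
// notification streams and feed every event into maintain(). A reduced analogue with
// iterator-backed streams, where Event and maintain are stand-ins for ChainEvent and
// BasicPool::maintain:
use futures::{stream, StreamExt};

#[derive(Debug)]
enum Event {
    NewBestBlock(u64),
    Finalized(u64),
}

async fn maintain(event: Event) {
    // Stand-in for the real maintenance step (prune, resubmit, revalidate).
    println!("maintaining after {:?}", event);
}

fn main() {
    let imports = stream::iter(vec![Event::NewBestBlock(1), Event::NewBestBlock(2)]).fuse();
    let finality = stream::iter(vec![Event::Finalized(1)]).fuse();

    // Events from both sources are handled by the same routine, in arrival order.
    futures::executor::block_on(stream::select(imports, finality).for_each(|event| maintain(event)));
}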
- -use std::sync::Arc; - -use prometheus_endpoint::{register, Counter, PrometheusError, Registry, U64}; - -#[derive(Clone, Default)] -pub struct MetricsLink(Arc>); - -impl MetricsLink { - pub fn new(registry: Option<&Registry>) -> Self { - Self(Arc::new(registry.and_then(|registry| { - Metrics::register(registry) - .map_err(|err| { - log::warn!("Failed to register prometheus metrics: {}", err); - }) - .ok() - }))) - } - - pub fn report(&self, do_this: impl FnOnce(&Metrics)) { - if let Some(metrics) = self.0.as_ref() { - do_this(metrics); - } - } -} - -/// Transaction pool Prometheus metrics. -pub struct Metrics { - pub submitted_transactions: Counter, - pub validations_invalid: Counter, - pub block_transactions_pruned: Counter, - pub block_transactions_resubmitted: Counter, -} - -impl Metrics { - pub fn register(registry: &Registry) -> Result { - Ok(Self { - submitted_transactions: register( - Counter::new("substrate_sub_txpool_submitted_transactions", "Total number of transactions submitted")?, - registry, - )?, - validations_invalid: register( - Counter::new( - "substrate_sub_txpool_validations_invalid", - "Total number of transactions that were removed from the pool as invalid", - )?, - registry, - )?, - block_transactions_pruned: register( - Counter::new( - "substrate_sub_txpool_block_transactions_pruned", - "Total number of transactions that was requested to be pruned by block events", - )?, - registry, - )?, - block_transactions_resubmitted: register( - Counter::new( - "substrate_sub_txpool_block_transactions_resubmitted", - "Total number of transactions that was requested to be resubmitted by block events", - )?, - registry, - )?, - }) - } -} - -/// Transaction pool api Prometheus metrics. -pub struct ApiMetrics { - pub validations_scheduled: Counter, - pub validations_finished: Counter, -} - -impl ApiMetrics { - /// Register the metrics at the given Prometheus registry. - pub fn register(registry: &Registry) -> Result { - Ok(Self { - validations_scheduled: register( - Counter::new( - "substrate_sub_txpool_validations_scheduled", - "Total number of transactions scheduled for validation", - )?, - registry, - )?, - validations_finished: register( - Counter::new( - "substrate_sub_txpool_validations_finished", - "Total number of transactions that finished validation", - )?, - registry, - )?, - }) - } -} - -/// An extension trait for [`ApiMetrics`]. -pub trait ApiMetricsExt { - /// Report an event to the metrics. - fn report(&self, report: impl FnOnce(&ApiMetrics)); -} - -impl ApiMetricsExt for Option> { - fn report(&self, report: impl FnOnce(&ApiMetrics)) { - if let Some(metrics) = self.as_ref() { - report(metrics) - } - } -} diff --git a/crates/client/transaction-pool/src/revalidation.rs b/crates/client/transaction-pool/src/revalidation.rs deleted file mode 100644 index 1e26305bb3..0000000000 --- a/crates/client/transaction-pool/src/revalidation.rs +++ /dev/null @@ -1,310 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Pool periodic revalidation. - -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::pin::Pin; -use std::sync::Arc; -use std::time::Duration; - -use futures::prelude::*; -use sc_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{SaturatedConversion, Zero}; -use sp_runtime::transaction_validity::TransactionValidityError; - -use crate::graph::{ChainApi, ExtrinsicHash, NumberFor, Pool, ValidatedTransaction}; -use crate::LOG_TARGET; - -const BACKGROUND_REVALIDATION_INTERVAL: Duration = Duration::from_millis(200); - -const MIN_BACKGROUND_REVALIDATION_BATCH_SIZE: usize = 20; - -/// Payload from queue to worker. -struct WorkerPayload { - at: NumberFor, - transactions: Vec>, -} - -/// Async revalidation worker. -/// -/// Implements future and can be spawned in place or in background. -struct RevalidationWorker { - api: Arc, - pool: Arc>, - best_block: NumberFor, - block_ordered: BTreeMap, HashSet>>, - members: HashMap, NumberFor>, -} - -impl Unpin for RevalidationWorker {} - -/// Revalidate batch of transaction. -/// -/// Each transaction is validated against chain, and invalid are -/// removed from the `pool`, while valid are resubmitted. -async fn batch_revalidate( - pool: Arc>, - api: Arc, - at: NumberFor, - batch: impl IntoIterator>, -) { - let mut invalid_hashes = Vec::new(); - let mut revalidated = HashMap::new(); - - let validation_results = futures::future::join_all(batch.into_iter().filter_map(|ext_hash| { - pool.validated_pool().ready_by_hash(&ext_hash).map(|ext| { - api.validate_transaction(&BlockId::Number(at), ext.source, ext.data.clone()) - .map(move |validation_result| (validation_result, ext_hash, ext)) - }) - })) - .await; - - for (validation_result, ext_hash, ext) in validation_results { - match validation_result { - Ok(Err(TransactionValidityError::Invalid(err))) => { - log::debug!(target: LOG_TARGET, "[{:?}]: Revalidation: invalid {:?}", ext_hash, err,); - invalid_hashes.push(ext_hash); - } - Ok(Err(TransactionValidityError::Unknown(err))) => { - // skipping unknown, they might be pushed by valid or invalid transaction - // when latter resubmitted. 
- log::trace!(target: LOG_TARGET, "[{:?}]: Unknown during revalidation: {:?}", ext_hash, err,); - } - Ok(Ok(validity)) => { - revalidated.insert( - ext_hash, - ValidatedTransaction::valid_at( - at.saturated_into::(), - ext_hash, - ext.source, - ext.data.clone(), - api.hash_and_length(&ext.data).1, - validity, - ), - ); - } - Err(validation_err) => { - log::debug!( - target: LOG_TARGET, - "[{:?}]: Removing due to error during revalidation: {}", - ext_hash, - validation_err - ); - invalid_hashes.push(ext_hash); - } - } - } - - pool.validated_pool().remove_invalid(&invalid_hashes); - if !revalidated.is_empty() { - pool.resubmit(revalidated); - } -} - -impl RevalidationWorker { - fn new(api: Arc, pool: Arc>) -> Self { - Self { api, pool, block_ordered: Default::default(), members: Default::default(), best_block: Zero::zero() } - } - - fn prepare_batch(&mut self) -> Vec> { - let mut queued_exts = Vec::new(); - let mut left = std::cmp::max(MIN_BACKGROUND_REVALIDATION_BATCH_SIZE, self.members.len() / 4); - - // Take maximum of count transaction by order - // which they got into the pool - while left > 0 { - let first_block = match self.block_ordered.keys().next().cloned() { - Some(bn) => bn, - None => break, - }; - let mut block_drained = false; - if let Some(extrinsics) = self.block_ordered.get_mut(&first_block) { - let to_queue = extrinsics.iter().take(left).cloned().collect::>(); - if to_queue.len() == extrinsics.len() { - block_drained = true; - } else { - for xt in &to_queue { - extrinsics.remove(xt); - } - } - left -= to_queue.len(); - queued_exts.extend(to_queue); - } - - if block_drained { - self.block_ordered.remove(&first_block); - } - } - - for hash in queued_exts.iter() { - self.members.remove(hash); - } - - queued_exts - } - - fn len(&self) -> usize { - self.block_ordered.iter().map(|b| b.1.len()).sum() - } - - fn push(&mut self, worker_payload: WorkerPayload) { - // we don't add something that already scheduled for revalidation - let transactions = worker_payload.transactions; - let block_number = worker_payload.at; - - for ext_hash in transactions { - // we don't add something that already scheduled for revalidation - if self.members.contains_key(&ext_hash) { - log::trace!(target: LOG_TARGET, "[{:?}] Skipped adding for revalidation: Already there.", ext_hash,); - - continue; - } - - self.block_ordered - .entry(block_number) - .and_modify(|value| { - value.insert(ext_hash); - }) - .or_insert_with(|| { - let mut bt = HashSet::new(); - bt.insert(ext_hash); - bt - }); - self.members.insert(ext_hash, block_number); - } - } - - /// Background worker main loop. - /// - /// It does two things: periodically tries to process some transactions - /// from the queue and also accepts messages to enqueue some more - /// transactions from the pool. - pub async fn run(mut self, from_queue: TracingUnboundedReceiver>, interval: Duration) { - let interval_fut = futures_timer::Delay::new(interval); - let from_queue = from_queue.fuse(); - futures::pin_mut!(interval_fut, from_queue); - let this = &mut self; - - loop { - futures::select! { - // Using `fuse()` in here is okay, because we reset the interval when it has fired. - _ = (&mut interval_fut).fuse() => { - let next_batch = this.prepare_batch(); - let batch_len = next_batch.len(); - - batch_revalidate(this.pool.clone(), this.api.clone(), this.best_block, next_batch).await; - - if batch_len > 0 || this.len() > 0 { - log::debug!( - target: LOG_TARGET, - "Revalidated {} transactions. 
Left in the queue for revalidation: {}.", - batch_len, - this.len(), - ); - } - - interval_fut.reset(interval); - }, - workload = from_queue.next() => { - match workload { - Some(worker_payload) => { - this.best_block = worker_payload.at; - this.push(worker_payload); - - if !this.members.is_empty() { - log::debug!( - target: LOG_TARGET, - "Updated revalidation queue at {:?}. Transactions: {:?}", - this.best_block, - this.members, - ); - } - - continue; - }, - // R.I.P. worker! - None => break, - } - } - } - } - } -} - -/// Revalidation queue. -/// -/// Can be configured background (`new_background`) -/// or immediate (just `new`). -pub struct RevalidationQueue { - pool: Arc>, - api: Arc, - background: Option>>, -} - -impl RevalidationQueue -where - Api: 'static, -{ - /// New revalidation queue without background worker. - pub fn new(api: Arc, pool: Arc>) -> Self { - Self { api, pool, background: None } - } - - /// New revalidation queue with background worker. - pub fn new_with_interval( - api: Arc, - pool: Arc>, - interval: Duration, - ) -> (Self, Pin + Send>>) { - let (to_worker, from_queue) = tracing_unbounded("mpsc_revalidation_queue", 100_000); - - let worker = RevalidationWorker::new(api.clone(), pool.clone()); - - let queue = Self { api, pool, background: Some(to_worker) }; - - (queue, worker.run(from_queue, interval).boxed()) - } - - /// New revalidation queue with background worker. - pub fn new_background(api: Arc, pool: Arc>) -> (Self, Pin + Send>>) { - Self::new_with_interval(api, pool, BACKGROUND_REVALIDATION_INTERVAL) - } - - /// Queue some transaction for later revalidation. - /// - /// If queue configured with background worker, this will return immediately. - /// If queue configured without background worker, this will resolve after - /// revalidation is actually done. - pub async fn revalidate_later(&self, at: NumberFor, transactions: Vec>) { - if !transactions.is_empty() { - log::debug!(target: LOG_TARGET, "Sent {} transactions to revalidation queue", transactions.len(),); - } - - if let Some(ref to_worker) = self.background { - if let Err(e) = to_worker.unbounded_send(WorkerPayload { at, transactions }) { - log::warn!(target: LOG_TARGET, "Failed to update background worker: {:?}", e); - } - } else { - let pool = self.pool.clone(); - let api = self.api.clone(); - batch_revalidate(pool, api, at, transactions).await - } - } -} diff --git a/crates/client/transaction-pool/src/tests.rs b/crates/client/transaction-pool/src/tests.rs deleted file mode 100644 index 39b0aa18c7..0000000000 --- a/crates/client/transaction-pool/src/tests.rs +++ /dev/null @@ -1,186 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! Testing related primitives for internal usage in this crate. 
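// prepare_batch above drains the oldest per-block buckets first, taking at most
// max(MIN_BACKGROUND_REVALIDATION_BATCH_SIZE, members / 4) hashes per pass so the queue is
// worked off in block order. A compact stand-alone sketch of that draining logic, assuming
// u64 extrinsic hashes:
use std::collections::{BTreeMap, HashSet};

const MIN_BATCH: usize = 20;

fn prepare_batch(block_ordered: &mut BTreeMap<u64, HashSet<u64>>, queued: usize) -> Vec<u64> {
    let mut batch = Vec::new();
    let mut left = std::cmp::max(MIN_BATCH, queued / 4);

    while left > 0 {
        // Oldest block first, so long-queued transactions are revalidated before newer ones.
        let Some(block) = block_ordered.keys().next().copied() else { break };
        let bucket = block_ordered.get_mut(&block).expect("key comes from the map above");

        let take: Vec<u64> = bucket.iter().take(left).copied().collect();
        for hash in &take {
            bucket.remove(hash);
        }
        if bucket.is_empty() {
            block_ordered.remove(&block);
        }

        left -= take.len();
        batch.extend(take);
    }
    batch
}

fn main() {
    let mut queue = BTreeMap::new();
    queue.insert(7u64, HashSet::from([1, 2, 3]));
    queue.insert(9u64, HashSet::from([4]));
    // All four hashes fit into one minimum-sized batch; the block-7 bucket is drained first.
    assert_eq!(prepare_batch(&mut queue, 4).len(), 4);
    assert!(queue.is_empty());
}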
- -use crate::graph::{BlockHash, ChainApi, ExtrinsicFor, NumberFor, Pool}; -use parking_lot::Mutex; -use sc_transaction_pool_api::error; -use scale_codec::Encode; -use sp_blockchain::TreeRoute; -use sp_runtime::{ - generic::BlockId, - traits::{Block as BlockT, Hash}, - transaction_validity::{InvalidTransaction, TransactionSource, TransactionValidity, ValidTransaction}, -}; -use std::{collections::HashSet, sync::Arc}; -use substrate_test_runtime::{ - substrate_test_pallet::pallet::Call as PalletCall, BalancesCall, Block, Extrinsic, ExtrinsicBuilder, Hashing, - RuntimeCall, Transfer, TransferData, H256, -}; - -pub(crate) const INVALID_NONCE: u64 = 254; - -/// Test api that implements [`ChainApi`]. -#[derive(Clone, Debug, Default)] -pub(crate) struct TestApi { - pub delay: Arc>>>, - pub invalidate: Arc>>, - pub clear_requirements: Arc>>, - pub add_requirements: Arc>>, - pub validation_requests: Arc>>, -} - -impl TestApi { - /// Query validation requests received. - pub fn validation_requests(&self) -> Vec { - self.validation_requests.lock().clone() - } -} - -impl ChainApi for TestApi { - type Block = Block; - type Error = error::Error; - type ValidationFuture = futures::future::Ready>; - type BodyFuture = futures::future::Ready>>>; - - /// Verify extrinsic at given block. - fn validate_transaction( - &self, - at: &BlockId, - _source: TransactionSource, - uxt: ExtrinsicFor, - ) -> Self::ValidationFuture { - self.validation_requests.lock().push(uxt.clone()); - let hash = self.hash_and_length(&uxt).0; - let block_number = self.block_id_to_number(at).unwrap().unwrap(); - - let res = match uxt { - Extrinsic { function: RuntimeCall::Balances(BalancesCall::transfer_allow_death { .. }), .. } => { - let TransferData { nonce, .. } = (&uxt).try_into().unwrap(); - // This is used to control the test flow. - if nonce > 0 { - let opt = self.delay.lock().take(); - if let Some(delay) = opt { - if delay.recv().is_err() { - println!("Error waiting for delay!"); - } - } - } - - if self.invalidate.lock().contains(&hash) { - InvalidTransaction::Custom(0).into() - } else if nonce < block_number { - InvalidTransaction::Stale.into() - } else { - let mut transaction = ValidTransaction { - priority: 4, - requires: if nonce > block_number { vec![vec![nonce as u8 - 1]] } else { vec![] }, - provides: if nonce == INVALID_NONCE { vec![] } else { vec![vec![nonce as u8]] }, - longevity: 3, - propagate: true, - }; - - if self.clear_requirements.lock().contains(&hash) { - transaction.requires.clear(); - } - - if self.add_requirements.lock().contains(&hash) { - transaction.requires.push(vec![128]); - } - - Ok(transaction) - } - } - Extrinsic { function: RuntimeCall::SubstrateTest(PalletCall::include_data { .. }), .. } => { - Ok(ValidTransaction { - priority: 9001, - requires: vec![], - provides: vec![vec![42]], - longevity: 9001, - propagate: false, - }) - } - Extrinsic { function: RuntimeCall::SubstrateTest(PalletCall::indexed_call { .. }), .. } => { - Ok(ValidTransaction { - priority: 9001, - requires: vec![], - provides: vec![vec![43]], - longevity: 9001, - propagate: false, - }) - } - _ => unimplemented!(), - }; - - futures::future::ready(Ok(res)) - } - - /// Returns a block number given the block id. - fn block_id_to_number(&self, at: &BlockId) -> Result>, Self::Error> { - Ok(match at { - BlockId::Number(num) => Some(*num), - BlockId::Hash(_) => None, - }) - } - - /// Returns a block hash given the block id. 
- fn block_id_to_hash( - &self, - at: &BlockId, - ) -> Result::Hash>, Self::Error> { - Ok(match at { - BlockId::Number(num) => Some(H256::from_low_u64_be(*num)).into(), - BlockId::Hash(_) => None, - }) - } - - /// Hash the extrinsic. - fn hash_and_length(&self, uxt: &ExtrinsicFor) -> (BlockHash, usize) { - let encoded = uxt.encode(); - let len = encoded.len(); - (Hashing::hash(&encoded), len) - } - - fn block_body(&self, _id: ::Hash) -> Self::BodyFuture { - futures::future::ready(Ok(None)) - } - - fn block_header( - &self, - _: ::Hash, - ) -> Result::Header>, Self::Error> { - Ok(None) - } - - fn tree_route( - &self, - _from: ::Hash, - _to: ::Hash, - ) -> Result, Self::Error> { - unimplemented!() - } -} - -pub(crate) fn uxt(transfer: Transfer) -> Extrinsic { - ExtrinsicBuilder::new_transfer(transfer).build() -} - -pub(crate) fn pool() -> Pool { - Pool::new(Default::default(), true.into(), TestApi::default().into()) -} diff --git a/crates/client/transaction-pool/tests/pool.rs b/crates/client/transaction-pool/tests/pool.rs deleted file mode 100644 index 8f8ca1eceb..0000000000 --- a/crates/client/transaction-pool/tests/pool.rs +++ /dev/null @@ -1,1461 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -//! 
Tests for top-level transaction pool api - -use std::collections::BTreeSet; -use std::pin::Pin; -use std::sync::Arc; - -use futures::executor::{block_on, block_on_stream}; -use futures::prelude::*; -use futures::task::Poll; -use sc_block_builder::BlockBuilderProvider; -use sc_client_api::client::BlockchainEvents; -use sc_transaction_pool::*; -use sc_transaction_pool_api::{ChainEvent, MaintainedTransactionPool, TransactionPool, TransactionStatus}; -use scale_codec::Encode; -use sp_blockchain::HeaderBackend; -use sp_consensus::BlockOrigin; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::Block as _; -use sp_runtime::transaction_validity::{TransactionSource, ValidTransaction}; -use substrate_test_runtime_client::runtime::{ - Block, Extrinsic, ExtrinsicBuilder, Hash, Header, Transfer, TransferData, -}; -use substrate_test_runtime_client::AccountKeyring::*; -use substrate_test_runtime_client::ClientBlockImportExt; -use substrate_test_runtime_transaction_pool::{uxt, TestApi}; - -const LOG_TARGET: &str = "txpool"; -type Nonce = u64; - -fn pool() -> Pool { - Pool::new(Default::default(), true.into(), TestApi::with_alice_nonce(209).into()) -} - -fn maintained_pool() -> (BasicPool, Arc, futures::executor::ThreadPool) { - let api = Arc::new(TestApi::with_alice_nonce(209)); - let (pool, background_task) = create_basic_pool_with_genesis(api.clone()); - - let thread_pool = futures::executor::ThreadPool::new().unwrap(); - thread_pool.spawn_ok(background_task); - (pool, api, thread_pool) -} - -// new types just for the next function (so that "cargo clippy -- -D warnings" doesn't get mad) -type BasicPoolTest = BasicPool; -type BackgroundTask = Pin + Send>>; - -fn create_basic_pool_with_genesis(test_api: Arc) -> (BasicPoolTest, BackgroundTask) { - let genesis_hash = { - test_api - .chain() - .read() - .block_by_number - .get(&0) - .map(|blocks| blocks[0].0.header.hash()) - .expect("there is block 0. 
qed") - }; - BasicPool::new_test(test_api, genesis_hash, genesis_hash) -} - -fn create_basic_pool(test_api: TestApi) -> BasicPool { - create_basic_pool_with_genesis(Arc::from(test_api)).0 -} - -const SOURCE: TransactionSource = TransactionSource::External; - -#[test] -fn submission_should_work() { - let pool = pool(); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt(Alice, 209))).unwrap(); - - let pending: Vec<_> = - pool.validated_pool().ready().map(|a| TransferData::try_from(&a.data).unwrap().nonce).collect(); - assert_eq!(pending, vec![209]); -} - -#[test] -fn multiple_submission_should_work() { - let pool = pool(); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt(Alice, 209))).unwrap(); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt(Alice, 210))).unwrap(); - - let pending: Vec<_> = - pool.validated_pool().ready().map(|a| TransferData::try_from(&a.data).unwrap().nonce).collect(); - assert_eq!(pending, vec![209, 210]); -} - -#[test] -fn early_nonce_should_be_culled() { - sp_tracing::try_init_simple(); - let pool = pool(); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt(Alice, 208))).unwrap(); - - let pending: Vec<_> = - pool.validated_pool().ready().map(|a| TransferData::try_from(&a.data).unwrap().nonce).collect(); - assert_eq!(pending, Vec::::new()); -} - -#[test] -fn late_nonce_should_be_queued() { - let pool = pool(); - - block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt(Alice, 210))).unwrap(); - let pending: Vec<_> = - pool.validated_pool().ready().map(|a| TransferData::try_from(&a.data).unwrap().nonce).collect(); - assert_eq!(pending, Vec::::new()); - - block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt(Alice, 209))).unwrap(); - let pending: Vec<_> = - pool.validated_pool().ready().map(|a| TransferData::try_from(&a.data).unwrap().nonce).collect(); - assert_eq!(pending, vec![209, 210]); -} - -#[test] -fn prune_tags_should_work() { - let pool = pool(); - let hash209 = block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt(Alice, 209))).unwrap(); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt(Alice, 210))).unwrap(); - - let pending: Vec<_> = - pool.validated_pool().ready().map(|a| TransferData::try_from(&a.data).unwrap().nonce).collect(); - assert_eq!(pending, vec![209, 210]); - - pool.validated_pool().api().push_block(1, Vec::new(), true); - block_on(pool.prune_tags(&BlockId::number(1), vec![vec![209]], vec![hash209])).expect("Prune tags"); - - let pending: Vec<_> = - pool.validated_pool().ready().map(|a| TransferData::try_from(&a.data).unwrap().nonce).collect(); - assert_eq!(pending, vec![210]); -} - -#[test] -fn should_ban_invalid_transactions() { - let pool = pool(); - let uxt = uxt(Alice, 209); - let hash = block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt.clone())).unwrap(); - pool.validated_pool().remove_invalid(&[hash]); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt.clone())).unwrap_err(); - - // when - let pending: Vec<_> = - pool.validated_pool().ready().map(|a| TransferData::try_from(&a.data).unwrap().nonce).collect(); - assert_eq!(pending, Vec::::new()); - - // then - block_on(pool.submit_one(&BlockId::number(0), SOURCE, uxt.clone())).unwrap_err(); -} - -#[test] -fn only_prune_on_new_best() { - let (pool, api, _) = maintained_pool(); - let uxt = uxt(Alice, 209); - - let _ = block_on(pool.submit_and_watch(&BlockId::number(0), SOURCE, uxt.clone())).expect("1. 
Imported"); - pool.api().push_block(1, vec![uxt.clone()], true); - assert_eq!(pool.status().ready, 1); - - let header = api.push_block(2, vec![uxt], true); - let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); -} - -#[test] -fn should_correctly_prune_transactions_providing_more_than_one_tag() { - let api = Arc::new(TestApi::with_alice_nonce(209)); - api.set_valid_modifier(Box::new(|v: &mut ValidTransaction| { - v.provides.push(vec![155]); - })); - let pool = Pool::new(Default::default(), true.into(), api.clone()); - let xt = uxt(Alice, 209); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt.clone())).expect("1. Imported"); - assert_eq!(pool.validated_pool().status().ready, 1); - - // remove the transaction that just got imported. - api.increment_nonce(Alice.into()); - api.push_block(1, Vec::new(), true); - block_on(pool.prune_tags(&BlockId::number(1), vec![vec![209]], vec![])).expect("1. Pruned"); - assert_eq!(pool.validated_pool().status().ready, 0); - // it's re-imported to future - assert_eq!(pool.validated_pool().status().future, 1); - - // so now let's insert another transaction that also provides the 155 - api.increment_nonce(Alice.into()); - api.push_block(2, Vec::new(), true); - let xt = uxt(Alice, 211); - block_on(pool.submit_one(&BlockId::number(2), SOURCE, xt.clone())).expect("2. Imported"); - assert_eq!(pool.validated_pool().status().ready, 1); - assert_eq!(pool.validated_pool().status().future, 1); - let pending: Vec<_> = - pool.validated_pool().ready().map(|a| TransferData::try_from(&a.data).unwrap().nonce).collect(); - assert_eq!(pending, vec![211]); - - // prune it and make sure the pool is empty - api.increment_nonce(Alice.into()); - api.push_block(3, Vec::new(), true); - block_on(pool.prune_tags(&BlockId::number(3), vec![vec![155]], vec![])).expect("2. Pruned"); - assert_eq!(pool.validated_pool().status().ready, 0); - assert_eq!(pool.validated_pool().status().future, 2); -} - -fn block_event(header: Header) -> ChainEvent { - ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None } -} - -fn block_event_with_retracted( - new_best_block_header: Header, - retracted_start: Hash, - api: &TestApi, -) -> ChainEvent { - let tree_route = api.tree_route(retracted_start, new_best_block_header.parent_hash).expect("Tree route exists"); - - ChainEvent::NewBestBlock { hash: new_best_block_header.hash(), tree_route: Some(Arc::new(tree_route)) } -} - -#[test] -fn should_prune_old_during_maintenance() { - let xt = uxt(Alice, 209); - - let (pool, api, _guard) = maintained_pool(); - - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 1); - - let header = api.push_block(1, vec![xt.clone()], true); - - block_on(pool.maintain(block_event(header))); - assert_eq!(pool.status().ready, 0); -} - -#[test] -fn should_revalidate_during_maintenance() { - let xt1 = uxt(Alice, 209); - let xt2 = uxt(Alice, 210); - - let (pool, api, _guard) = maintained_pool(); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt1.clone())).expect("1. Imported"); - let watcher = block_on(pool.submit_and_watch(&BlockId::number(0), SOURCE, xt2.clone())).expect("2. 
Imported"); - assert_eq!(pool.status().ready, 2); - assert_eq!(api.validation_requests().len(), 2); - - let header = api.push_block(1, vec![xt1.clone()], true); - - api.add_invalid(&xt2); - - block_on(pool.maintain(block_event(header))); - assert_eq!(pool.status().ready, 1); - - // test that pool revalidated transaction that left ready and not included in the block - assert_eq!( - futures::executor::block_on_stream(watcher).collect::>(), - vec![TransactionStatus::Ready, TransactionStatus::Invalid], - ); -} - -#[test] -fn should_resubmit_from_retracted_during_maintenance() { - let xt = uxt(Alice, 209); - - let (pool, api, _guard) = maintained_pool(); - - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 1); - - let header = api.push_block(1, vec![], true); - let fork_header = api.push_block(1, vec![], true); - - let event = block_event_with_retracted(header, fork_header.hash(), pool.api()); - - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 1); -} - -#[test] -fn should_not_resubmit_from_retracted_during_maintenance_if_tx_is_also_in_enacted() { - let xt = uxt(Alice, 209); - - let (pool, api, _guard) = maintained_pool(); - - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 1); - - let header = api.push_block(1, vec![xt.clone()], true); - let fork_header = api.push_block(1, vec![xt], true); - - let event = block_event_with_retracted(header, fork_header.hash(), pool.api()); - - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); -} - -#[test] -fn should_not_retain_invalid_hashes_from_retracted() { - let xt = uxt(Alice, 209); - - let (pool, api, _guard) = maintained_pool(); - - let watcher = block_on(pool.submit_and_watch(&BlockId::number(0), SOURCE, xt.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 1); - - let header = api.push_block(1, vec![], true); - let fork_header = api.push_block(1, vec![xt.clone()], true); - api.add_invalid(&xt); - - let event = block_event_with_retracted(header, fork_header.hash(), pool.api()); - block_on(pool.maintain(event)); - - assert_eq!( - futures::executor::block_on_stream(watcher).collect::>(), - vec![TransactionStatus::Ready, TransactionStatus::Invalid], - ); - - assert_eq!(pool.status().ready, 0); -} - -#[test] -fn should_revalidate_across_many_blocks() { - let xt1 = uxt(Alice, 209); - let xt2 = uxt(Alice, 210); - let xt3 = uxt(Alice, 211); - - let (pool, api, _guard) = maintained_pool(); - - let watcher1 = block_on(pool.submit_and_watch(&BlockId::number(0), SOURCE, xt1.clone())).expect("1. Imported"); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt2.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 2); - - let header = api.push_block(1, vec![], true); - block_on(pool.maintain(block_event(header))); - - block_on(pool.submit_one(&BlockId::number(1), SOURCE, xt3.clone())).expect("1. 
Imported"); - assert_eq!(pool.status().ready, 3); - - let header = api.push_block(2, vec![xt1.clone()], true); - let block_hash = header.hash(); - block_on(pool.maintain(block_event(header.clone()))); - - block_on( - watcher1.take_while(|s| future::ready(*s != TransactionStatus::InBlock((block_hash, 0)))).collect::>(), - ); - - assert_eq!(pool.status().ready, 2); -} - -#[test] -fn should_push_watchers_during_maintenance() { - fn alice_uxt(nonce: u64) -> Extrinsic { - uxt(Alice, 209 + nonce) - } - - // given - let (pool, api, _guard) = maintained_pool(); - - let tx0 = alice_uxt(0); - let watcher0 = block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx0.clone())).unwrap(); - let tx1 = alice_uxt(1); - let watcher1 = block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx1.clone())).unwrap(); - let tx2 = alice_uxt(2); - let watcher2 = block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx2.clone())).unwrap(); - let tx3 = alice_uxt(3); - let watcher3 = block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx3.clone())).unwrap(); - let tx4 = alice_uxt(4); - let watcher4 = block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx4.clone())).unwrap(); - assert_eq!(pool.status().ready, 5); - - // when - api.add_invalid(&tx3); - api.add_invalid(&tx4); - - // clear timer events if any - let header = api.push_block(1, vec![], true); - block_on(pool.maintain(block_event(header))); - - // then - // hash3 is now invalid - // hash4 is now invalid - assert_eq!( - futures::executor::block_on_stream(watcher3).collect::>(), - vec![TransactionStatus::Ready, TransactionStatus::Invalid], - ); - assert_eq!( - futures::executor::block_on_stream(watcher4).collect::>(), - vec![TransactionStatus::Ready, TransactionStatus::Invalid], - ); - assert_eq!(pool.status().ready, 3); - - // when - let header = api.push_block(2, vec![tx0, tx1, tx2], true); - let header_hash = header.hash(); - block_on(pool.maintain(block_event(header))); - - let event = ChainEvent::Finalized { hash: header_hash, tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - - // then - // events for hash0 are: Ready, InBlock - // events for hash1 are: Ready, InBlock - // events for hash2 are: Ready, InBlock - assert_eq!( - futures::executor::block_on_stream(watcher0).collect::>(), - vec![ - TransactionStatus::Ready, - TransactionStatus::InBlock((header_hash, 0)), - TransactionStatus::Finalized((header_hash, 0)) - ], - ); - assert_eq!( - futures::executor::block_on_stream(watcher1).collect::>(), - vec![ - TransactionStatus::Ready, - TransactionStatus::InBlock((header_hash, 1)), - TransactionStatus::Finalized((header_hash, 1)) - ], - ); - assert_eq!( - futures::executor::block_on_stream(watcher2).collect::>(), - vec![ - TransactionStatus::Ready, - TransactionStatus::InBlock((header_hash, 2)), - TransactionStatus::Finalized((header_hash, 2)) - ], - ); -} - -#[test] -fn finalization() { - let xt = uxt(Alice, 209); - let api = TestApi::with_alice_nonce(209); - api.push_block(1, vec![], true); - let pool = create_basic_pool(api); - let watcher = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, xt.clone())).expect("1. 
Imported"); - pool.api().push_block(2, vec![xt.clone()], true); - - let header = pool.api().chain().read().block_by_number.get(&2).unwrap()[0].0.header().clone(); - let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - - let event = ChainEvent::Finalized { hash: header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - - let mut stream = futures::executor::block_on_stream(watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((header.hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((header.hash(), 0)))); - assert_eq!(stream.next(), None); -} - -#[test] -fn fork_aware_finalization() { - sp_tracing::try_init_simple(); - let api = TestApi::empty(); - // starting block A1 (last finalized.) - let a_header = api.push_block(1, vec![], true); - - let pool = create_basic_pool(api); - let mut canon_watchers = vec![]; - - let from_alice = uxt(Alice, 1); - let from_dave = uxt(Dave, 2); - let from_bob = uxt(Bob, 1); - let from_charlie = uxt(Charlie, 1); - pool.api().increment_nonce(Alice.into()); - pool.api().increment_nonce(Dave.into()); - pool.api().increment_nonce(Charlie.into()); - pool.api().increment_nonce(Bob.into()); - - let from_dave_watcher; - let from_bob_watcher; - let b1; - let c1; - let d1; - let c2; - let d2; - - block_on(pool.maintain(block_event(a_header))); - - // block B1 - { - let watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone())).expect("1. Imported"); - let header = pool.api().push_block(2, vec![from_alice.clone()], true); - canon_watchers.push((watcher, header.hash())); - assert_eq!(pool.status().ready, 1); - - log::trace!(target: LOG_TARGET, ">> B1: {:?} {:?}", header.hash(), header); - let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; - b1 = header.hash(); - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - let event = ChainEvent::Finalized { hash: b1, tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - } - - // block C2 - { - let header = pool.api().push_block_with_parent(b1, vec![from_dave.clone()], true); - from_dave_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_dave.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 1); - log::trace!(target: LOG_TARGET, ">> C2: {:?} {:?}", header.hash(), header); - let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; - c2 = header.hash(); - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - } - - // block D2 - { - from_bob_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_bob.clone())).expect("1. 
Imported"); - assert_eq!(pool.status().ready, 1); - let header = pool.api().push_block_with_parent(c2, vec![from_bob.clone()], true); - - log::trace!(target: LOG_TARGET, ">> D2: {:?} {:?}", header.hash(), header); - let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; - d2 = header.hash(); - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - } - - // block C1 - { - let watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_charlie.clone())).expect("1.Imported"); - assert_eq!(pool.status().ready, 1); - let header = pool.api().push_block_with_parent(b1, vec![from_charlie.clone()], true); - log::trace!(target: LOG_TARGET, ">> C1: {:?} {:?}", header.hash(), header); - c1 = header.hash(); - canon_watchers.push((watcher, header.hash())); - let event = block_event_with_retracted(header.clone(), d2, pool.api()); - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 2); - - let event = ChainEvent::Finalized { hash: header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - } - - // block D1 - { - let xt = uxt(Eve, 0); - let w = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, xt.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 3); - let header = pool.api().push_block_with_parent(c1, vec![xt.clone()], true); - log::trace!(target: LOG_TARGET, ">> D1: {:?} {:?}", header.hash(), header); - d1 = header.hash(); - canon_watchers.push((w, header.hash())); - - let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 2); - let event = ChainEvent::Finalized { hash: d1, tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - } - - let e1; - - // block E1 - { - let header = pool.api().push_block_with_parent(d1, vec![from_dave, from_bob], true); - log::trace!(target: LOG_TARGET, ">> E1: {:?} {:?}", header.hash(), header); - e1 = header.hash(); - let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - block_on(pool.maintain(ChainEvent::Finalized { hash: e1, tree_route: Arc::from(vec![]) })); - } - - for (canon_watcher, h) in canon_watchers { - let mut stream = futures::executor::block_on_stream(canon_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((h, 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((h, 0)))); - assert_eq!(stream.next(), None); - } - - { - let mut stream = futures::executor::block_on_stream(from_dave_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((c2, 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Retracted(c2))); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((e1, 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((e1, 0)))); - assert_eq!(stream.next(), None); - } - - { - let mut stream = futures::executor::block_on_stream(from_bob_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((d2, 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Retracted(d2))); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - // In block e1 we submitted: [dave, bob] xts in this order. 
- assert_eq!(stream.next(), Some(TransactionStatus::InBlock((e1, 1)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((e1, 1)))); - assert_eq!(stream.next(), None); - } -} - -/// Tests that when pruning and retracing a tx by the same event, we generate -/// the correct events in the correct order. -#[test] -fn prune_and_retract_tx_at_same_time() { - let api = TestApi::empty(); - // starting block A1 (last finalized.) - api.push_block(1, vec![], true); - - let pool = create_basic_pool(api); - - let from_alice = uxt(Alice, 1); - pool.api().increment_nonce(Alice.into()); - - let watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone())).expect("1. Imported"); - - // Block B1 - let b1 = { - let header = pool.api().push_block(2, vec![from_alice.clone()], true); - assert_eq!(pool.status().ready, 1); - - let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - header.hash() - }; - - // Block B2 - let b2 = { - let header = pool.api().push_block(2, vec![from_alice.clone()], true); - assert_eq!(pool.status().ready, 0); - - let event = block_event_with_retracted(header.clone(), b1, pool.api()); - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - - let event = ChainEvent::Finalized { hash: header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - - header.hash() - }; - - { - let mut stream = futures::executor::block_on_stream(watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b1, 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Retracted(b1))); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b2, 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((b2, 0)))); - assert_eq!(stream.next(), None); - } -} - -/// This test ensures that transactions from a fork are re-submitted if -/// the forked block is not part of the retracted blocks. This happens as the -/// retracted block list only contains the route from the old best to the new -/// best, without any further forks. -/// -/// Given the following: -/// -/// -> D0 (old best, tx0) -/// / -/// C - -> D1 (tx1) -/// \ -/// -> D2 (new best) -/// -/// Retracted will contain `D0`, but we need to re-submit `tx0` and `tx1` as both -/// blocks are not part of the canonical chain. -#[test] -fn resubmit_tx_of_fork_that_is_not_part_of_retracted() { - let api = TestApi::empty(); - // starting block A1 (last finalized.) - api.push_block(1, vec![], true); - - let pool = create_basic_pool(api); - - let tx0 = uxt(Alice, 1); - let tx1 = uxt(Dave, 2); - pool.api().increment_nonce(Alice.into()); - pool.api().increment_nonce(Dave.into()); - - let d0; - - // Block D0 - { - let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx0.clone())).expect("1. Imported"); - let header = pool.api().push_block(2, vec![tx0.clone()], true); - assert_eq!(pool.status().ready, 1); - - let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; - d0 = header.hash(); - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - } - - // Block D1 - { - let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx1.clone())).expect("1. 
Imported"); - pool.api().push_block(2, vec![tx1.clone()], false); - assert_eq!(pool.status().ready, 1); - } - - // Block D2 - { - // push new best block - let header = pool.api().push_block(2, vec![], true); - let event = block_event_with_retracted(header, d0, pool.api()); - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 2); - } -} - -#[test] -fn resubmit_from_retracted_fork() { - let api = TestApi::empty(); - // starting block A1 (last finalized.) - api.push_block(1, vec![], true); - - let pool = create_basic_pool(api); - - let tx0 = uxt(Alice, 1); - let tx1 = uxt(Dave, 2); - let tx2 = uxt(Bob, 3); - - // Transactions of the fork that will be enacted later - let tx3 = uxt(Eve, 1); - let tx4 = uxt(Ferdie, 2); - let tx5 = uxt(One, 3); - - pool.api().increment_nonce(Alice.into()); - pool.api().increment_nonce(Dave.into()); - pool.api().increment_nonce(Bob.into()); - pool.api().increment_nonce(Eve.into()); - pool.api().increment_nonce(Ferdie.into()); - pool.api().increment_nonce(One.into()); - - // Block D0 - { - let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx0.clone())).expect("1. Imported"); - let header = pool.api().push_block(2, vec![tx0.clone()], true); - assert_eq!(pool.status().ready, 1); - - block_on(pool.maintain(block_event(header))); - assert_eq!(pool.status().ready, 0); - } - - // Block E0 - { - let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx1.clone())).expect("1. Imported"); - let header = pool.api().push_block(3, vec![tx1.clone()], true); - block_on(pool.maintain(block_event(header))); - assert_eq!(pool.status().ready, 0); - } - - // Block F0 - let f0 = { - let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx2.clone())).expect("1. Imported"); - let header = pool.api().push_block(4, vec![tx2.clone()], true); - block_on(pool.maintain(block_event(header.clone()))); - assert_eq!(pool.status().ready, 0); - header.hash() - }; - - // Block D1 - let d1 = { - let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx3.clone())).expect("1. Imported"); - let header = pool.api().push_block(2, vec![tx3.clone()], true); - assert_eq!(pool.status().ready, 1); - header.hash() - }; - - // Block E1 - let e1 = { - let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx4.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(d1, vec![tx4.clone()], true); - assert_eq!(pool.status().ready, 2); - header.hash() - }; - - // Block F1 - let f1_header = { - let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx5.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(e1, vec![tx5.clone()], true); - // Don't announce the block event to the pool directly, because we will - // re-org to this block. 
- assert_eq!(pool.status().ready, 3); - header - }; - - let ready = pool.ready().map(|t| t.data.encode()).collect::>(); - let expected_ready = vec![tx3, tx4, tx5].iter().map(Encode::encode).collect::>(); - assert_eq!(expected_ready, ready); - - let event = block_event_with_retracted(f1_header, f0, pool.api()); - block_on(pool.maintain(event)); - - assert_eq!(pool.status().ready, 3); - let ready = pool.ready().map(|t| t.data.encode()).collect::>(); - let expected_ready = vec![tx0, tx1, tx2].iter().map(Encode::encode).collect::>(); - assert_eq!(expected_ready, ready); -} - -#[test] -fn ready_set_should_not_resolve_before_block_update() { - let (pool, _api, _guard) = maintained_pool(); - let xt1 = uxt(Alice, 209); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt1.clone())).expect("1. Imported"); - - assert!(pool.ready_at(1).now_or_never().is_none()); -} - -#[test] -fn ready_set_should_resolve_after_block_update() { - let (pool, api, _guard) = maintained_pool(); - let header = api.push_block(1, vec![], true); - - let xt1 = uxt(Alice, 209); - - block_on(pool.submit_one(&BlockId::number(1), SOURCE, xt1.clone())).expect("1. Imported"); - block_on(pool.maintain(block_event(header))); - - assert!(pool.ready_at(1).now_or_never().is_some()); -} - -#[test] -fn ready_set_should_eventually_resolve_when_block_update_arrives() { - let (pool, api, _guard) = maintained_pool(); - let header = api.push_block(1, vec![], true); - - let xt1 = uxt(Alice, 209); - - block_on(pool.submit_one(&BlockId::number(1), SOURCE, xt1.clone())).expect("1. Imported"); - - let noop_waker = futures::task::noop_waker(); - let mut context = futures::task::Context::from_waker(&noop_waker); - - let mut ready_set_future = pool.ready_at(1); - if ready_set_future.poll_unpin(&mut context).is_ready() { - panic!("Ready set should not be ready before block update!"); - } - - block_on(pool.maintain(block_event(header))); - - match ready_set_future.poll_unpin(&mut context) { - Poll::Pending => { - panic!("Ready set should become ready after block update!"); - } - Poll::Ready(iterator) => { - let data = iterator.collect::>(); - assert_eq!(data.len(), 1); - } - } -} - -#[test] -fn import_notification_to_pool_maintain_works() { - let mut client = Arc::new(substrate_test_runtime_client::new()); - - let best_hash = client.info().best_hash; - let finalized_hash = client.info().finalized_hash; - - let pool = Arc::new( - BasicPool::new_test( - Arc::new(FullChainApi::new(client.clone(), None, &sp_core::testing::TaskExecutor::new())), - best_hash, - finalized_hash, - ) - .0, - ); - - // Prepare the extrisic, push it to the pool and check that it was added. - let xt = uxt(Alice, 0); - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 1); - - let mut import_stream = block_on_stream(client.import_notification_stream()); - - // Build the block with the transaction included - let mut block_builder = client.new_block(Default::default()).unwrap(); - block_builder.push(xt).unwrap(); - let block = block_builder.build().unwrap().block; - block_on(client.import(BlockOrigin::Own, block)).unwrap(); - - // Get the notification of the block import and maintain the pool with it, - // Now, the pool should not contain any transactions. 
- let evt = import_stream.next().expect("Importing a block leads to an event"); - block_on(pool.maintain(evt.try_into().expect("Imported as new best block"))); - assert_eq!(pool.status().ready, 0); -} - -// When we prune transactions, we need to make sure that we remove -#[test] -fn pruning_a_transaction_should_remove_it_from_best_transaction() { - let (pool, api, _guard) = maintained_pool(); - - let xt1 = ExtrinsicBuilder::new_include_data(Vec::new()).build(); - - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt1.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 1); - let header = api.push_block(1, vec![xt1.clone()], true); - - // This will prune `xt1`. - block_on(pool.maintain(block_event(header))); - - assert_eq!(pool.status().ready, 0); -} - -#[test] -fn stale_transactions_are_pruned() { - sp_tracing::try_init_simple(); - - // Our initial transactions - let xts = vec![ - Transfer { from: Alice.into(), to: Bob.into(), nonce: 1, amount: 1 }, - Transfer { from: Alice.into(), to: Bob.into(), nonce: 2, amount: 1 }, - Transfer { from: Alice.into(), to: Bob.into(), nonce: 3, amount: 1 }, - ]; - - let (pool, api, _guard) = maintained_pool(); - - xts.into_iter().for_each(|xt| { - block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt.into_unchecked_extrinsic())).expect("1. Imported"); - }); - assert_eq!(pool.status().ready, 0); - assert_eq!(pool.status().future, 3); - - // Almost the same as our initial transactions, but with some different `amount`s to make them - // generate a different hash - let xts = vec![ - Transfer { from: Alice.into(), to: Bob.into(), nonce: 1, amount: 2 }.into_unchecked_extrinsic(), - Transfer { from: Alice.into(), to: Bob.into(), nonce: 2, amount: 2 }.into_unchecked_extrinsic(), - Transfer { from: Alice.into(), to: Bob.into(), nonce: 3, amount: 2 }.into_unchecked_extrinsic(), - ]; - - // Import block - let header = api.push_block(1, xts, true); - block_on(pool.maintain(block_event(header))); - // The imported transactions have a different hash and should not evict our initial - // transactions. - assert_eq!(pool.status().future, 3); - - // Import enough blocks to make our transactions stale - for n in 1..66 { - let header = api.push_block(n, vec![], true); - block_on(pool.maintain(block_event(header))); - } - - assert_eq!(pool.status().future, 0); - assert_eq!(pool.status().ready, 0); -} - -#[test] -fn finalized_only_handled_correctly() { - sp_tracing::try_init_simple(); - let xt = uxt(Alice, 209); - - let (pool, api, _guard) = maintained_pool(); - - let watcher = block_on(pool.submit_and_watch(&BlockId::number(0), SOURCE, xt.clone())).expect("1. Imported"); - assert_eq!(pool.status().ready, 1); - - let header = api.push_block(1, vec![xt], true); - - let event = ChainEvent::Finalized { hash: header.clone().hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - - assert_eq!(pool.status().ready, 0); - - { - let mut stream = futures::executor::block_on_stream(watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((header.clone().hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((header.hash(), 0)))); - assert_eq!(stream.next(), None); - } -} - -#[test] -fn best_block_after_finalized_handled_correctly() { - sp_tracing::try_init_simple(); - let xt = uxt(Alice, 209); - - let (pool, api, _guard) = maintained_pool(); - - let watcher = block_on(pool.submit_and_watch(&BlockId::number(0), SOURCE, xt.clone())).expect("1. 
Imported"); - assert_eq!(pool.status().ready, 1); - - let header = api.push_block(1, vec![xt], true); - - let event = ChainEvent::Finalized { hash: header.clone().hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - block_on(pool.maintain(block_event(header.clone()))); - - assert_eq!(pool.status().ready, 0); - - { - let mut stream = futures::executor::block_on_stream(watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((header.clone().hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((header.hash(), 0)))); - assert_eq!(stream.next(), None); - } -} - -#[test] -fn switching_fork_with_finalized_works() { - sp_tracing::try_init_simple(); - let api = TestApi::empty(); - // starting block A1 (last finalized.) - let a_header = api.push_block(1, vec![], true); - - let pool = create_basic_pool(api); - - let from_alice = uxt(Alice, 1); - let from_bob = uxt(Bob, 2); - pool.api().increment_nonce(Alice.into()); - pool.api().increment_nonce(Bob.into()); - - let from_alice_watcher; - let from_bob_watcher; - let b1_header; - let b2_header; - - // block B1 - { - from_alice_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(a_header.hash(), vec![from_alice.clone()], true); - assert_eq!(pool.status().ready, 1); - log::trace!(target: LOG_TARGET, ">> B1: {:?} {:?}", header.hash(), header); - b1_header = header; - } - - // block B2 - { - from_bob_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_bob.clone())).expect("1. Imported"); - let header = - pool.api().push_block_with_parent(a_header.hash(), vec![from_alice.clone(), from_bob.clone()], true); - assert_eq!(pool.status().ready, 2); - - log::trace!(target: LOG_TARGET, ">> B2: {:?} {:?}", header.hash(), header); - b2_header = header; - } - - { - let event = ChainEvent::NewBestBlock { hash: b1_header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 1); - } - - { - let event = ChainEvent::Finalized { hash: b2_header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - } - - { - let mut stream = futures::executor::block_on_stream(from_alice_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b1_header.hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Retracted(b1_header.hash()))); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b2_header.hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((b2_header.hash(), 0)))); - assert_eq!(stream.next(), None); - } - - { - let mut stream = futures::executor::block_on_stream(from_bob_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b2_header.hash(), 1)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((b2_header.hash(), 1)))); - assert_eq!(stream.next(), None); - } -} - -#[test] -fn switching_fork_multiple_times_works() { - sp_tracing::try_init_simple(); - let api = TestApi::empty(); - // starting block A1 (last finalized.) 
- let a_header = api.push_block(1, vec![], true); - - let pool = create_basic_pool(api); - - let from_alice = uxt(Alice, 1); - let from_bob = uxt(Bob, 2); - pool.api().increment_nonce(Alice.into()); - pool.api().increment_nonce(Bob.into()); - - let from_alice_watcher; - let from_bob_watcher; - let b1_header; - let b2_header; - - // block B1 - { - from_alice_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(a_header.hash(), vec![from_alice.clone()], true); - assert_eq!(pool.status().ready, 1); - log::trace!(target: LOG_TARGET, ">> B1: {:?} {:?}", header.hash(), header); - b1_header = header; - } - - // block B2 - { - from_bob_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_bob.clone())).expect("1. Imported"); - let header = - pool.api().push_block_with_parent(a_header.hash(), vec![from_alice.clone(), from_bob.clone()], true); - assert_eq!(pool.status().ready, 2); - - log::trace!(target: LOG_TARGET, ">> B2: {:?} {:?}", header.hash(), header); - b2_header = header; - } - - { - // phase-0 - let event = ChainEvent::NewBestBlock { hash: b1_header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 1); - } - - { - // phase-1 - let event = block_event_with_retracted(b2_header.clone(), b1_header.hash(), pool.api()); - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - } - - { - // phase-2 - let event = block_event_with_retracted(b1_header.clone(), b2_header.hash(), pool.api()); - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 1); - } - - { - // phase-3 - let event = ChainEvent::Finalized { hash: b2_header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - } - - { - let mut stream = futures::executor::block_on_stream(from_alice_watcher); - // phase-0 - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b1_header.hash(), 0)))); - // phase-1 - assert_eq!(stream.next(), Some(TransactionStatus::Retracted(b1_header.hash()))); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b2_header.hash(), 0)))); - // phase-2 - assert_eq!(stream.next(), Some(TransactionStatus::Retracted(b2_header.hash()))); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b1_header.hash(), 0)))); - // phase-3 - assert_eq!(stream.next(), Some(TransactionStatus::Retracted(b1_header.hash()))); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b2_header.hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((b2_header.hash(), 0)))); - assert_eq!(stream.next(), None); - } - - { - let mut stream = futures::executor::block_on_stream(from_bob_watcher); - // phase-1 - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b2_header.hash(), 1)))); - // phase-2 - assert_eq!(stream.next(), Some(TransactionStatus::Retracted(b2_header.hash()))); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - // phase-3 - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b2_header.hash(), 1)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((b2_header.hash(), 1)))); - assert_eq!(stream.next(), None); - } -} - -#[test] -fn two_blocks_delayed_finalization_works() { - sp_tracing::try_init_simple(); - let api = TestApi::empty(); - // starting block A1 (last finalized.) 
- let a_header = api.push_block(1, vec![], true); - - let pool = create_basic_pool(api); - - let from_alice = uxt(Alice, 1); - let from_bob = uxt(Bob, 2); - let from_charlie = uxt(Charlie, 3); - pool.api().increment_nonce(Alice.into()); - pool.api().increment_nonce(Bob.into()); - pool.api().increment_nonce(Charlie.into()); - - let from_alice_watcher; - let from_bob_watcher; - let from_charlie_watcher; - let b1_header; - let c1_header; - let d1_header; - - // block B1 - { - from_alice_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(a_header.hash(), vec![from_alice.clone()], true); - assert_eq!(pool.status().ready, 1); - - log::trace!(target: LOG_TARGET, ">> B1: {:?} {:?}", header.hash(), header); - b1_header = header; - } - - // block C1 - { - from_bob_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_bob.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(b1_header.hash(), vec![from_bob.clone()], true); - assert_eq!(pool.status().ready, 2); - - log::trace!(target: LOG_TARGET, ">> C1: {:?} {:?}", header.hash(), header); - c1_header = header; - } - - // block D1 - { - from_charlie_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_charlie.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(c1_header.hash(), vec![from_charlie.clone()], true); - assert_eq!(pool.status().ready, 3); - - log::trace!(target: LOG_TARGET, ">> D1: {:?} {:?}", header.hash(), header); - d1_header = header; - } - - { - let event = ChainEvent::Finalized { hash: a_header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 3); - } - - { - let event = ChainEvent::NewBestBlock { hash: d1_header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - } - - { - let event = ChainEvent::Finalized { hash: c1_header.hash(), tree_route: Arc::from(vec![b1_header.hash()]) }; - block_on(pool.maintain(event)); - } - - // this is to collect events from_charlie_watcher and make sure nothing was retracted - { - let event = ChainEvent::Finalized { hash: d1_header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - } - - { - let mut stream = futures::executor::block_on_stream(from_alice_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b1_header.hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((b1_header.hash(), 0)))); - assert_eq!(stream.next(), None); - } - - { - let mut stream = futures::executor::block_on_stream(from_bob_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((c1_header.hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((c1_header.hash(), 0)))); - assert_eq!(stream.next(), None); - } - - { - let mut stream = futures::executor::block_on_stream(from_charlie_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((d1_header.hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((d1_header.hash(), 0)))); - assert_eq!(stream.next(), None); - } -} - -#[test] -fn delayed_finalization_does_not_retract() { - sp_tracing::try_init_simple(); - let api = TestApi::empty(); - // 
starting block A1 (last finalized.) - let a_header = api.push_block(1, vec![], true); - - let pool = create_basic_pool(api); - - let from_alice = uxt(Alice, 1); - let from_bob = uxt(Bob, 2); - pool.api().increment_nonce(Alice.into()); - pool.api().increment_nonce(Bob.into()); - - let from_alice_watcher; - let from_bob_watcher; - let b1_header; - let c1_header; - - // block B1 - { - from_alice_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(a_header.hash(), vec![from_alice.clone()], true); - assert_eq!(pool.status().ready, 1); - - log::trace!(target: LOG_TARGET, ">> B1: {:?} {:?}", header.hash(), header); - b1_header = header; - } - - // block C1 - { - from_bob_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_bob.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(b1_header.hash(), vec![from_bob.clone()], true); - assert_eq!(pool.status().ready, 2); - - log::trace!(target: LOG_TARGET, ">> C1: {:?} {:?}", header.hash(), header); - c1_header = header; - } - - { - // phase-0 - let event = ChainEvent::NewBestBlock { hash: b1_header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 1); - } - - { - // phase-1 - let event = ChainEvent::NewBestBlock { hash: c1_header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - } - - { - // phase-2 - let event = ChainEvent::Finalized { hash: b1_header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - } - - { - // phase-3 - let event = ChainEvent::Finalized { hash: c1_header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - } - - { - let mut stream = futures::executor::block_on_stream(from_alice_watcher); - // phase-0 - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b1_header.hash(), 0)))); - // phase-2 - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((b1_header.hash(), 0)))); - assert_eq!(stream.next(), None); - } - - { - let mut stream = futures::executor::block_on_stream(from_bob_watcher); - // phase-0 - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - // phase-1 - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((c1_header.hash(), 0)))); - // phase-3 - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((c1_header.hash(), 0)))); - assert_eq!(stream.next(), None); - } -} - -#[test] -fn best_block_after_finalization_does_not_retract() { - sp_tracing::try_init_simple(); - let api = TestApi::empty(); - // starting block A1 (last finalized.) - let a_header = api.push_block(1, vec![], true); - - let pool = create_basic_pool(api); - - let from_alice = uxt(Alice, 1); - let from_bob = uxt(Bob, 2); - pool.api().increment_nonce(Alice.into()); - pool.api().increment_nonce(Bob.into()); - - let from_alice_watcher; - let from_bob_watcher; - let b1_header; - let c1_header; - - // block B1 - { - from_alice_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone())).expect("1. 
Imported"); - let header = pool.api().push_block_with_parent(a_header.hash(), vec![from_alice.clone()], true); - assert_eq!(pool.status().ready, 1); - - log::trace!(target: LOG_TARGET, ">> B1: {:?} {:?}", header.hash(), header); - b1_header = header; - } - - // block C1 - { - from_bob_watcher = - block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_bob.clone())).expect("1. Imported"); - let header = pool.api().push_block_with_parent(b1_header.hash(), vec![from_bob.clone()], true); - assert_eq!(pool.status().ready, 2); - - log::trace!(target: LOG_TARGET, ">> C1: {:?} {:?}", header.hash(), header); - c1_header = header; - } - - { - let event = ChainEvent::Finalized { hash: a_header.hash(), tree_route: Arc::from(vec![]) }; - block_on(pool.maintain(event)); - } - - { - let event = ChainEvent::Finalized { - hash: c1_header.hash(), - tree_route: Arc::from(vec![a_header.hash(), b1_header.hash()]), - }; - block_on(pool.maintain(event)); - assert_eq!(pool.status().ready, 0); - } - - { - let event = ChainEvent::NewBestBlock { hash: b1_header.hash(), tree_route: None }; - block_on(pool.maintain(event)); - } - - { - let mut stream = futures::executor::block_on_stream(from_alice_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((b1_header.hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((b1_header.hash(), 0)))); - assert_eq!(stream.next(), None); - } - - { - let mut stream = futures::executor::block_on_stream(from_bob_watcher); - assert_eq!(stream.next(), Some(TransactionStatus::Ready)); - assert_eq!(stream.next(), Some(TransactionStatus::InBlock((c1_header.hash(), 0)))); - assert_eq!(stream.next(), Some(TransactionStatus::Finalized((c1_header.hash(), 0)))); - assert_eq!(stream.next(), None); - } -} diff --git a/crates/node/Cargo.toml b/crates/node/Cargo.toml index 0c003f9cd0..5d7074e926 100644 --- a/crates/node/Cargo.toml +++ b/crates/node/Cargo.toml @@ -31,6 +31,7 @@ serde = { workspace = true } tokio = { workspace = true } frame-system = { workspace = true } +sc-basic-authorship = { workspace = true } sc-cli = { workspace = true } sc-client-api = { workspace = true } sc-consensus = { workspace = true } @@ -41,8 +42,10 @@ sc-executor = { workspace = true } sc-keystore = { workspace = true } sc-network = { workspace = true } sc-network-sync = { workspace = true } +sc-offchain = { workspace = true } sc-service = { workspace = true } sc-telemetry = { workspace = true } +sc-transaction-pool = { workspace = true } sc-transaction-pool-api = { workspace = true } sp-consensus-aura = { workspace = true } sp-consensus-grandpa = { workspace = true } @@ -52,6 +55,7 @@ sp-keyring = { workspace = true } sp-offchain = { workspace = true } sp-runtime = { workspace = true } sp-state-machine = { workspace = true } +sp-statement-store = { workspace = true } sp-timestamp = { workspace = true } sp-trie = { workspace = true, features = ["default"] } @@ -78,14 +82,12 @@ frame-benchmarking-cli = { workspace = true } blockifier = { workspace = true } hex = { workspace = true } madara-runtime = { workspace = true } -mc-block-proposer = { workspace = true } mc-commitment-state-diff = { workspace = true } mc-data-availability = { workspace = true } mc-db = { workspace = true } mc-mapping-sync = { workspace = true } mc-rpc = { workspace = true } mc-storage = { workspace = true } -mc-transaction-pool = { workspace = true } pallet-starknet = { workspace = true } starknet-core = { workspace = true } @@ -101,7 
+103,7 @@ mp-felt = { workspace = true } mp-sequencer-address = { workspace = true, features = ["client"] } # CLI-specific dependencies -try-runtime-cli = { optional = true, git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.43" } +try-runtime-cli = { optional = true, git = "https://github.com/paritytech/polkadot-sdk", branch = "release-polkadot-v1.3.0" } #Deoxys diff --git a/crates/node/src/chain_spec.rs b/crates/node/src/chain_spec.rs index 6ff7d0693f..95171c8f8f 100644 --- a/crates/node/src/chain_spec.rs +++ b/crates/node/src/chain_spec.rs @@ -1,6 +1,6 @@ use std::path::PathBuf; -use madara_runtime::{AuraConfig, GenesisConfig, GrandpaConfig, SealingMode, SystemConfig, WASM_BINARY}; +use madara_runtime::{AuraConfig, GrandpaConfig, RuntimeGenesisConfig, SealingMode, SystemConfig, WASM_BINARY}; use mp_felt::Felt252Wrapper; use pallet_starknet::genesis_loader::{GenesisData, GenesisLoader, HexFelt}; use sc_service::{BasePath, ChainType}; @@ -17,7 +17,7 @@ pub const GENESIS_ASSETS_DIR: &str = "genesis-assets/"; pub const GENESIS_ASSETS_FILE: &str = "genesis.json"; /// Specialized `ChainSpec`. This is a specialization of the general Substrate ChainSpec type. -pub type ChainSpec = sc_service::GenericChainSpec; +pub type ChainSpec = sc_service::GenericChainSpec; /// Specialized `ChainSpec` for development. pub type DevChainSpec = sc_service::GenericChainSpec; @@ -26,7 +26,7 @@ pub type DevChainSpec = sc_service::GenericChainSpec; #[derive(Serialize, Deserialize)] pub struct DevGenesisExt { /// Genesis config. - genesis_config: GenesisConfig, + genesis_config: RuntimeGenesisConfig, /// The sealing mode being used. sealing: SealingMode, } @@ -167,18 +167,22 @@ fn testnet_genesis( wasm_binary: &[u8], initial_authorities: Vec<(AuraId, GrandpaId)>, _enable_println: bool, -) -> GenesisConfig { +) -> RuntimeGenesisConfig { let starknet_genesis_config: madara_runtime::pallet_starknet::GenesisConfig<_> = genesis_loader.into(); - GenesisConfig { + RuntimeGenesisConfig { system: SystemConfig { // Add Wasm runtime to storage. code: wasm_binary.to_vec(), + _config: Default::default(), }, // Authority-based consensus protocol used for block production aura: AuraConfig { authorities: initial_authorities.iter().map(|x| (x.0.clone())).collect() }, // Deterministic finality mechanism used for block finalization - grandpa: GrandpaConfig { authorities: initial_authorities.iter().map(|x| (x.1.clone(), 1)).collect() }, + grandpa: GrandpaConfig { + authorities: initial_authorities.iter().map(|x| (x.1.clone(), 1)).collect(), + _config: Default::default(), + }, /// Starknet Genesis configuration. 
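As an illustrative aside, not part of the patch itself: the GenesisConfig to RuntimeGenesisConfig rename and the new `_config: Default::default()` entries follow the assumed polkadot-sdk 1.x convention in which `construct_runtime!` emits `RuntimeGenesisConfig` and each pallet genesis struct carries a serde-skipped marker field. A minimal standalone sketch of that shape (all names here are illustrative, and it assumes only the serde and serde_json crates):

use serde::{Deserialize, Serialize};

// Assumed shape of a 1.x-style pallet genesis struct: the `_config` marker is
// skipped during (de)serialization, so builders fill it with `Default::default()`,
// much as chain_spec.rs now does for the system and grandpa configs.
#[derive(Default, Serialize, Deserialize)]
pub struct GrandpaGenesisSketch {
    pub authorities: Vec<(String, u64)>,
    #[serde(skip)]
    pub _config: (),
}

fn main() {
    let genesis = GrandpaGenesisSketch { authorities: vec![("alice".into(), 1)], _config: Default::default() };
    // `_config` never shows up in the emitted chain-spec JSON.
    println!("{}", serde_json::to_string(&genesis).unwrap());
}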
starknet: starknet_genesis_config, } diff --git a/crates/node/src/command.rs b/crates/node/src/command.rs index f9294d4b23..4a1d31c37d 100644 --- a/crates/node/src/command.rs +++ b/crates/node/src/command.rs @@ -1,6 +1,6 @@ use frame_benchmarking_cli::{BenchmarkCmd, ExtrinsicFactory, SUBSTRATE_REFERENCE_HARDWARE}; use madara_runtime::Block; -use sc_cli::{ChainSpec, RuntimeVersion, SubstrateCli}; +use sc_cli::{ChainSpec, SubstrateCli}; use crate::benchmarking::{inherent_benchmark_data, RemarkBuilder}; use crate::cli::{Cli, Subcommand}; @@ -53,10 +53,6 @@ impl SubstrateCli for Cli { path_or_url => Box::new(chain_spec::ChainSpec::from_json_file(std::path::PathBuf::from(path_or_url))?), }) } - - fn native_runtime_version(_: &Box) -> &'static RuntimeVersion { - &madara_runtime::VERSION - } } /// Parse and run command line arguments @@ -126,7 +122,7 @@ pub fn run() -> sc_cli::Result<()> { .into()); } - cmd.run::(config) + cmd.run::(config) } BenchmarkCmd::Block(cmd) => { let (client, _, _, _, _) = service::new_chain_ops(&mut config, cli.run.cache)?; diff --git a/crates/node/src/commands/run.rs b/crates/node/src/commands/run.rs index a67f8b6f0b..5353693702 100644 --- a/crates/node/src/commands/run.rs +++ b/crates/node/src/commands/run.rs @@ -67,7 +67,7 @@ impl NetworkType { let gateway = format!("{uri}/gateway").parse().unwrap(); let feeder_gateway = format!("{uri}/feeder_gateway").parse().unwrap(); - mc_deoxys::BlockFetchConfig { gateway, feeder_gateway, chain_id, workers: 5 } + mc_deoxys::BlockFetchConfig { gateway, feeder_gateway, chain_id, workers: 5, sound: false } } } @@ -95,6 +95,10 @@ pub struct ExtendedRunCmd { #[clap(long)] pub cache: bool, + /// Yes. + #[clap(long)] + pub sound: bool, + #[clap(long)] pub deoxys: bool, } @@ -136,7 +140,8 @@ pub fn run_node(mut cli: Cli) -> Result<()> { runner.run_node_until_exit(|config| async move { let sealing = cli.run.sealing.map(Into::into).unwrap_or_default(); let cache = cli.run.cache; - let fetch_block_config = cli.run.network.block_fetch_config(); + let mut fetch_block_config = cli.run.network.block_fetch_config(); + fetch_block_config.sound = cli.run.sound; service::new_full(config, sealing, da_config, cli.run.base.rpc_port.unwrap(), cache, fetch_block_config) .map_err(sc_cli::Error::Service) }) diff --git a/crates/node/src/commands/setup.rs b/crates/node/src/commands/setup.rs index b1a925a80b..ff9b25e461 100644 --- a/crates/node/src/commands/setup.rs +++ b/crates/node/src/commands/setup.rs @@ -148,6 +148,7 @@ fn copy_file(src_path: &Path, dest_dir_path: &PathBuf) -> Result<()> { std::fs::create_dir_all(dest_dir_path)?; let dest_file_path = dest_dir_path.join(src_path.file_name().ok_or("File name not found")?); + println!("Copying '{}' to '{}'", src_path.display(), dest_file_path.display()); std::fs::copy(src_path, dest_file_path)?; Ok(()) diff --git a/crates/node/src/rpc/mod.rs b/crates/node/src/rpc/mod.rs index f3ff278170..dc4802763e 100644 --- a/crates/node/src/rpc/mod.rs +++ b/crates/node/src/rpc/mod.rs @@ -12,10 +12,10 @@ use futures::channel::mpsc; use jsonrpsee::RpcModule; use madara_runtime::opaque::Block; use madara_runtime::{AccountId, Hash, Index, StarknetHasher}; -use mc_transaction_pool::{ChainApi, Pool}; use sc_client_api::{Backend, BlockBackend, StorageProvider}; use sc_consensus_manual_seal::rpc::EngineCommand; pub use sc_rpc_api::DenyUnsafe; +use sc_transaction_pool::{ChainApi, Pool}; use sc_transaction_pool_api::TransactionPool; use sp_api::ProvideRuntimeApi; use sp_block_builder::BlockBuilder; diff --git 
a/crates/node/src/service.rs b/crates/node/src/service.rs index 60c31e3e46..be65db697e 100644 --- a/crates/node/src/service.rs +++ b/crates/node/src/service.rs @@ -11,7 +11,6 @@ use futures::future::BoxFuture; use futures::prelude::*; use madara_runtime::opaque::Block; use madara_runtime::{self, Hash, RuntimeApi, SealingMode, StarknetHasher}; -use mc_block_proposer::ProposerFactory; use mc_commitment_state_diff::{log_commitment_state_diff, CommitmentStateDiffWorker}; use mc_data_availability::avail::config::AvailConfig; use mc_data_availability::avail::AvailClient; @@ -22,12 +21,12 @@ use mc_data_availability::ethereum::EthereumClient; use mc_data_availability::{DaClient, DaLayer, DataAvailabilityWorker}; use mc_mapping_sync::MappingSyncWorker; use mc_storage::overrides_handle; -use mc_transaction_pool::FullPool; use mp_sequencer_address::{ InherentDataProvider as SeqAddrInherentDataProvider, DEFAULT_SEQUENCER_ADDRESS, SEQ_ADDR_STORAGE_KEY, }; use parity_scale_codec::Encode; use prometheus_endpoint::Registry; +use sc_basic_authorship::ProposerFactory; use sc_client_api::{Backend, BlockBackend, BlockchainEvents, HeaderBackend}; use sc_consensus::{BasicQueue, BlockImportParams}; use sc_consensus_aura::{SlotProportion, StartAuraParams}; @@ -37,8 +36,10 @@ pub use sc_executor::NativeElseWasmExecutor; use sc_service::error::Error as ServiceError; use sc_service::{new_db_backend, Configuration, TaskManager, WarpSyncParams}; use sc_telemetry::{Telemetry, TelemetryHandle, TelemetryWorker}; +use sc_transaction_pool::FullPool; +use sc_transaction_pool_api::OffchainTransactionPoolFactory; use sp_api::offchain::OffchainStorage; -use sp_api::{ConstructRuntimeApi, ProvideRuntimeApi, TransactionFor}; +use sp_api::{ConstructRuntimeApi, ProvideRuntimeApi}; use sp_consensus_aura::sr25519::AuthorityPair as AuraPair; use sp_inherents::InherentData; use sp_offchain::STORAGE_PREFIX; @@ -73,8 +74,12 @@ pub(crate) type FullClient = sc_service::TFullClient; type FullSelectChain = sc_consensus::LongestChain; -type BasicImportQueue = sc_consensus::DefaultImportQueue; -type BoxBlockImport = sc_consensus::BoxBlockImport>; +type BasicImportQueue = sc_consensus::DefaultImportQueue; +type BoxBlockImport = sc_consensus::BoxBlockImport; + +/// The minimum period of blocks on which justifications will be +/// imported and generated. 
+const GRANDPA_JUSTIFICATION_PERIOD: u32 = 512; #[allow(clippy::type_complexity)] pub fn new_partial( @@ -86,10 +91,10 @@ pub fn new_partial( FullClient, FullBackend, FullSelectChain, - sc_consensus::DefaultImportQueue, - mc_transaction_pool::FullPool, + sc_consensus::DefaultImportQueue, + sc_transaction_pool::FullPool, ( - BoxBlockImport, + BoxBlockImport, sc_consensus_grandpa::LinkHalf, Option, Arc, @@ -107,7 +112,7 @@ where Option, GrandpaBlockImport, Arc, - ) -> Result<(BasicImportQueue, BoxBlockImport), ServiceError>, + ) -> Result<(BasicImportQueue, BoxBlockImport), ServiceError>, { let telemetry = config .telemetry_endpoints @@ -154,8 +159,8 @@ where let select_chain = sc_consensus::LongestChain::new(backend.clone()); - let transaction_pool = mc_transaction_pool::BasicPool::new_full( - mc_transaction_pool::Options::from(config.transaction_pool.clone()), + let transaction_pool = sc_transaction_pool::BasicPool::new_full( + config.transaction_pool.clone(), config.role.is_authority().into(), config.prometheus_registry(), task_manager.spawn_essential_handle(), @@ -164,6 +169,7 @@ where let (grandpa_block_import, grandpa_link) = sc_consensus_grandpa::block_import( client.clone(), + GRANDPA_JUSTIFICATION_PERIOD, &client as &Arc<_>, select_chain.clone(), telemetry.as_ref().map(|x| x.handle()), @@ -200,7 +206,7 @@ pub fn build_aura_grandpa_import_queue( telemetry: Option, grandpa_block_import: GrandpaBlockImport, _madara_backend: Arc, -) -> Result<(BasicImportQueue, BoxBlockImport), ServiceError> +) -> Result<(BasicImportQueue, BoxBlockImport), ServiceError> where RuntimeApi: ConstructRuntimeApi, RuntimeApi: Send + Sync + 'static, @@ -241,7 +247,7 @@ pub fn build_manual_seal_import_queue( _telemetry: Option, _grandpa_block_import: GrandpaBlockImport, _madara_backend: Arc, -) -> Result<(BasicImportQueue, BoxBlockImport), ServiceError> +) -> Result<(BasicImportQueue, BoxBlockImport), ServiceError> where RuntimeApi: ConstructRuntimeApi, RuntimeApi: Send + Sync + 'static, @@ -313,10 +319,26 @@ pub fn new_full( import_queue, block_announce_validator_builder: None, warp_sync_params, + block_relay: None, })?; if config.offchain_worker.enabled { - sc_service::build_offchain_workers(&config, task_manager.spawn_handle(), client.clone(), network.clone()); + task_manager.spawn_handle().spawn( + "offchain-workers-runner", + "offchain-worker", + sc_offchain::OffchainWorkers::new(sc_offchain::OffchainWorkerOptions { + runtime_api_provider: client.clone(), + is_validator: config.role.is_authority(), + keystore: Some(keystore_container.keystore()), + offchain_db: backend.offchain_storage(), + transaction_pool: Some(OffchainTransactionPoolFactory::new(transaction_pool.clone())), + network_provider: network.clone(), + enable_http_requests: true, + custom_extensions: |_| vec![], + }) + .run(client.clone(), task_manager.spawn_handle()) + .boxed(), + ); } let role = config.role.clone(); @@ -456,6 +478,7 @@ pub fn new_full( &task_manager, prometheus_registry.as_ref(), commands_stream, + telemetry, )?; network_starter.start_network(); @@ -472,8 +495,9 @@ pub fn new_full( let proposer_factory = ProposerFactory::new( task_manager.spawn_handle(), client.clone(), - transaction_pool, + transaction_pool.clone(), prometheus_registry.as_ref(), + telemetry.as_ref().map(|x| x.handle()), ); let slot_duration = sc_consensus_aura::slot_duration(&*client)?; @@ -534,7 +558,7 @@ pub fn new_full( let grandpa_config = sc_consensus_grandpa::Config { // FIXME #1578 make this available through chainspec gossip_duration: 
Duration::from_millis(333), - justification_period: 512, + justification_generation_period: GRANDPA_JUSTIFICATION_PERIOD, name: Some(name), observer_enabled: false, keystore, @@ -558,6 +582,7 @@ pub fn new_full( prometheus_registry, shared_voter_state: SharedVoterState::empty(), telemetry: telemetry.as_ref().map(|x| x.handle()), + offchain_tx_pool_factory: OffchainTransactionPoolFactory::new(transaction_pool.clone()), }; // the GRANDPA voter task is considered infallible, i.e. @@ -581,10 +606,11 @@ fn run_manual_seal_authorship( client: Arc, transaction_pool: Arc>, select_chain: FullSelectChain, - block_import: BoxBlockImport, + block_import: BoxBlockImport, task_manager: &TaskManager, prometheus_registry: Option<&Registry>, commands_stream: Option>>, + telemetry: Option, ) -> Result<(), ServiceError> where RuntimeApi: ConstructRuntimeApi, @@ -595,6 +621,7 @@ where client.clone(), transaction_pool.clone(), prometheus_registry, + telemetry.as_ref().map(|x| x.handle()), ); thread_local!(static TIMESTAMP: RefCell = RefCell::new(0)); @@ -642,7 +669,6 @@ where B: BlockT, C: ProvideRuntimeApi + Send + Sync, { - type Transaction = TransactionFor; type Proof = (); fn create_digest(&self, _parent: &B::Header, _inherents: &InherentData) -> Result { @@ -656,7 +682,7 @@ where fn append_block_import( &self, _parent: &B::Header, - params: &mut BlockImportParams, + params: &mut BlockImportParams, _inherents: &InherentData, _proof: Self::Proof, ) -> Result<(), Error> { @@ -705,16 +731,8 @@ where Ok(()) } -type ChainOpsResult = Result< - ( - Arc, - Arc, - BasicQueue>, - TaskManager, - Arc, - ), - ServiceError, ->; +type ChainOpsResult = + Result<(Arc, Arc, BasicQueue, TaskManager, Arc), ServiceError>; pub fn new_chain_ops(config: &mut Configuration, cache_more_things: bool) -> ChainOpsResult { config.keystore = sc_service::config::KeystoreConfig::InMemory; diff --git a/crates/pallets/starknet/src/lib.rs b/crates/pallets/starknet/src/lib.rs index 05ba85806d..9eb8fc1ac6 100644 --- a/crates/pallets/starknet/src/lib.rs +++ b/crates/pallets/starknet/src/lib.rs @@ -57,7 +57,6 @@ mod offchain_worker; use blockifier::execution::entry_point::{CallEntryPoint, CallType, EntryPointExecutionContext}; use blockifier::state::cached_state::ContractStorageKey; -use blockifier::state::state_api::State; use blockifier::transaction::objects::{TransactionExecutionInfo, TransactionExecutionResult}; use starknet_api::state::StorageKey; use starknet_api::transaction::{Calldata, Event as StarknetEvent, Fee}; @@ -85,7 +84,6 @@ use mp_fee::INITIAL_GAS; use mp_felt::Felt252Wrapper; use mp_hashers::HasherT; use mp_sequencer_address::{InherentError, InherentType, DEFAULT_SEQUENCER_ADDRESS, INHERENT_IDENTIFIER}; -use mp_state::{FeeConfig, StateChanges}; use mp_storage::{StarknetStorageSchemaVersion, PALLET_STARKNET_SCHEMA}; use mp_transactions::execution::{Execute, Validate}; use mp_transactions::{ @@ -177,7 +175,7 @@ pub mod pallet { #[pallet::hooks] impl Hooks> for Pallet { /// The block is being finalized. - fn on_finalize(_n: T::BlockNumber) { + fn on_finalize(_n: BlockNumberFor) { assert!(SeqAddrUpdate::::take(), "Sequencer address must be set for the block"); // Create a new Starknet block and store it. @@ -187,58 +185,8 @@ pub mod pallet { } /// The block is being initialized. Implement to have something happen. 
- fn on_initialize(_: T::BlockNumber) -> Weight { - // log!(info, "{:?}", Decode::decode(&mut frame_system::Pallet::::digest().logs()[0])); - // if frame_system::Pallet::::digest().logs().len() == 1 { - - // match &frame_system::Pallet::::digest().logs()[0] { - // DigestItem::PreRuntime(mp_digest_log::MADARA_ENGINE_ID ,encoded_data) => { - - // if let Ok(block) = mp_starknet::block::Block::decode(&mut encoded_data.as_slice()) { - // let block_transactions = block.transactions(); - // for tx in block_transactions { - // let tx_type = tx.tx_type.clone(); - // let contract_class: Option = match tx.contract_class.clone() { - // Some(wrapper) => { - // match ContractClass::try_from(wrapper) { - // Ok(contract_class) => Some(contract_class), - // Err(e) => { - // log!(info,"Error while converting ContractClassWrapper to ContractClass: - // {:?}", e); None - // } - // } - // }, - // None => None, - // }; - - // // Self::validate_tx(tx.clone(), tx_type.clone()).expect( - // // "pre-block transaction verification failed; the block cannot be built", - // // ); - // // let block_context = Self::get_block_context(); - // // match tx.execute( - // // &mut BlockifierStateAdapter::::default(), - // // &block_context, - // // tx_type, - // // contract_class, - // // ) { - // // Ok(v) => { - // // log!(debug, "Transaction executed successfully: {:?}", v); - // // } - // // Err(e) => { - // // log!(error, "Transaction execution failed: {:?}", e); - // // return Weight::zero(); //error - // // } - // // } - // } - // } else { - // log!(info, "PreRuntime digest is not a block"); - // } - // } - // _ => { - // log!(info, "No PreRuntime Digest found"); - // } - // } - // } + fn on_initialize(_: BlockNumberFor) -> Weight { + log!(info, "Initializing block."); Weight::zero() } @@ -251,8 +199,8 @@ pub mod pallet { /// See: `` /// # Arguments /// * `n` - The block number. 
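A brief clarifying sketch, not part of the patch: the hook signatures in this hunk move from `T::BlockNumber` to `BlockNumberFor<T>`, which in the polkadot-sdk branch referenced elsewhere in this patch is the frame_system prelude alias for the runtime's block number type. Assuming only a frame_system dependency:

use frame_system::pallet_prelude::BlockNumberFor;

// Sketch only: generic over any runtime implementing frame_system::Config.
// The parameter plays the same role as the old `_n: T::BlockNumber`.
fn on_finalize_sketch<T: frame_system::Config>(_n: BlockNumberFor<T>) {
    // pallet hooks now spell the block number this way
}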
- fn offchain_worker(n: T::BlockNumber) { - // log!(info, "Running offchain worker at block {:?}.", n); + fn offchain_worker(n: BlockNumberFor) { + log!(info, "Running offchain worker at block {:?}.", n); // match Self::process_l1_messages() { // Ok(_) => log!(info, "Successfully executed L1 messages"), @@ -416,7 +364,7 @@ pub mod pallet { } #[pallet::genesis_build] - impl GenesisBuild for GenesisConfig { + impl BuildGenesisConfig for GenesisConfig { fn build(&self) { >::store_block(0); frame_support::storage::unhashed::put::( @@ -1168,64 +1116,82 @@ impl Pallet { } /// Estimate the fee associated with transaction - pub fn estimate_fee(transaction: UserTransaction, is_query: bool) -> Result<(u64, u64), DispatchError> { + pub fn estimate_fee(transactions: Vec) -> Result, DispatchError> { let chain_id = Self::chain_id(); - fn execute_tx_and_rollback( - tx: impl Execute, - state: &mut S, + fn execute_txs_and_rollback( + txs: Vec, block_context: &BlockContext, disable_nonce_validation: bool, - ) -> TransactionExecutionResult { - // TODO: initialization can probably be skiped by using mem::MaybeUninit - let mut execution_result = Ok(Default::default()); + chain_id: Felt252Wrapper, + ) -> Vec> { + let mut execution_results = vec![]; let _: Result<_, DispatchError> = storage::transactional::with_transaction(|| { - execution_result = tx.execute(state, block_context, true, disable_nonce_validation); + for tx in txs { + let result = match tx { + UserTransaction::Declare(tx, contract_class) => { + let executable = tx + .try_into_executable::(chain_id, contract_class, true) + .map_err(|_| Error::::InvalidContractClass) + .expect("Contract class should be valid"); + executable.execute( + &mut BlockifierStateAdapter::::default(), + block_context, + true, + disable_nonce_validation, + ) + } + UserTransaction::DeployAccount(tx) => { + let executable = tx.into_executable::(chain_id, true); + executable.execute( + &mut BlockifierStateAdapter::::default(), + block_context, + true, + disable_nonce_validation, + ) + } + UserTransaction::Invoke(tx) => { + let executable = tx.into_executable::(chain_id, true); + executable.execute( + &mut BlockifierStateAdapter::::default(), + block_context, + true, + disable_nonce_validation, + ) + } + }; + execution_results.push(result); + } storage::TransactionOutcome::Rollback(Ok(())) }); - execution_result + execution_results } - let mut blockifier_state_adapter = BlockifierStateAdapter::::default(); - let block_context = Self::get_block_context(); - let disable_nonce_validation = T::DisableNonceValidation::get(); - - let execution_result = match transaction { - UserTransaction::Declare(tx, contract_class) => execute_tx_and_rollback( - tx.try_into_executable::(chain_id, contract_class, is_query) - .map_err(|_| Error::::InvalidContractClass)?, - &mut blockifier_state_adapter, - &block_context, - disable_nonce_validation, - ), - UserTransaction::DeployAccount(tx) => execute_tx_and_rollback( - tx.into_executable::(chain_id, is_query), - &mut blockifier_state_adapter, - &block_context, - disable_nonce_validation, - ), - UserTransaction::Invoke(tx) => execute_tx_and_rollback( - tx.into_executable::(chain_id, is_query), - &mut blockifier_state_adapter, - &block_context, - disable_nonce_validation, - ), - }; - - match execution_result { - Ok(tx_exec_info) => { - log!(debug, "Successfully estimated fee: {:?}", tx_exec_info); - if let Some(gas_usage) = tx_exec_info.actual_resources.0.get("l1_gas_usage") { - Ok((tx_exec_info.actual_fee.0 as u64, *gas_usage as u64)) - } else { - 
Err(Error::::TransactionExecutionFailed.into()) + let execution_results = execute_txs_and_rollback::( + transactions, + &Self::get_block_context(), + T::DisableNonceValidation::get(), + chain_id, + ); + + let mut results = vec![]; + for res in execution_results { + match res { + Ok(tx_exec_info) => { + log!(info, "Successfully estimated fee: {:?}", tx_exec_info); + if let Some(l1_gas_usage) = tx_exec_info.actual_resources.0.get("l1_gas_usage") { + results.push((tx_exec_info.actual_fee.0 as u64, *l1_gas_usage as u64)); + } else { + return Err(Error::::TransactionExecutionFailed.into()); + } + } + Err(e) => { + log!(info, "Failed to estimate fee: {:?}", e); + return Err(Error::::TransactionExecutionFailed.into()); } - } - Err(e) => { - log!(error, "Failed to estimate fee: {:?}", e); - Err(Error::::TransactionExecutionFailed.into()) } } + Ok(results) } pub fn emit_and_store_tx_and_fees_events( @@ -1250,4 +1216,7 @@ impl Pallet { pub fn chain_id() -> Felt252Wrapper { T::ChainId::get() } + pub fn is_transaction_fee_disabled() -> bool { + T::DisableTransactionFee::get() + } } diff --git a/crates/pallets/starknet/src/runtime_api.rs b/crates/pallets/starknet/src/runtime_api.rs index 466fd5bc6e..81137fad02 100644 --- a/crates/pallets/starknet/src/runtime_api.rs +++ b/crates/pallets/starknet/src/runtime_api.rs @@ -9,7 +9,7 @@ use alloc::sync::Arc; use blockifier::execution::contract_class::ContractClass; use mp_felt::Felt252Wrapper; -use mp_transactions::{Transaction, TxType, UserTransaction}; +use mp_transactions::{Transaction, UserTransaction}; use sp_api::BlockT; pub extern crate alloc; use alloc::string::String; @@ -46,7 +46,7 @@ sp_api::decl_runtime_apis! { /// Returns the chain id. fn chain_id() -> Felt252Wrapper; /// Returns fee estimate - fn estimate_fee(transaction: UserTransaction, is_query: bool) -> Result<(u64, u64), DispatchError>; + fn estimate_fee(transactions: Vec) -> Result, DispatchError>; /// Filters extrinsic transactions to return only Starknet transactions /// /// To support runtime upgrades, the client must be unaware of the specific extrinsic @@ -56,7 +56,9 @@ sp_api::decl_runtime_apis! { /// the runtime itself, accomplished through the extrinsic_filter method. This enables the /// client to operate seamlessly while abstracting the extrinsic complexity. fn extrinsic_filter(xts: Vec<::Extrinsic>) -> Vec; - fn get_events_for_tx_hash(xts: Vec<::Extrinsic>, chain_id: Felt252Wrapper, tx_hash: Felt252Wrapper) -> Option<(TxType, Vec)>; + fn get_index_and_tx_for_tx_hash(xts: Vec<::Extrinsic>, chain_id: Felt252Wrapper, tx_hash: Felt252Wrapper) -> Option<(u32, Transaction)>; + /// Returns events, call with index from get_index_and_tx_for_tx_hash method + fn get_events_for_tx_by_index(tx_index: u32) -> Option>; /// Return the list of StarknetEvent evmitted during this block, along with the hash of the starknet transaction they bellong to /// @@ -66,6 +68,8 @@ sp_api::decl_runtime_apis! 
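A hedged usage sketch of the batched estimate_fee introduced above; it assumes the pallet's mock-runtime test context and reuses helper names from the updated query_tx.rs tests, so it is not a standalone program:

// One (actual_fee, l1_gas_usage) pair is returned per input transaction, in
// submission order, and every execution is rolled back so pending state stays untouched.
let txs = vec![
    UserTransaction::Invoke(get_storage_read_write_dummy().into()),
    UserTransaction::Invoke(get_invoke_dummy(Felt252Wrapper::ONE).into()),
];
let fees: Vec<(u64, u64)> = Starknet::estimate_fee(txs).expect("estimate should not fail");
let (fee, l1_gas) = fees[0];
assert!(fee > 0 && l1_gas > 0); // the storage-writing tx is also charged L1 gas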
{ fn get_tx_execution_outcome(tx_hash: TransactionHash) -> Option>; /// Return the block context fn get_block_context() -> BlockContext; + /// Return is fee disabled in state + fn is_transaction_fee_disabled() -> bool; } pub trait ConvertTransactionRuntimeApi { diff --git a/crates/pallets/starknet/src/tests/deploy_account_tx.rs b/crates/pallets/starknet/src/tests/deploy_account_tx.rs index 7b416b1e66..d4491dfd59 100644 --- a/crates/pallets/starknet/src/tests/deploy_account_tx.rs +++ b/crates/pallets/starknet/src/tests/deploy_account_tx.rs @@ -7,6 +7,7 @@ use sp_runtime::transaction_validity::{InvalidTransaction, TransactionSource, Tr use starknet_api::api_core::{ContractAddress, Nonce}; use starknet_api::hash::StarkFelt; use starknet_api::transaction::{Event as StarknetEvent, EventContent, EventData, EventKey}; +use starknet_core::utils::get_selector_from_name; use starknet_crypto::FieldElement; use super::mock::default_mock::*; @@ -45,7 +46,7 @@ fn given_contract_run_deploy_account_tx_works() { let expected_fee_transfer_event = Event::StarknetEvent(StarknetEvent { content: EventContent { keys: vec![EventKey( - StarkFelt::try_from("0x0099cd8bde557814842a3121e8ddfd433a539b8c9f14bf31ebf108d12e6196e9").unwrap(), + StarkFelt::try_from(get_selector_from_name(mp_fee::TRANSFER_SELECTOR_NAME).unwrap()).unwrap(), )], data: EventData(vec![ address.0.0, // From diff --git a/crates/pallets/starknet/src/tests/erc20.rs b/crates/pallets/starknet/src/tests/erc20.rs index 40444250da..6898bcda14 100644 --- a/crates/pallets/starknet/src/tests/erc20.rs +++ b/crates/pallets/starknet/src/tests/erc20.rs @@ -7,6 +7,7 @@ use starknet_api::api_core::{ContractAddress, PatriciaKey}; use starknet_api::hash::StarkFelt; use starknet_api::state::StorageKey; use starknet_api::transaction::{Event as StarknetEvent, EventContent, EventData, EventKey}; +use starknet_core::utils::get_selector_from_name; use super::mock::default_mock::*; use super::mock::*; @@ -98,7 +99,7 @@ fn given_erc20_transfer_when_invoke_then_it_works() { let expected_fee_transfer_event = Event::StarknetEvent(StarknetEvent { content: EventContent { keys: vec![EventKey( - StarkFelt::try_from("0x0099cd8bde557814842a3121e8ddfd433a539b8c9f14bf31ebf108d12e6196e9").unwrap(), + StarkFelt::try_from(get_selector_from_name(mp_fee::TRANSFER_SELECTOR_NAME).unwrap()).unwrap(), )], data: EventData(vec![ sender_account.0.0, // From @@ -171,7 +172,7 @@ fn given_erc20_transfer_when_invoke_then_it_works() { let expected_event = Event::StarknetEvent(StarknetEvent { content: EventContent { keys: vec![ - EventKey(StarkFelt::try_from("0x0099cd8bde557814842a3121e8ddfd433a539b8c9f14bf31ebf108d12e6196e9") + EventKey(StarkFelt::try_from(get_selector_from_name(mp_fee::TRANSFER_SELECTOR_NAME).unwrap()) .unwrap()), ], data: EventData(vec![ @@ -192,7 +193,7 @@ fn given_erc20_transfer_when_invoke_then_it_works() { let expected_fee_transfer_event = Event::StarknetEvent(StarknetEvent { content: EventContent { keys: vec![ - EventKey(StarkFelt::try_from("0x0099cd8bde557814842a3121e8ddfd433a539b8c9f14bf31ebf108d12e6196e9") + EventKey(StarkFelt::try_from(get_selector_from_name(mp_fee::TRANSFER_SELECTOR_NAME).unwrap()) .unwrap()), ], data: EventData(vec![ diff --git a/crates/pallets/starknet/src/tests/invoke_tx.rs b/crates/pallets/starknet/src/tests/invoke_tx.rs index e0d643f937..a370da6bf5 100644 --- a/crates/pallets/starknet/src/tests/invoke_tx.rs +++ b/crates/pallets/starknet/src/tests/invoke_tx.rs @@ -84,14 +84,13 @@ fn given_hardcoded_contract_run_invoke_tx_then_it_works() { 
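A small equivalence check for the selector substitution made in these test hunks, hedged on the assumption that mp_fee::TRANSFER_SELECTOR_NAME resolves to "Transfer"; it uses only APIs already imported by these tests (starknet-core and starknet-crypto):

use starknet_core::utils::get_selector_from_name;
use starknet_crypto::FieldElement;

fn main() {
    // starknet_keccak("Transfer"), i.e. the event key the tests previously hardcoded.
    let transfer = get_selector_from_name("Transfer").unwrap();
    let hardcoded =
        FieldElement::from_hex_be("0x0099cd8bde557814842a3121e8ddfd433a539b8c9f14bf31ebf108d12e6196e9").unwrap();
    assert_eq!(transfer, hardcoded);
}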
StarkFelt::try_from("0x02dfd0ded452658d67535279591c1ed9898431e1eafad7896239f0bfa68493d6").unwrap() ) ); - assert!(System::events().into_iter().map(|event_record| event_record.event).any(|e| match e { RuntimeEvent::Starknet(Event::StarknetEvent(e)) => { e == StarknetEvent { from_address: Starknet::fee_token_address(), content: EventContent { keys: vec![EventKey( - StarkFelt::try_from("0x0099cd8bde557814842a3121e8ddfd433a539b8c9f14bf31ebf108d12e6196e9") + StarkFelt::try_from(get_selector_from_name(mp_fee::TRANSFER_SELECTOR_NAME).unwrap()) .unwrap(), )], data: EventData(vec![ @@ -137,7 +136,7 @@ fn given_hardcoded_contract_run_invoke_tx_then_event_is_emitted() { from_address: Starknet::fee_token_address(), content: EventContent { keys: vec![EventKey( - StarkFelt::try_from("0x0099cd8bde557814842a3121e8ddfd433a539b8c9f14bf31ebf108d12e6196e9") + StarkFelt::try_from(get_selector_from_name(mp_fee::TRANSFER_SELECTOR_NAME).unwrap()) .unwrap(), )], data: EventData(vec![ diff --git a/crates/pallets/starknet/src/tests/mock/genesis.json b/crates/pallets/starknet/src/tests/mock/genesis.json index d258caaf99..2698b3a96c 100644 --- a/crates/pallets/starknet/src/tests/mock/genesis.json +++ b/crates/pallets/starknet/src/tests/mock/genesis.json @@ -147,6 +147,27 @@ ] ], "storage": [ + [ + [ + "0x00000000000000000000000000000000000000000000000000000000000000AA", + "0x0341c1bdfd89f69748aa00b5742b03adbffd79b8e80cab5c50d91cd8c2a79be1" + ], + "0x4574686572" + ], + [ + [ + "0x00000000000000000000000000000000000000000000000000000000000000AA", + "0x00b6ce5410fca59d078ee9b2a4371a9d684c530d697c64fbef0ae6d5e8f0ac72" + ], + "0x455448" + ], + [ + [ + "0x00000000000000000000000000000000000000000000000000000000000000AA", + "0x01f0d4aa99431d246bac9b8e48c33e888245b15e9678f64f9bdfc8823dc8f979" + ], + "0x12" + ], [ [ "0x00000000000000000000000000000000000000000000000000000000000000AA", diff --git a/crates/pallets/starknet/src/tests/mock/setup_mock.rs b/crates/pallets/starknet/src/tests/mock/setup_mock.rs index fbdb7cc356..e2c43005c6 100644 --- a/crates/pallets/starknet/src/tests/mock/setup_mock.rs +++ b/crates/pallets/starknet/src/tests/mock/setup_mock.rs @@ -1,4 +1,4 @@ -use frame_support::traits::GenesisBuild; +use sp_runtime::BuildStorage; use crate::genesis_loader::{GenesisData, GenesisLoader}; use crate::{Config, GenesisConfig}; @@ -10,7 +10,6 @@ macro_rules! mock_runtime { use frame_support::parameter_types; use frame_support::traits::{ConstU16, ConstU64}; use sp_core::H256; - use sp_runtime::testing::Header; use sp_runtime::traits::{BlakeTwo256, IdentityLookup}; use {crate as pallet_starknet, frame_system as system}; use crate::{ SeqAddrUpdate, SequencerAddress}; @@ -21,15 +20,10 @@ macro_rules! mock_runtime { use starknet_api::hash::StarkFelt; - type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; frame_support::construct_runtime!( - pub enum MockRuntime where - Block = Block, - NodeBlock = Block, - UncheckedExtrinsic = UncheckedExtrinsic, - { + pub enum MockRuntime { System: frame_system, Starknet: pallet_starknet, Timestamp: pallet_timestamp, @@ -50,13 +44,12 @@ macro_rules! 
mock_runtime { type DbWeight = (); type RuntimeOrigin = RuntimeOrigin; type RuntimeCall = RuntimeCall; - type Index = u64; - type BlockNumber = u64; + type Nonce = u64; type Hash = H256; type Hashing = BlakeTwo256; type AccountId = u64; type Lookup = IdentityLookup; - type Header = Header; + type Block = Block; type RuntimeEvent = RuntimeEvent; type BlockHashCount = ConstU64<250>; type Version = (); @@ -124,7 +117,7 @@ macro_rules! mock_runtime { // Build genesis storage according to the mock runtime. pub fn new_test_ext() -> sp_io::TestExternalities { - let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); + let mut t = frame_system::GenesisConfig::::default().build_storage().unwrap(); let genesis_data: GenesisData = serde_json::from_str(std::include_str!("./genesis.json")).unwrap(); let genesis_loader = GenesisLoader::new(project_root::get_project_root().unwrap(), genesis_data); diff --git a/crates/pallets/starknet/src/tests/query_tx.rs b/crates/pallets/starknet/src/tests/query_tx.rs index 215fb7aae1..25b98ef0d7 100644 --- a/crates/pallets/starknet/src/tests/query_tx.rs +++ b/crates/pallets/starknet/src/tests/query_tx.rs @@ -14,19 +14,23 @@ fn estimates_tx_fee_successfully_no_validate() { new_test_ext::().execute_with(|| { basic_test_setup(2); - let tx = get_invoke_dummy(Felt252Wrapper::ZERO); - let tx = UserTransaction::Invoke(tx.into()); + let tx_1: mp_transactions::InvokeTransactionV1 = get_storage_read_write_dummy(); + let tx_1 = UserTransaction::Invoke(tx_1.into()); - let (actual, l1_gas_usage) = Starknet::estimate_fee(tx, true).unwrap(); - assert!(actual > 0, "actual fee is missing"); - assert!(l1_gas_usage == 0, "this should not be charged any l1_gas as it does not store nor send messages"); + let tx_2 = get_invoke_dummy(Felt252Wrapper::ONE); + let tx_2 = UserTransaction::Invoke(tx_2.into()); - let tx = get_storage_read_write_dummy(); - let tx = UserTransaction::Invoke(tx.into()); + let txs = vec![tx_1, tx_2]; + + let fees = Starknet::estimate_fee(txs).expect("estimate should not fail"); - let (actual, l1_gas_usage) = Starknet::estimate_fee(tx, true).unwrap(); + let (actual, l1_gas_usage) = fees[0]; assert!(actual > 0, "actual fee is missing"); assert!(l1_gas_usage > 0, "this should be charged l1_gas as it store a value to storage"); + + let (actual, l1_gas_usage) = fees[1]; + assert!(actual > 0, "actual fee is missing"); + assert!(l1_gas_usage == 0, "this should not be charged any l1_gas as it does not store nor send messages"); }); } @@ -39,7 +43,9 @@ fn estimates_tx_fee_with_query_version() { let pre_storage = Starknet::pending().len(); let tx = UserTransaction::Invoke(tx.into()); - assert_ok!(Starknet::estimate_fee(tx, true)); + let tx_vec = vec![tx]; + + assert_ok!(Starknet::estimate_fee(tx_vec)); assert!(pre_storage == Starknet::pending().len(), "estimate should not add a tx to pending"); }); @@ -55,11 +61,10 @@ fn executable_tx_should_not_be_estimable() { let tx_hash = tx.compute_hash::<::SystemHash>(chain_id, false); tx.signature = sign_message_hash(tx_hash); + let tx_vec = vec![UserTransaction::Invoke(tx.clone().into())]; + // it should not be valid for estimate calls - assert_err!( - Starknet::estimate_fee(UserTransaction::Invoke(tx.clone().into()), true), - Error::::TransactionExecutionFailed - ); + assert_err!(Starknet::estimate_fee(tx_vec), Error::::TransactionExecutionFailed); // it should be executable assert_ok!(Starknet::invoke(RuntimeOrigin::none(), tx.clone().into())); @@ -76,8 +81,10 @@ fn query_tx_should_not_be_executable() { let 
tx_hash = tx.compute_hash::<::SystemHash>(chain_id, true); tx.signature = sign_message_hash(tx_hash); + let tx_vec = vec![UserTransaction::Invoke(tx.clone().into())]; + // it should be valid for estimate calls - assert_ok!(Starknet::estimate_fee(UserTransaction::Invoke(tx.clone().into()), true),); + assert_ok!(Starknet::estimate_fee(tx_vec)); // it should not be executable assert_err!( diff --git a/crates/primitives/fee/src/lib.rs b/crates/primitives/fee/src/lib.rs index d043cf21d0..da89a49e85 100644 --- a/crates/primitives/fee/src/lib.rs +++ b/crates/primitives/fee/src/lib.rs @@ -48,6 +48,12 @@ pub static VM_RESOURCE_FEE_COSTS: [(&str, FixedU128); 7] = [ ("ec_op_builtin", FixedU128::from_inner(10_240_000_000_000_000_000)), ]; +pub const TRANSFER_SELECTOR_NAME: &str = "Transfer"; +pub const TRANSFER_SELECTOR_HASH: [u8; 32] = [ + 0, 131, 175, 211, 244, 202, 237, 198, 238, 191, 68, 36, 111, 229, 78, 56, 201, 94, 49, 121, 165, 236, 158, 168, 23, + 64, 236, 165, 180, 130, 209, 46, +]; // starknet_keccak(TRANSFER_SELECTOR_NAME.as_bytes()).to_le_bytes(); + /// Gets the transaction resources. pub fn compute_transaction_resources( state: &S, @@ -115,20 +121,14 @@ fn execute_fee_transfer( let lsb_amount = StarkFelt::from(actual_fee.0); // The most significant 128 bits of the amount transferred. let msb_amount = StarkFelt::from(0_u64); - let storage_address = block_context.fee_token_address; let fee_transfer_call = CallEntryPoint { class_hash: None, code_address: None, entry_point_type: EntryPointType::External, - entry_point_selector: EntryPointSelector( - // The value is hardcoded and it's the encoding of the "transfer" selector so it cannot fail. - StarkFelt::new([ - 0, 131, 175, 211, 244, 202, 237, 198, 238, 191, 68, 36, 111, 229, 78, 56, 201, 94, 49, 121, 165, 236, - 158, 168, 23, 64, 236, 165, 180, 130, 209, 46, - ]) - .unwrap(), - ), + // The value TRANSFER_SELECTOR_HASH is hardcoded and it's the encoding of the "transfer" selector so it cannot + // fail. + entry_point_selector: EntryPointSelector(StarkFelt::new(TRANSFER_SELECTOR_HASH).unwrap()), calldata: calldata![ *block_context.sequencer_address.0.key(), // Recipient. lsb_amount, diff --git a/crates/runtime/src/lib.rs b/crates/runtime/src/lib.rs index 8b5af4c763..7ce6ee0623 100644 --- a/crates/runtime/src/lib.rs +++ b/crates/runtime/src/lib.rs @@ -29,7 +29,7 @@ pub use frame_system::Call as SystemCall; use frame_system::{EventRecord, Phase}; use mp_felt::Felt252Wrapper; use mp_transactions::compute_hash::ComputeTransactionHash; -use mp_transactions::{Transaction, TxType, UserTransaction}; +use mp_transactions::{Transaction, UserTransaction}; use pallet_grandpa::{fg_primitives, AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList}; /// Import the StarkNet pallet. pub use pallet_starknet; @@ -59,12 +59,7 @@ pub use types::*; // Create the runtime by composing the FRAME pallets that were previously configured. construct_runtime!( - pub struct Runtime - where - Block = Block, - NodeBlock = opaque::Block, - UncheckedExtrinsic = UncheckedExtrinsic, - { + pub struct Runtime { System: frame_system, Timestamp: pallet_timestamp, Aura: pallet_aura, @@ -263,8 +258,12 @@ impl_runtime_apis! 
{ Starknet::chain_id() } - fn estimate_fee(transaction: UserTransaction, is_query: bool) -> Result<(u64, u64), DispatchError> { - Starknet::estimate_fee(transaction, is_query) + fn is_transaction_fee_disabled() -> bool { + Starknet::is_transaction_fee_disabled() + } + + fn estimate_fee(transactions: Vec) -> Result, DispatchError> { + Starknet::estimate_fee(transactions) } fn get_starknet_events_and_their_associated_tx_hash(block_extrinsics: Vec<::Extrinsic>, chain_id: Felt252Wrapper) -> Vec<(Felt252Wrapper, StarknetEvent)> { @@ -302,7 +301,7 @@ impl_runtime_apis! { }).collect::>() } - fn get_events_for_tx_hash(extrinsics: Vec<::Extrinsic>, chain_id: Felt252Wrapper, tx_hash: Felt252Wrapper) -> Option<(TxType, Vec)> { + fn get_index_and_tx_for_tx_hash(extrinsics: Vec<::Extrinsic>, chain_id: Felt252Wrapper, tx_hash: Felt252Wrapper) -> Option<(u32, Transaction)> { // Find our tx and it's index let (tx_index, tx) = extrinsics.into_iter().enumerate().find(|(_, xt)| { let computed_tx_hash = match &xt.function { @@ -315,16 +314,19 @@ impl_runtime_apis! { computed_tx_hash == tx_hash })?; - - // Compute it's tx type - let tx_type = match tx.function { - RuntimeCall::Starknet( invoke { .. }) => TxType::Invoke, - RuntimeCall::Starknet( declare { .. }) => TxType::Declare, - RuntimeCall::Starknet( deploy_account { .. }) => TxType::DeployAccount, - RuntimeCall::Starknet( consume_l1_message { .. }) => TxType::L1Handler, - _ => panic!("The previous match made sure that at this point tx is one of those starknet calls"), + let transaction = match tx.function { + RuntimeCall::Starknet( invoke { transaction }) => Transaction::Invoke(transaction), + RuntimeCall::Starknet( declare { transaction, .. }) => Transaction::Declare(transaction), + RuntimeCall::Starknet( deploy_account { transaction }) => Transaction::DeployAccount(transaction), + RuntimeCall::Starknet( consume_l1_message { transaction, .. }) => Transaction::L1Handler(transaction), + _ => unreachable!("The previous match made sure that at this point tx is one of those starknet calls"), }; + let tx_index = u32::try_from(tx_index).expect("unexpected number of transactions"); + Some((tx_index, transaction)) + } + + fn get_events_for_tx_by_index(tx_index: u32) -> Option> { // Skip all the events that are not related to our tx let event_iter = System::read_events_no_consensus().filter_map(|event| { @@ -338,7 +340,7 @@ impl_runtime_apis! { _ => return true }; - tx_index as u32 != index + tx_index != index }); // Collect all the events related to our tx @@ -350,10 +352,10 @@ impl_runtime_apis! { _ => panic!("The previous iteration made sure at this point phase is of ApplyExtrinsic variant"), }; - tx_index as u32 == index + tx_index == index }).map(|(_, event)| event).collect(); - Some((tx_type, events)) + Some(events) } fn get_tx_execution_outcome(tx_hash: TransactionHash) -> Option> { diff --git a/crates/runtime/src/pallets.rs b/crates/runtime/src/pallets.rs index 201a515199..133f438b7a 100644 --- a/crates/runtime/src/pallets.rs +++ b/crates/runtime/src/pallets.rs @@ -15,7 +15,6 @@ pub use mp_chain_id::SN_GOERLI_CHAIN_ID; pub use pallet_starknet; pub use pallet_timestamp::Call as TimestampCall; use sp_consensus_aura::sr25519::AuthorityId as AuraId; -use sp_runtime::generic; use sp_runtime::traits::{AccountIdLookup, BlakeTwo256}; #[cfg(any(feature = "std", test))] pub use sp_runtime::BuildStorage; @@ -68,15 +67,13 @@ impl frame_system::Config for Runtime { /// The lookup mechanism to get account ID from whatever is passed in dispatchers. 
    type Lookup = AccountIdLookup<AccountId, ()>;
     /// The index type for storing how many extrinsics an account has signed.
-    type Index = Index;
-    /// The index type for blocks.
-    type BlockNumber = BlockNumber;
+    type Nonce = Index;
     /// The type for hashing blocks and tries.
     type Hash = Hash;
     /// The hashing algorithm used.
     type Hashing = BlakeTwo256;
-    /// The header type.
-    type Header = generic::Header<BlockNumber, BlakeTwo256>;
+    /// The Block type.
+    type Block = Block;
     /// The ubiquitous event type.
     type RuntimeEvent = RuntimeEvent;
     /// The ubiquitous origin type.
@@ -123,6 +120,7 @@ impl pallet_aura::Config for Runtime {
     type AuthorityId = AuraId;
     type DisabledValidators = ();
     type MaxAuthorities = ConstU32<32>;
+    type AllowMultipleBlocksPerSlot = ConstBool<false>;
 }

 /// Deterministic finality mechanism used for block finalization.
@@ -133,6 +131,7 @@ impl pallet_grandpa::Config for Runtime {
     type WeightInfo = ();
     type MaxAuthorities = ConstU32<32>;
     type MaxSetIdSessionEntries = ConstU64<0>;
+    type MaxNominators = ConstU32<1000>;
     type KeyOwnerProof = sp_core::Void;
     type EquivocationReportSystem = ();
diff --git a/docs/content/articles/cn/madara-beast-article.md b/docs/content/articles/cn/madara-beast-article.md
index 5541e8153b..7fed49c1e8 100644
--- a/docs/content/articles/cn/madara-beast-article.md
+++ b/docs/content/articles/cn/madara-beast-article.md
@@ -1,8 +1,8 @@
 ![thee BEEAAST](https://imgur.com/EBwBNnB.jpg)

-# 驾驭巨兽 - Madara和Starknet应用链的革新
+# 驾驭巨兽 - Madara 和 Starknet 应用链的革新

-**2023年7月20日** · 1分钟阅读时间
+**2023 年 7 月 20 日** · 1 分钟阅读时间

 _探索区块链技术的未来, 从大幅降低成本到个性化控制_

@@ -10,20 +10,20 @@

 ## 概述

-- Madara是一个高性能的Starknet排序器,提供了创建定制化和高效[应用链](https://www.starknet.io/en/posts/ecosystem/the-starknet-stacks-growth-spurt)的能力。
-- 通过使用Substrate框架,Madara强化了Cairo VM的能力,从而实现可证明、安全且灵活的程序。
 - 实施它可以带来诸多好处,譬如可扩展的基础设施、高吞吐量和对应用程序前所未有的控制。
-- Madara有包括支持潜在的链上隐私、流畅的跨链互操作性以及强大的执行能力这类独特的功能。
+- Madara 有包括支持潜在的链上隐私、流畅的跨链互操作性以及强大的执行能力这类独特的功能。
-- Madara正向区块链领域提供具有高效成本、可扩展和可定制的解决方案,来推动dApp的开发迈向前所未有的领域。

 ## 引言

@@ -34,110 +34,115 @@
-足其应用的需求,例如选择不同的哈希函数或自定义共识算法。最棒的是,由于应用链建立在L1或L2区块链之上,可以继承其强大的安全性,为开发人员提供了两全其美的解决方案。
+足其应用的需求,例如选择不同的哈希函数或自定义共识算法。最棒的是,由于应用链建立在 L1 或 L2 区块链之上,可以继承其强大的安全性,为开发人员提供了两全其美的解决方案。

-介绍下Madara,这是一个将灵活性和极速性能相结合的划时代的排序器。排序器这一组件负责执行交易并将它们分组到批次中。作为通往属于你的Starknet应用链的入口,Madara为在Starknet生态系统中进行前所未有的实验开辟了广阔的可能性。
+介绍下 Madara,这是一个将灵活性和极速性能相结合的划时代的排序器。排序器这一组件负责执行交易并将它们分组到批次中。作为通往属于你的 Starknet 应用链的入口,Madara 为在 Starknet 生态系统中进行前所未有的实验开辟了广阔的可能性。

-在我们深入探讨Madara如何为Starknet应用链带来强大的能力前,有必要解决一个问题:为什么开发人员会选择在Starknet上构建应用链,而不是直接使用[Starknet有效性Rollups](https://starkware.co/resource/scaling-ethereum-navigating-the-blockchain-trilemma/#:~:text=top%20of%20them.-,Validity%20Rollups,-Validity%20rollups%2C%20also)。有人可能会想,Starknet是否已经足以应对大多数情况。
+在我们深入探讨 Madara 如何为 Starknet 应用链带来强大的能力前,有必要解决一个问题:为什么开发人员会选择在 Starknet 上构建应用链,而不是直接使用[Starknet 有效性 Rollups](https://starkware.co/resource/scaling-ethereum-navigating-the-blockchain-trilemma/#:~:text=top%20of%20them.-,Validity%20Rollups,-Validity%20rollups%2C%20also)。有人可能会想,Starknet 是否已经足以应对大多数情况。

-首先让我们了解下为什么应用链是Starknet生态系统中引人注目的扩展方式。
+首先让我们了解下为什么应用链是 Starknet 生态系统中引人注目的扩展方式。

 ## 为什么选择应用链

-Madara是由StarkWare探索团队,也称为[Keep Starknet Strange](https://github.com/keep-starknet-strange)开发的,专门设计用于实现StarkWare的[分形缩放](https://medium.com/starkware/fractal-scaling-from-l2-to-l3-7fe238ecfb4f)愿景。有许多令人信服的原因让开发人员选择创建一个Starknet应用链或L3,而不是直接依赖于Starknet。
+Madara 是由 StarkWare 探索团队,也称为[Keep Starknet Strange](https://github.com/keep-starknet-strange)开发的,专门设计用于实现 StarkWare 的[分形缩放](https://medium.com/starkware/fractal-scaling-from-l2-to-l3-7fe238ecfb4f)愿景。有许多令人信服的原因让开发人员选择创建一个 Starknet 应用链或 L3,而不是直接依赖于 Starknet。

 ### 吞吐量

-在现有的区块链基础设施中,应用开发人员在可扩展性上面临重大挑战。可扩展性包括两个关键点:高速度和低费用。通过在每一层降低一千倍成本,开发人员可以显著降低从L1到L3的整体成本,最高可达一百万倍。由于应用程序建立在其专用区块链上,从而无需与其他应用竞争链上资源,吞吐量不受第三方应用活动的影响,这确保了持续平稳的流畅体验。
+在现有的区块链基础设施中,应用开发人员在可扩展性上面临重大挑战。可扩展性包括两个关键点:高速度和低费用。通过在每一层降低一千倍成本,开发人员可以显著降低从 L1 到 L3 的整体成本,最高可达一百万倍。由于应用程序建立在其专用区块链上,从而无需与其他应用竞争链上资源,吞吐量不受第三方应用活动的影响,这确保了持续平稳的流畅体验。

 ### 定制化

-像Starknet和Ethereum等通用链采取了多项措施来确保网络对所有人可用,但这导致了一种受限的环境。通过应用链,开发人员可以微调其应用和基础设施的各个方面,创建量身定制的解决方案。不喜欢Cairo VM的某个特性?可以在你的应用链中将其排除掉。
+像 Starknet 和 Ethereum 等通用链采取了多项措施来确保网络对所有人可用,但这导致了一种受限的环境。通过应用链,开发人员可以微调其应用和基础设施的各个方面,创建量身定制的解决方案。不喜欢 Cairo VM 的某个特性?可以在你的应用链中将其排除掉。

 ### 创新

-应用链的可定制性还允许开发人员可以使用目前在Starknet中不可用或存在风险的功能。应用链赋予每个团队自主权,允许他们编写和授权任何所需的代码hints。这使得应用链能够解锁许多用例,譬如可以在不泄露个人隐私的情况下执行链上KYC。
+应用链的可定制性还允许开发人员可以使用目前在 Starknet 中不可用或存在风险的功能。应用链赋予每个团队自主权,允许他们编写和授权任何所需的代码 hints。这使得应用链能够解锁许多用例,譬如可以在不泄露个人隐私的情况下执行链上 KYC。

-## Madare对应用链堆栈的影响
+## Madara 对应用链堆栈的影响

-一起来看看构成应用链的不同层级间的相互作用,以及Madara的用武之地。
+一起来看看构成应用链的不同层级间的相互作用,以及 Madara 的用武之地。

 1. **执行:** 执行层定义了区块的执行和状态差异的生成。Madara 提供了在两种执行工具包(StarkWare 的 [blockifier](https://github.com/starkware-libs/blockifier)和 LambdaClass 的[starknet_in_rust](https://github.com/lambdaclass/starknet_in_rust))之间切换的灵活性。无论选择了哪个执行工具包,底层框架都使用 Cairo VM。Cairo 语言有助于创建可证明的程序,这样就能证明计算被正确执行。
 2. **结算:** 作为有效性 Rollup,Madara 应用链的状态可以仅通过检查其结算层来重建。通过在 Starknet L2 上更频繁的结算,L3 应用链可以实现更快的硬最终性,而去中心化的排序层实现更强大的软最终性,因此,在这两方面(硬和软终结性),结算都得到了增强。
-3. **排序:** Madara负责排序过程,可以根据应用的需求进行调整,无论是简单的FCFS或PGA,还是像Narwhall和Bullshark这类更复杂的方案。一些应用链可以选择部署加密内存池,以确保公平排序并减轻MEV的影响。
+3. **排序:** Madara 负责排序过程,可以根据应用的需求进行调整,无论是简单的 FCFS 或 PGA,还是像 Narwhall 和 Bullshark 这类更复杂的方案。一些应用链可以选择部署加密内存池,以确保公平排序并减轻 MEV 的影响。
 4. **数据可用性:** 数据可用性保证始终可访问完整的状态树,借此向用户提供信心,即使 Madara 发生故障的情况下,他们也能证明自己拥有资产的所有权。Madara 将为开发者提供多种可供选择的数据可用性方案。
-5. **治理:** 每个Madara应用链可以选择其治理模型。[Snapshot X](https://twitter.com/SnapshotLabs)提供了一个依赖于存储证明并完全基于链上的治理系统。其他治理机制也在探索中,譬如原生的Substrate治理面板。链上治理是Madara的核心价值所在。
+5. **治理:** 每个 Madara 应用链可以选择其治理模型。[Snapshot X](https://twitter.com/SnapshotLabs)提供了一个依赖于存储证明并完全基于链上的治理系统。其他治理机制也在探索中,譬如原生的 Substrate 治理面板。链上治理是 Madara 的核心价值所在。

 ![come come](https://lh4.googleusercontent.com/i7bXi2IPV-LTLzEgueA2SPHGULUFDj1OX4IznOQr5BeZe0hcey-VXA5TOV6q9XaVqBGAcYiie7u7uxw7q1ByZxjkPQKHERqKJTxhdDdTSgBQy8smyNO3jEHiNJv7Eqh8BMxjj4fFlQAW6gm-hQMzyIU)

 ## 进入: Madara

-在Madara中,通过利用Substrate框架并整合Cairo VM来执行Cairo程序和Starknet智能合约,从而增强了Cairo VM。Substrate是一个开源Rust框架,以其灵活性而闻名,并用于构建可定制的区块链。与此同时,Cairo VM专门设计用于高效生成程序执行的有效性证明。通过在L2上使用状态跟踪和智能合约来验证这些证明,应用链确保集成了Starknet的安全性。这样,Madara利用Cairo的强大功能实现了程序执行的可证明性。
+在 Madara 中,通过利用 Substrate 框架并整合 Cairo VM 来执行 Cairo 程序和 Starknet 智能合约,从而增强了 Cairo VM。Substrate 是一个开源 Rust 框架,以其灵活性而闻名,并用于构建可定制的区块链。与此同时,Cairo VM 专门设计用于高效生成程序执行的有效性证明。通过在 L2 上使用状态跟踪和智能合约来验证这些证明,应用链确保集成了 Starknet 的安全性。这样,Madara 利用 Cairo 的强大功能实现了程序执行的可证明性。

-Substrate框架固有的模块化特性使开发者可以轻松地定制应用链。没有任何强加的假设,允许你自行整合共识协议、哈希函数、签名方案、存储布局 - 无论你的应用需要什么,都可以利用Cairo来生成证明。无论是Starknet还是Ethereum上,开发者都可以在继承底层链安全性的同时,不受限制的操作,并可被证明。
+Substrate 框架固有的模块化特性使开发者可以轻松地定制应用链。没有任何强加的假设,允许你自行整合共识协议、哈希函数、签名方案、存储布局 - 无论你的应用需要什么,都可以利用 Cairo 来生成证明。无论是 Starknet 还是 Ethereum 上,开发者都可以在继承底层链安全性的同时,不受限制的操作,并可被证明。

-起初,Madara将与Starknet非常相似,使智能合约可以在Starknet生态系统内进行组合。未来将有更宏伟的计划,因为Starknet将与[Herodotus](https://www.herodotus.dev/)集成,利用[存储证明](https://book.starknet.io/chapter_8/storage_proofs.html)实现互操作性。存储证明的整合还将使Madara应用链能够考虑来自其他链的状态和流动性。
+起初,Madara 将与 Starknet 非常相似,使智能合约可以在 Starknet 生态系统内进行组合。未来将有更宏伟的计划,因为 Starknet 将与[Herodotus](https://www.herodotus.dev/)集成,利用[存储证明](https://book.starknet.io/chapter_8/storage_proofs.html)实现互操作性。存储证明的整合还将使 Madara 应用链能够考虑来自其他链的状态和流动性。

-准备好见证由Madara开启的Starknet新纪元吧。
+准备好见证由 Madara 开启的 Starknet 新纪元吧。
diff --git a/docs/content/articles/es/madara-beast-article.md b/docs/content/articles/es/madara-beast-article.md
new file mode 100644
index 0000000000..412791e2ca
--- /dev/null
+++ b/docs/content/articles/es/madara-beast-article.md
@@ -0,0 +1,165 @@
+![thee BEEAAST](https://imgur.com/EBwBNnB.jpg)
+
+# Aprovechando a la Bestia - Madara y la Revolución de las Appchains de Starknet
+
+**20 de julio de 2023** · 1
min de lectura + +_Desde Reducciones Masivas de Costos hasta Control Personalizado, +Descubre el Futuro de la Infraestructura Blockchain_ + +--- + +## TL;DR + +- Madara es un secuenciador de alto rendimiento de Starknet, que proporciona el + poder para crear + [Appchains](https://www.starknet.io/en/posts/ecosystem/the-starknet-stacks-growth-spurt) + personalizadas y eficientes. +- Al utilizar el framework Substrate, Madara amplía las capacidades de la + Máquina Virtual Cairo, lo que conduce a programas demostrables, seguros y + flexibles. +- Su implementación ofrece numerosos beneficios como infraestructura escalable, + alto rendimiento y un control sin precedentes sobre las aplicaciones. +- Las características únicas de Madara incluyen soporte para la privacidad + potencial _on-chain_, interoperabilidad simplificada entre diversas cadenas y + una ejecución robusta. +- Madara está abriendo el camino en el desarrollo de dApps al ofrecer soluciones + rentables, escalables y personalizables en el ámbito de _blockchain_. + +## Introducción + +Imagina tener el poder de personalizar una _blockchain_ específicamente para los +requisitos únicos de tu aplicación; eso es exactamente lo que ofrecen las +appchains. Las appchains son cadenas de bloques específicas para aplicaciones +que ofrecen a los desarrolladores la flexibilidad para ajustar aspectos de las +cadenas para adaptarlas a las necesidades de sus aplicaciones, como elegir una +función hash diferente o personalizar el algoritmo de consenso. ¿La mejor parte? +Las appchains heredan la seguridad de las robustas cadenas L1 o L2 en las que se +basan, proporcionando a los desarrolladores lo mejor de ambos mundos. + +Te presentamos a Madara, un secuenciador revolucionario que combina flexibilidad +y un rendimiento ultrarrápido. Los secuenciadores son entidades responsables de +ejecutar transacciones y agruparlas en lotes. Actuando como una puerta de +entrada para lanzar tu propia appchain Starknet, Madara abre un mundo de +posibilidades para la experimentación en el ecosistema de Starknet como nunca +antes. + +Antes de adentrarnos en las fascinantes capacidades de Madara para habilitar las +appchains de Starknet, es importante abordar la pregunta de por qué los +desarrolladores optarían por construir appchains sobre Starknet en lugar de +utilizar los +[Starknet Validity Rollups](https://starkware.co/resource/scaling-ethereum-navigating-the-blockchain-trilemma/#:~:text=top%20of%20them.-,Validity%20Rollups,-Validity%20rollups%2C%20also) +directamente. Uno podría preguntarse si Starknet ya es suficiente para la +mayoría de los escenarios. + +Primero, aprendamos por qué las appchains son una extensión convincente del +ecosistema de Starknet. + +## Por qué Appchains + +Madara, desarrollada por el Equipo de Exploración de StarkWare, también conocido +como [Keep Starknet Strange](https://github.com/keep-starknet-strange), está +diseñada específicamente para realizar la +[visión de escalabilidad fractal](https://medium.com/starkware/fractal-scaling-from-l2-to-l3-7fe238ecfb4f) +de StarkWare. Existen numerosas razones convincentes por las cuales los +desarrolladores podrían optar por establecer una appchain Starknet o L3 en lugar +de depender directamente de Starknet. + +### Rendimiento + +Los desarrolladores de aplicaciones enfrentan desafíos significativos en +términos de escalabilidad dentro de la infraestructura de _blockchain_ +existente. La escalabilidad abarca dos aspectos cruciales: alta velocidad y +bajos costos. 
Al implementar una reducción de costos de 1,000 veces en cada +capa, los desarrolladores pueden lograr una reducción de costos general notable +de L1 a L3, potencialmente alcanzando hasta 1,000,000 veces. La velocidad de +procesamiento no se ve afectada por la actividad de aplicaciones de terceros, ya +que la aplicación tiene su propia _blockchain_ dedicada y no compite por +recursos. Esto garantiza una experiencia constantemente fluida. + +### Personalización + +Cadenas de propósito general como Starknet y Ethereum tienen múltiples medidas +para garantizar que la red sea utilizable por todos, lo que lleva a un entorno +limitado. Con las appchains, los desarrolladores pueden ajustar varios aspectos +de sus aplicaciones e infraestructura, creando soluciones a medida. ¿No te gusta +una característica de la Máquina Virtual Cairo? Elimínala en tu appchain. + +### Innovación + +La capacidad de personalización de las appchains también permite a los +desarrolladores trabajar con características que actualmente no están +disponibles o son riesgosas en entornos como Starknet. Las appchains ofrecerán a +cada equipo la autonomía para escribir y autorizar cualquier pista de código +deseada. Esto permite a las appchains desbloquear muchos casos de uso, como la +capacidad de aplicar KYC _on-chain_ sin divulgar información privada. + +## Efecto de Madara en el Stack de Appchains + +1. **Ejecución:** La capa de ejecución define la ejecución de bloques y la + generación de la diferencia de estado. Madara ofrece la flexibilidad para + cambiar entre dos "crates" de ejecución, + [Blockifier de StarkWare](https://github.com/starkware-libs/blockifier) y + [Starknet_in_rust de LambdaClass](https://github.com/lambdaclass/starknet_in_rust). + Independientemente del "crate" elegido, el framework subyacente utiliza la + Máquina Virtual Cairo. El lenguaje Cairo facilita la creación de programas + demostrables, lo que permite la demostración de la ejecución correcta del + cálculo. +2. **Liquidación:** Como Validity Rollup, el estado de una appchain Madara se + puede reconstruir únicamente examinando su capa de liquidación. Al liquidar + más frecuentemente en Starknet L2, una appchain L3 puede lograr una finalidad + más rápida y descentralizar la capa de secuenciación para lograr una + finalidad suave más sólida. Por lo tanto, la liquidación se mejora en ambos + frentes (finalidad dura y suave). +3. **Secuenciación:** Madara se encarga del proceso de secuenciación, que se + puede alterar para satisfacer las necesidades de la aplicación, ya sea un + simple FCFS, PGA o esquemas más complejos como Narwhall & Bullshark. Ciertas + appchains pueden optar por implementar "mempools" encriptados para garantizar + un orden justo y mitigar el impacto de MEV. +4. **Disponibilidad de Datos:** La disponibilidad de datos garantiza que el + árbol de estado completo siga siendo accesible, proporcionando a los usuarios + la confianza de que pueden demostrar la propiedad de sus fondos incluso si + Madara experimenta una interrupción. Madara ofrecerá a los desarrolladores + una variedad de esquemas de disponibilidad de datos (DA) para elegir. +5. **Gobernanza:** Cada appchain Madara puede elegir su modelo de gobernanza. + [Snapshot X](https://twitter.com/SnapshotLabs) ofrece un sistema de + gobernanza completamente _on-chain_ que se basa en pruebas de almacenamiento. + También se están explorando mecanismos de gobernanza alternativos, como el + "governance pallet" nativo de Substrate. 
La gobernanza _on-chain_ se presenta + como un valor fundamental para Madara. + +![come come](https://lh4.googleusercontent.com/i7bXi2IPV-LTLzEgueA2SPHGULUFDj1OX4IznOQr5BeZe0hcey-VXA5TOV6q9XaVqBGAcYiie7u7uxw7q1ByZxjkPQKHERqKJTxhdDdTSgBQy8smyNO3jEHiNJv7Eqh8BMxjj4fFlQAW6gm-hQMzyIU) + +## Entra: Madara + +En Madara, la Máquina Virtual Cairo se está mejorando mediante la utilización +del framework Substrate e integrando la Máquina Virtual Cairo para ejecutar +programas Cairo y contratos inteligentes de Starknet. Substrate es un framework +Rust de código abierto para construir cadenas de bloques personalizables, que es +conocido por su flexibilidad. Mientras tanto, la Máquina Virtual Cairo está +diseñada específicamente para generar de manera eficiente Pruebas de Validez +para la ejecución de programas. Al utilizar seguimiento de estado y un contrato +inteligente para verificar estas pruebas en L2, la appchain asegura una +integración segura con Starknet. De esta manera, Madara aprovecha el poder de +Cairo para habilitar la demostración de la ejecución de programas. + +La naturaleza modular inherente del framework Substrate permite a los +desarrolladores personalizar la appchain con facilidad. No se imponen +suposiciones, lo que te permite incorporar tu propio protocolo de consenso, +función hash, esquema de firma, distribución de almacenamiento, lo que sea que +tu aplicación requiera, todo mientras utilizas Cairo para generar pruebas. No +hay límites en lo que los desarrolladores pueden hacer mientras siguen siendo +demostrables, heredando la seguridad de la cadena subyacente, ya sea Starknet o +Ethereum. + +Inicialmente, Madara tendrá un fuerte parecido a Starknet, lo que permitirá la +composición de contratos inteligentes dentro del ecosistema de Starknet. Hay +planes más grandes en el futuro a medida que Starknet se integra con +[Herodotus](https://www.herodotus.dev/) para aprovechar +[pruebas de almacenamiento](https://starkware.medium.com/what-are-storage-proofs-and-how-can-they-improve-oracles-e0379108720a) +para lograr interoperabilidad. La integración de pruebas de almacenamiento +también permitirá que las appchains de Madara consideren el estado y la liquidez +de otras cadenas. + +Prepárate para presenciar un nuevo espacio de posibilidades en el universo de +Starknet, habilitado por Madara. diff --git a/docs/faucet-setup.md b/docs/faucet-setup.md index 8aa7aa105f..65f6d5dd2f 100644 --- a/docs/faucet-setup.md +++ b/docs/faucet-setup.md @@ -63,81 +63,47 @@ was added so that the new request to get tokens is sent with the correct nonce. If you have a use case where you need to customize your faucet or you need to get faucet funds using code, you can achieve this by simply transferring funds -from any of the genesis accounts using RPC calls. The genesis account private -key for address `0x2` is available in -`crates/pallets/starknet/src/tests/constants.rs`. +from any of the genesis accounts using RPC calls. The genesis accounts and their +private keys are logged when running a node in `--dev` mode. Keep in mind that account `0x1` on Madara doesn't support multicall so `account.execute` from starknetjs fails. You can either invoke the transfer transaction as shown [here](https://github.com/keep-starknet-strange/madara/blob/c916046adf9d7ea52131442090fae654ba6b234d/tests/util/starknet.ts#L241) -or use an account like `0x2` which is based on Argent and supports multicall. +or use an account that supports multicall. 
-**Example code for collecting tokens from `0x2` using starknetjs** +**Example code for collecting tokens from `0x4` using starknetjs** ```javascript import * as starknet from "starknet"; +import ERC20 from "./ERC20.json" assert { type: "json" }; const eth_address = "0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7"; const provider = new starknet.RpcProvider({ nodeUrl: "http://localhost:9944", }); -const starkKeyPair = starknet.ec.getKeyPair( +const account = new starknet.Account( + provider, + "0x0000000000000000000000000000000000000000000000000000000000000004", "0x00c1cf1490de1352865301bb8705143f3ef938f97fdf892f1090dcb5ac7bcd1d", + "1", ); -const address = "0x2"; async function transfer(to) { - const nonce = await provider.getNonceForAddress(address); - const chainId = await provider.getChainId(); - - const calldata = starknet.transaction.fromCallsToExecuteCalldata([ - { - contractAddress: eth_address, - entrypoint: "transfer", - calldata: starknet.stark.compileCalldata({ - recipient: to, - amount: { - type: "struct", - low: "1000000", - high: "0", - }, - }), + const contract = new starknet.Contract(ERC20.abi, eth_address, provider); + let result = contract.populate("transfer", { + recipient: to, + amount: { + low: 10000000, + high: 0, }, - ]); - const maxFee = "0x11111111111"; - const version = "0x1"; - const txnHash = starknet.hash.calculateTransactionHash( - address, - version, - calldata, - maxFee, - chainId, - nonce, - ); - const signature = starknet.ec.sign(starkKeyPair, txnHash); - const invocationCall = { - signature, - contractAddress: address, - calldata, - }; - const invocationDetails = { - maxFee, - nonce, - version, - }; - - // if estimating fees passes without failures, the txn should go through - const estimateFee = await provider.getEstimateFee( - invocationCall, - invocationDetails, - ); - console.log("Estimate fee - ", estimateFee); - - const tx = await provider.invokeFunction(invocationCall, invocationDetails); - console.log(tx.transaction_hash); + }); + + let hash = await account.execute(result, undefined, {}); + + console.log("Txn hash - ", hash); } -transfer("0x11"); +transfer("0x100"); ``` diff --git a/docs/genesis.md b/docs/genesis.md index 6972297c7c..ab993d7420 100644 --- a/docs/genesis.md +++ b/docs/genesis.md @@ -32,6 +32,7 @@ The below defines all hardcoded values set in the geneses: | 0x0279d77db761fba82e0054125a6fdb5f6baa6286fa3fb73450cc44d193c2d37f | No validation account class hash | | 0x35ccefcf9d5656da623468e27e682271cd327af196785df99e7fee1436b6276 | No validation account class hash cairo 1 | | 0x06f0d6f6ae72e1a507ff4b65181291642889742dbf8f1a53e9ec1c595d01ba7d | Argent account class hash | +| 0x01d53d50d204842575c87f25161248aadb26d33c7375d0f043e2a3f1243cf874 | Argent multicall class hash | | 0x0424b7f61e3c5dfd74400d96fdea7e1f0bf2757f31df04387eaa957f095dd7b9 | Proxy class hash | | 0x2c2b8f559e1221468140ad7b2352b1a5be32660d0bf1a3ae3a054a4ec5254e4 | Braavos account class hash | | 0x5aa23d5bb71ddaa783da7ea79d405315bafa7cf0387a74f4593578c3e9e6570 | Braavos account base implementation class hash | @@ -60,6 +61,7 @@ The below defines all hardcoded values set in the geneses: | 0x040e59c2c182a58fb0a74349bfa4769cbbcba32547591dd3fb1def8623997d02 | 0x077cc28ed3c661419fda16bf120fb81f1f8f28617f5543b05a86d63b0926bbf4 | | 0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7 | 0x0372ee6669dc86563007245ed7343d5180b96221ce28f44408cff2898038dbd4 | | 0x041a78e741e5af2fec34b695679bc6891742439f7afb8484ecd7766661ad02bf | 
0x07b3e05f48f0c69e4a65ce5e076a66271a527aff2c34ce1083ec6e1526997a69 | +| 0x05754af3760f3356da99aea5c3ec39ccac7783d925a19666ebbeca58ff0087f4 | 0x01d53d50d204842575c87f25161248aadb26d33c7375d0f043e2a3f1243cf874 | @@ -85,6 +87,9 @@ deployed as a ERC721 contract (given the class hash of 0x80000). | Contract address | Storage key | Storage value | | :----------------------------------------------------------------- | :------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| 0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7 | 0x341c1bdfd89f69748aa00b5742b03adbffd79b8e80cab5c50d91cd8c2a79be1 (ERC20_name) | 0x4574686572 | +| 0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7 | 0x0b6ce5410fca59d078ee9b2a4371a9d684c530d697c64fbef0ae6d5e8f0ac72 (ERC20_symbol) | 0x455448 | +| 0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7 | 0x1f0d4aa99431d246bac9b8e48c33e888245b15e9678f64f9bdfc8823dc8f979 (ERC20_decimals) | 0x12 | | 0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7 | 0x7b62949c85c6af8a50c11c22927f9302f7a2e40bc93b4c988415915b0f97f09 (ERC20_balances(0x1).low) | 0xffffffffffffffffffffffffffffffff (U128::MAX) | | 0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7 | 0x7b62949c85c6af8a50c11c22927f9302f7a2e40bc93b4c988415915b0f97f0a (ERC20_balances(0x1).high) | 0xffffffffffffffffffffffffffffffff (U128::MAX) | | 0x49d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7 | 0x1d8bbc4f93f5ab9858f6c0c0de2769599fb97511503d5bf2872ef6846f2146f (ERC20_balances(0x2).low) | 0xffffffffffffffffffffffffffffffff (U128::MAX) | @@ -153,6 +158,9 @@ deployed as a ERC721 contract (given the class hash of 0x80000). 
| Contract address | Storage key | Storage value | | :----------------------------------------------------------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| 0x00000000000000000000000000000000000000000000000000000000000000AA | 0x341c1bdfd89f69748aa00b5742b03adbffd79b8e80cab5c50d91cd8c2a79be1 (ERC20_name) | 0x4574686572 | +| 0x00000000000000000000000000000000000000000000000000000000000000AA | 0x0b6ce5410fca59d078ee9b2a4371a9d684c530d697c64fbef0ae6d5e8f0ac72 (ERC20_symbol) | 0x455448 | +| 0x00000000000000000000000000000000000000000000000000000000000000AA | 0x1f0d4aa99431d246bac9b8e48c33e888245b15e9678f64f9bdfc8823dc8f979 (ERC20_decimals) | 0x12 | | 0x00000000000000000000000000000000000000000000000000000000000000AA | 0x3701645da930cd7f63318f7f118a9134e72d64ab73c72ece81cae2bd5fb403f (ERC20_balances(0x01a3339ec92ac1061e3e0f8e704106286c642eaf302e94a582e5f95ef5e6b4d0).low) | 0xffffffffffffffffffffffffffffffff | | 0x00000000000000000000000000000000000000000000000000000000000000AA | 0x3701645da930cd7f63318f7f118a9134e72d64ab73c72ece81cae2bd5fb4040 (ERC20_balances(0x01a3339ec92ac1061e3e0f8e704106286c642eaf302e94a582e5f95ef5e6b4d0).high) | 0xffffffffffffffffffffffffffffffff | | 0x00000000000000000000000000000000000000000000000000000000000000AA | 0x25aa869465e1c3ac7ed6e933ef1af43f4d9126339b8f453f692d631c4a40d24 (ERC20_balances(0x0642a8b9e2c6cc3a9ddb84575123f262a21415f78db453b0625d889e1e06ac32).low) | 0xffffffffffffffffffffffffffffffff | diff --git a/starknet-rpc-test/Cargo.toml b/starknet-rpc-test/Cargo.toml index dfb4ebafd1..e78da3871b 100644 --- a/starknet-rpc-test/Cargo.toml +++ b/starknet-rpc-test/Cargo.toml @@ -8,12 +8,12 @@ edition = "2021" anyhow = "1.0.72" assert_matches = "1.5.0" -async-lock = "3.0.0" +async-lock = "3.1.0" flate2 = { workspace = true } reqwest = "0.11.18" rstest = "0.18.1" -serde = { version = "1.0.190", features = ["derive"] } -serde_json = "1.0.107" +serde = { version = "1.0.192", features = ["derive"] } +serde_json = "1.0.108" starknet-accounts = { workspace = true } starknet-contract = { workspace = true } starknet-core = { workspace = true } @@ -22,7 +22,7 @@ starknet-ff = { workspace = true } starknet-gateway = { workspace = true } starknet-signers = { workspace = true } thiserror = { workspace = true } -tokio = { version = "1.33.0", features = ["rt", "macros", "parking_lot"] } +tokio = { version = "1.34.0", features = ["rt", "macros", "parking_lot"] } url = "2.4.1" [[test]] diff --git a/starknet-rpc-test/call.rs b/starknet-rpc-test/call.rs index 94da902380..9e4e02f51c 100644 --- a/starknet-rpc-test/call.rs +++ b/starknet-rpc-test/call.rs @@ -109,7 +109,7 @@ async fn works_on_correct_call_no_calldata(madara: &ThreadSafeMadaraClient) -> R ) .await .unwrap(), - vec![FieldElement::ZERO] + vec![FieldElement::from_hex_be("0x4574686572").unwrap()] ); Ok(()) diff --git a/starknet-rpc-test/estimate_fee.rs b/starknet-rpc-test/estimate_fee.rs index 3982876153..c349ac4156 100644 --- a/starknet-rpc-test/estimate_fee.rs +++ b/starknet-rpc-test/estimate_fee.rs @@ -84,9 +84,7 @@ async fn fail_if_one_txn_cannot_be_executed(madara: &ThreadSafeMadaraClient) -> async fn works_ok(madara: &ThreadSafeMadaraClient) -> Result<(), anyhow::Error> { let rpc = madara.get_starknet_client().await; - // from mainnet tx: 0x000c52079f33dcb44a58904fac3803fd908ac28d6632b67179ee06f2daccb4b5 - // 
https://starkscan.co/tx/0x000c52079f33dcb44a58904fac3803fd908ac28d6632b67179ee06f2daccb4b5 - let invoke_transaction = BroadcastedTransaction::Invoke(BroadcastedInvokeTransaction { + let tx = BroadcastedInvokeTransaction { max_fee: FieldElement::ZERO, signature: vec![], nonce: FieldElement::ZERO, @@ -99,18 +97,25 @@ async fn works_ok(madara: &ThreadSafeMadaraClient) -> Result<(), anyhow::Error> FieldElement::from_hex_be("494196e88ce16bff11180d59f3c75e4ba3475d9fba76249ab5f044bcd25add6").unwrap(), ], is_query: true, - }); + }; + + // from mainnet tx: 0x000c52079f33dcb44a58904fac3803fd908ac28d6632b67179ee06f2daccb4b5 + // https://starkscan.co/tx/0x000c52079f33dcb44a58904fac3803fd908ac28d6632b67179ee06f2daccb4b5 + let invoke_transaction = BroadcastedTransaction::Invoke(tx.clone()); + + let invoke_transaction_2 = + BroadcastedTransaction::Invoke(BroadcastedInvokeTransaction { nonce: FieldElement::ONE, ..tx }); - let estimate = - rpc.estimate_fee(&vec![invoke_transaction.clone(), invoke_transaction], BlockId::Tag(BlockTag::Latest)).await?; + let estimates = + rpc.estimate_fee(&vec![invoke_transaction, invoke_transaction_2], BlockId::Tag(BlockTag::Latest)).await?; // TODO: instead execute the tx and check that the actual fee are the same as the estimated ones - assert_eq!(estimate.len(), 2); - assert_eq!(estimate[0].overall_fee, 410); - assert_eq!(estimate[1].overall_fee, 410); + assert_eq!(estimates.len(), 2); + assert_eq!(estimates[0].overall_fee, 410); + assert_eq!(estimates[1].overall_fee, 410); // https://starkscan.co/block/5 - assert_eq!(estimate[0].gas_consumed, 0); - assert_eq!(estimate[1].gas_consumed, 0); + assert_eq!(estimates[0].gas_consumed, 0); + assert_eq!(estimates[1].gas_consumed, 0); Ok(()) } diff --git a/starknet-rpc-test/get_transaction_receipt.rs b/starknet-rpc-test/get_transaction_receipt.rs index 7173701799..740edad08e 100644 --- a/starknet-rpc-test/get_transaction_receipt.rs +++ b/starknet-rpc-test/get_transaction_receipt.rs @@ -67,7 +67,7 @@ async fn work_with_invoke_transaction(madara: &ThreadSafeMadaraClient) -> Result match invoke_tx_receipt { Ok(MaybePendingTransactionReceipt::Receipt(TransactionReceipt::Invoke(receipt))) => { assert_eq!(receipt.transaction_hash, rpc_response.transaction_hash); - // assert_eq!(receipt.actual_fee, expected_fee); TODO: Fix in RPC + assert_eq!(receipt.actual_fee, expected_fee); assert_eq!(receipt.finality_status, TransactionFinalityStatus::AcceptedOnL2); assert_eq_msg_to_l1(receipt.messages_sent, vec![]); assert_eq_event( @@ -143,7 +143,7 @@ async fn work_with_declare_transaction(madara: &ThreadSafeMadaraClient) -> Resul _ => panic!("expected declare transaction receipt"), }; assert_eq!(d1.transaction_hash, d2.transaction_hash); - // assert_eq!(d1.actual_fee, d2.actual_fee); TODO: Fix in rpc + assert_eq!(d1.actual_fee, d2.actual_fee); assert_eq!(d1.finality_status, d2.finality_status); assert_eq!(d1.block_hash, d2.block_hash); assert_eq!(d1.block_number, d2.block_number); @@ -175,7 +175,7 @@ async fn work_with_declare_transaction(madara: &ThreadSafeMadaraClient) -> Resul from_address: fee_token_address, keys: vec![get_selector_from_name("Transfer").unwrap()], data: vec![ - FieldElement::from_hex_be(ARGENT_CONTRACT_ADDRESS).unwrap(), // to (sequencer address) + FieldElement::from_hex_be(ARGENT_CONTRACT_ADDRESS).unwrap(), // from FieldElement::from_hex_be(SEQUENCER_ADDRESS).unwrap(), // to (sequencer address) expected_fee, // value low FieldElement::ZERO, // value high @@ -233,7 +233,7 @@ async fn 
work_with_deploy_account_transaction(madara: &ThreadSafeMadaraClient) - match account_deployment_tx_receipt { Ok(MaybePendingTransactionReceipt::Receipt(TransactionReceipt::DeployAccount(receipt))) => { assert_eq!(receipt.transaction_hash, rpc_response.transaction_hash); - // assert_eq!(receipt.actual_fee, expected_fee); TODO: fix in code + assert_eq!(receipt.actual_fee, expected_fee); assert_eq!(receipt.finality_status, TransactionFinalityStatus::AcceptedOnL2); assert_eq_msg_to_l1(receipt.messages_sent, vec![]); assert_eq_event( @@ -250,7 +250,7 @@ async fn work_with_deploy_account_transaction(madara: &ThreadSafeMadaraClient) - }], ); assert_matches!(receipt.execution_result, ExecutionResult::Succeeded); - assert_eq!(receipt.contract_address, FieldElement::ZERO); + assert_eq!(receipt.contract_address, account_address); } _ => panic!("expected deploy account transaction receipt"), }; @@ -258,6 +258,71 @@ async fn work_with_deploy_account_transaction(madara: &ThreadSafeMadaraClient) - Ok(()) } +#[rstest] +#[tokio::test] +async fn ensure_transfer_fee_event_not_messed_up_with_similar_transfer( + madara: &ThreadSafeMadaraClient, +) -> Result<(), anyhow::Error> { + let rpc = madara.get_starknet_client().await; + + let mut madara_write_lock = madara.write().await; + let transfer_amount = FieldElement::from_hex_be("0x100000").unwrap(); + let funding_account = build_single_owner_account(&rpc, SIGNER_PRIVATE, ARGENT_CONTRACT_ADDRESS, true); + let mut tx = madara_write_lock + .create_block_with_txs(vec![Transaction::Execution(funding_account.transfer_tokens( + FieldElement::from_hex_be(SEQUENCER_ADDRESS).unwrap(), + transfer_amount, + None, + ))]) + .await?; + let rpc_response = match tx.remove(0).unwrap() { + TransactionResult::Execution(rpc_response) => rpc_response, + _ => panic!("expected execution result"), + }; + let tx_receipt = get_transaction_receipt(&rpc, rpc_response.transaction_hash).await; + let fee_token_address = FieldElement::from_hex_be(FEE_TOKEN_ADDRESS).unwrap(); + let expected_fee = FieldElement::from_hex_be("0xf154").unwrap(); + + match tx_receipt { + Ok(MaybePendingTransactionReceipt::Receipt(TransactionReceipt::Invoke(mut receipt))) => { + assert_eq!(receipt.transaction_hash, rpc_response.transaction_hash); + assert_eq!(receipt.actual_fee, expected_fee); + assert_eq!(receipt.finality_status, TransactionFinalityStatus::AcceptedOnL2); + assert_eq_msg_to_l1(receipt.messages_sent, vec![]); + receipt.events.remove(1); + assert_eq_event( + receipt.events, + vec![ + Event { + from_address: fee_token_address, + keys: vec![get_selector_from_name("Transfer").unwrap()], + data: vec![ + FieldElement::from_hex_be(ARGENT_CONTRACT_ADDRESS).unwrap(), // from + FieldElement::from_hex_be(SEQUENCER_ADDRESS).unwrap(), // to + transfer_amount, // value low + FieldElement::ZERO, // value high + ], + }, + Event { + from_address: fee_token_address, + keys: vec![get_selector_from_name("Transfer").unwrap()], + data: vec![ + FieldElement::from_hex_be(ARGENT_CONTRACT_ADDRESS).unwrap(), // from + FieldElement::from_hex_be(SEQUENCER_ADDRESS).unwrap(), // to + expected_fee, // value low + FieldElement::ZERO, // value high + ], + }, + ], + ); + assert_matches!(receipt.execution_result, ExecutionResult::Succeeded); + } + _ => panic!("expected transfer receipt"), + }; + + Ok(()) +} + #[rstest] #[tokio::test] async fn fail_invalid_transaction_hash(madara: &ThreadSafeMadaraClient) -> Result<(), anyhow::Error> {