Merge remote-tracking branch 'origin/master' into lexnv/update-smoldot

Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io>
This commit is contained in:
Alexandru Vasile
2024-04-03 19:05:40 +03:00
190 changed files with 27843 additions and 18050 deletions
+2 -2
View File
@@ -5,12 +5,12 @@ updates:
schedule:
interval: weekly
ignore:
# these need to be updated together, so dependabot PRs
# these need to be updated together, so dependabot PRs
# are just noise. So, ignore them:
- dependency-name: sp-core
- dependency-name: sp-keyring
- dependency-name: sp-runtime
- dependency-name: sp-core-hashing
- dependency-name: sp-crypto-hashing
- dependency-name: sp-version
- package-ecosystem: github-actions
directory: '/'
@@ -0,0 +1,3 @@
# use-nodes
This action downloads the substrate and polkadot binaries produced from the `build-nodes` workflow and puts them into the `$PATH`.
@@ -0,0 +1,40 @@
name: Use substrate and polkadot binaries
description: Downloads and configures the substrate and polkadot binaries built with `build-nodes`
runs:
using: composite
steps:
- name: Download substrate-node binary
id: download-substrate-binary
uses: dawidd6/action-download-artifact@268677152d06ba59fcec7a7f0b5d961b6ccd7e1e # v2.28.0
with:
workflow: build-nodes.yml
name: nightly-substrate-binary
- name: Download polkadot binary
id: download-polkadot-binary
uses: dawidd6/action-download-artifact@268677152d06ba59fcec7a7f0b5d961b6ccd7e1e # v2.28.0
with:
workflow: build-nodes.yml
name: nightly-polkadot-binary
- name: decompress polkadot binary
shell: bash
run: |
tar -xzvf ./polkadot.tar.gz
cp ./target/release/polkadot ./polkadot
- name: Prepare binaries
shell: bash
run: |
chmod u+x ./substrate-node
chmod u+x ./polkadot
chmod u+x ./polkadot-execute-worker
chmod u+x ./polkadot-prepare-worker
./substrate-node --version
./polkadot --version
mkdir -p ~/.local/bin
mv ./substrate-node ~/.local/bin
mv ./polkadot ~/.local/bin
mv ./polkadot-execute-worker ~/.local/bin
mv ./polkadot-prepare-worker ~/.local/bin
rm ./polkadot.tar.gz
@@ -1,3 +0,0 @@
# use-substrate
This action downloads the substrate binary produced from the `build-substrate` workflow and puts it into the `$PATH`.
@@ -1,19 +0,0 @@
name: Use substrate binary
description: Downloads and configures the substrate binary built with build-substrate
runs:
using: composite
steps:
- name: Download substrate-node binary
id: download-artifact
uses: dawidd6/action-download-artifact@268677152d06ba59fcec7a7f0b5d961b6ccd7e1e # v2.28.0
with:
workflow: build-substrate.yml
name: nightly-substrate-binary
- name: Prepare substrate-node binary
shell: bash
run: |
chmod u+x ./substrate-node
./substrate-node --version
mkdir -p ~/.local/bin
cp ./substrate-node ~/.local/bin
@@ -1,4 +1,4 @@
name: Build Substrate Binary
name: Build Substrate and Polkadot Binaries
on:
# Allow it to be manually ran to rebuild binary when needed:
@@ -9,7 +9,7 @@ on:
jobs:
tests:
name: Build Substrate
name: Build Substrate and Polkadot Binaries
runs-on: ubuntu-latest-16-cores
steps:
- name: checkout polkadot-sdk
@@ -35,6 +35,17 @@ jobs:
command: build
args: --release --manifest-path substrate/bin/node/cli/Cargo.toml
- name: build polkadot binary
uses: actions-rs/cargo@v1
with:
command: build
args: --release --manifest-path polkadot/Cargo.toml
- name: Strip binaries
run: |
cargo install cargo-strip
cargo strip
- name: upload substrate binary
uses: actions/upload-artifact@v3
with:
@@ -42,3 +53,19 @@ jobs:
path: target/release/substrate-node
retention-days: 2
if-no-files-found: error
# Note: Uncompressed polkadot binary is ~124MB -> too large for git (max 100MB) without git lfs. Compressed it is only ~45MB
- name: compress polkadot binary
run: |
tar -zcvf target/release/polkadot.tar.gz target/release/polkadot
- name: upload polkadot binary
uses: actions/upload-artifact@v3
with:
name: nightly-polkadot-binary
path: |
target/release/polkadot.tar.gz
target/release/polkadot-execute-worker
target/release/polkadot-prepare-worker
retention-days: 2
if-no-files-found: error
+3 -3
View File
@@ -16,8 +16,8 @@ jobs:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
@@ -37,7 +37,7 @@ jobs:
# If any previous step fails, create a new Github issue to notify us about it.
- if: ${{ failure() }}
uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
+156 -61
View File
@@ -46,16 +46,50 @@ jobs:
command: fmt
args: --all -- --check
clippy:
name: Cargo clippy
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
machete:
name: "Check unused dependencies"
runs-on: ubuntu-latest
needs: fmt
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
- name: Install cargo-machete
run: cargo install cargo-machete
- name: Check unused dependencies
uses: actions-rs/cargo@v1.0.3
with:
command: machete
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
clippy:
name: Cargo clippy
runs-on: ubuntu-latest
needs: [fmt, machete]
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
@@ -69,21 +103,56 @@ jobs:
uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
- name: Run clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: --all-targets -- -D warnings
run: |
cargo clippy --all-targets --features unstable-light-client -- -D warnings
cargo clippy -p subxt-lightclient --no-default-features --features web -- -D warnings
cargo clippy -p subxt --no-default-features --features web -- -D warnings
cargo clippy -p subxt --no-default-features --features web,unstable-light-client -- -D warnings
check:
name: Cargo check
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
wasm_clippy:
name: Cargo clippy (WASM)
runs-on: ubuntu-latest
needs: [fmt, clippy]
needs: [fmt, machete]
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
target: wasm32-unknown-unknown
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
- name: Run clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: -p subxt --no-default-features --features web,unstable-light-client,jsonrpsee --target wasm32-unknown-unknown -- -D warnings
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
check:
name: Cargo check
runs-on: ubuntu-latest
needs: [fmt, machete]
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
@@ -96,7 +165,7 @@ jobs:
uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
- name: Install cargo-hack
uses: baptiste0928/cargo-install@v2
uses: baptiste0928/cargo-install@v3
with:
crate: cargo-hack
version: 0.5
@@ -118,8 +187,8 @@ jobs:
- name: Cargo check subxt-signer
run: |
cargo check -p subxt-signer
cargo check -p subxt-signer --no-default-features --features sr25519,native
cargo check -p subxt-signer --no-default-features --features ecdsa,native
cargo check -p subxt-signer --no-default-features --features sr25519
cargo check -p subxt-signer --no-default-features --features ecdsa
# We can't enable web features here, so no cargo hack.
- name: Cargo check subxt-lightclient
@@ -133,10 +202,13 @@ jobs:
- name: Cargo check parachain-example
run: cargo check --manifest-path examples/parachain-example/Cargo.toml
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
wasm_check:
name: Cargo check (WASM)
runs-on: ubuntu-latest
needs: [fmt, clippy]
needs: [fmt, machete]
steps:
- name: Checkout sources
uses: actions/checkout@v4
@@ -157,45 +229,19 @@ jobs:
run: |
cargo check --manifest-path examples/wasm-example/Cargo.toml --target wasm32-unknown-unknown
machete:
name: "Check unused dependencies"
runs-on: ubuntu-latest
needs: [check, wasm_check]
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
- name: Install cargo-machete
run: cargo install cargo-machete
- name: Check unused dependencies
uses: actions-rs/cargo@v1.0.3
with:
command: machete
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
docs:
name: Check documentation and run doc tests
runs-on: ubuntu-latest
needs: [check, wasm_check]
needs: [fmt, machete]
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
@@ -216,17 +262,20 @@ jobs:
command: test
args: --doc
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
tests:
name: "Test (Native)"
runs-on: ubuntu-latest-16-cores
needs: [machete, docs]
needs: [clippy, wasm_clippy, check, wasm_check, docs]
timeout-minutes: 30
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
@@ -247,17 +296,20 @@ jobs:
command: nextest
args: run --workspace
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
unstable_backend_tests:
name: "Test (Unstable Backend)"
runs-on: ubuntu-latest-16-cores
needs: [machete, docs]
needs: [clippy, wasm_clippy, check, wasm_check, docs]
timeout-minutes: 30
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
@@ -278,17 +330,20 @@ jobs:
command: nextest
args: run --workspace --features unstable-backend-client
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
light_client_tests:
name: "Test (Light Client)"
runs-on: ubuntu-latest
needs: [machete, docs]
needs: [clippy, wasm_clippy, check, wasm_check, docs]
timeout-minutes: 15
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
@@ -306,10 +361,13 @@ jobs:
command: test
args: --release --package integration-tests --features unstable-light-client
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
wasm_tests:
name: Test (WASM)
runs-on: ubuntu-latest
needs: [machete, docs]
needs: [clippy, wasm_clippy, check, wasm_check, docs]
timeout-minutes: 30
env:
# Set timeout for wasm tests to be much bigger than the default 20 secs.
@@ -330,8 +388,8 @@ jobs:
- name: Rust Cache
uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Run subxt WASM tests
run: |
@@ -358,3 +416,40 @@ jobs:
wasm-pack test --headless --firefox
wasm-pack test --headless --chrome
working-directory: signer/wasm-tests
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
no-std-tests:
name: "Test (no_std)"
runs-on: ubuntu-latest
needs: [machete, docs]
timeout-minutes: 30
steps:
- name: Checkout sources
uses: actions/checkout@v4
# Note: needs nighly toolchain because otherwise we cannot define custom lang-items.
- name: Install Rust nightly toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true
target: thumbv7em-none-eabi
- name: Install the gcc-arm-none-eabi linker
run: sudo apt install gcc-arm-none-eabi
- name: Rust Cache
uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
# Note: We currently do not have a way to run real tests in a `no_std` environment.
# We can only make sure that they compile to ARM thumb ISA.
# Running the binary and inspecting the output would require an actual machine with matching ISA or some sort of emulator.
- name: Compile `no-std-tests` crate to `thumbv7em-none-eabi` target.
run: cargo build --target thumbv7em-none-eabi
working-directory: testing/no-std-tests
- if: "failure()"
uses: "andymckay/cancel-action@271cfbfa11ca9222f7be99a47e8f929574549e0a" # v0.4
+9 -6
View File
@@ -19,10 +19,9 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
# We run this (up-to-date) node locally to fetch metadata from it for the artifacts
- name: Use substrate-node binary
uses: ./.github/workflows/actions/use-substrate
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
@@ -42,12 +41,16 @@ jobs:
- name: Fetch Artifacts
run: cargo run --bin artifacts
- name: Delete substrate node binary
run: rm ./substrate-node
- uses: actions/create-github-app-token@v1
id: app-token
with:
app-id: ${{ secrets.SUBXT_PR_MAKER_APP_ID }}
private-key: ${{ secrets.SUBXT_PR_MAKER_APP_KEY }}
- name: Create Pull Request
uses: peter-evans/create-pull-request@v5
uses: peter-evans/create-pull-request@v6
with:
token: ${{ steps.app-token.outputs.token }}
base: master
branch: update-artifacts
commit-message: Update Artifacts (auto-generated)
+111 -4
View File
@@ -4,8 +4,115 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.35.0] - 2024-03-21
This release contains several fixes, adds `no_std` support to a couple of crates (`subxt-signer` and `subxt-metadata`) and introduces a few quality of life improvements, which I'll quickly cover:
### Reworked light client ([#1475](https://github.com/paritytech/subxt/pull/1475))
This PR reworks the light client interface. The "basic" usage of connecting to a parachain now looks like this:
```rust
#[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata_small.scale")]
pub mod polkadot {}
use subxt::lightclient::LightClient;
// Instantiate a light client with the Polkadot relay chain given its chain spec.
let (lightclient, polkadot_rpc) = LightClient::relay_chain(POLKADOT_SPEC)?;
// Connect the light client to some parachain by giving a chain spec for it.
let asset_hub_rpc = lightclient.parachain(ASSET_HUB_SPEC)?;
// Now, we can create Subxt clients from these Smoldot backed RPC clients:
let polkadot_api = OnlineClient::<PolkadotConfig>::from_rpc_client(polkadot_rpc).await?;
let asset_hub_api = OnlineClient::<PolkadotConfig>::from_rpc_client(asset_hub_rpc).await?;
```
This interface mirrors the requirement that we must connect to a relay chain before we can connect to a parachain. It also moves the light client specific logic into an `RpcClientT` implementation, rather than exposing it as a `subxt::client::LightClient`.
### Typed Storage Keys ([#1419](https://github.com/paritytech/subxt/pull/1419))
This PR changes the storage interface so that, where possible, we now also decode the storage keys as well as the values when iterating over storage entries:
```rust
#[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata_small.scale")]
pub mod polkadot {}
// Create a new API client, configured to talk to Polkadot nodes.
let api = OnlineClient::<PolkadotConfig>::new().await?;
// Build a storage query to iterate over account information.
let storage_query = polkadot::storage().system().account_iter();
// Get back an iterator of results (here, we are fetching 10 items at
// a time from the node, but we always iterate over one at a time).
let mut results = api.storage().at_latest().await?.iter(storage_query).await?;
while let Some(Ok(kv)) = results.next().await {
// We used to get a tuple of key bytes + value. Now we get back a
// `kv` struct containing the bytes and value as well as the actual
// decoded keys:
println!("Decoded key(s): {:?}", kv.keys);
println!("Key bytes: 0x{}", hex::encode(&kv.key_bytes));
println!("Value: {:?}", kv.value);
}
```
When using the static interface, keys come back as a tuple of values corresponding to the different hashers used in constructing the key. When using a dynamic interface, keys will be encoded/decoded from the type given so long as it implements `subxt::storage::StorageKey`, eg `Vec<scale_value::Value>`.
### Extrinsic Params Refinement ([#1439](https://github.com/paritytech/subxt/pull/1439))
Prior to this PR, one could configure extrinsic signed extensions by providing some params like so:
```rust
// Configure the transaction parameters; we give a small tip and set the
// transaction to live for 32 blocks from the `latest_block` above:
let tx_params = Params::new()
.tip(1_000)
.mortal(latest_block.header(), 32)
.build();
let hash = api.tx().sign_and_submit(&tx, &from, tx_params).await?;
```
If you want to customize the account nonce, you'd use a different call like `create_signed_with_nonce` instead.
One of the downsides of the above approach is that, if you don't provide any explicit params, transactions will be immortal by default (because the signed extensions didn't have the information to do any better).
Now, with the help of a `RefineParams` trait, transactions will default to being mortal and living for 32 blocks unless an explicit mortality is provided as above.
One notable change is that the offline-only `create_signed_with_nonce` and `create_partial_signed_with_nonce` functions have lost the `_with_nonce` suffix. Since we can't discover nonce/mortality settings offline, you should now provide `Params` and set an explicit nonce (and mortality, if you like) when using these calls, otherwise the nonce will be set to 0 and the mortality to `Immortal`.
For a full list of changes, please see the following:
### Added
- Reworked light client ([#1475](https://github.com/paritytech/subxt/pull/1475))
- `no_std` compatibility for `subxt-signer` ([#1477](https://github.com/paritytech/subxt/pull/1477))
- Typed Storage Keys ([#1419](https://github.com/paritytech/subxt/pull/1419))
- Extrinsic Params Refinement ([#1439](https://github.com/paritytech/subxt/pull/1439))
- Make storage_page_size for the LegacyBackend configurable ([#1458](https://github.com/paritytech/subxt/pull/1458))
- `no_std` compatibility for `subxt-metadata` ([#1401](https://github.com/paritytech/subxt/pull/1401))
- Experimental `reconnecting-rpc-client` ([#1396](https://github.com/paritytech/subxt/pull/1396))
### Changed
- `scale-type-resolver` integration ([#1460](https://github.com/paritytech/subxt/pull/1460))
- subxt: Derive `std::cmp` traits for subxt payloads and addresses ([#1429](https://github.com/paritytech/subxt/pull/1429))
- CLI: Return error on wrongly specified type paths ([#1397](https://github.com/paritytech/subxt/pull/1397))
- rpc v2: chainhead support multiple finalized block hashes in `FollowEvent::Initialized` ([#1476](https://github.com/paritytech/subxt/pull/1476))
- rpc v2: rename transaction to transactionWatch ([#1399](https://github.com/paritytech/subxt/pull/1399))
### Fixed
- Avoid a panic in case we try decoding naff bytes ([#1444](https://github.com/paritytech/subxt/pull/1444))
- Fix error mapping to wrong transaction status ([#1445](https://github.com/paritytech/subxt/pull/1445))
- Update DispatchError to match latest in polkadot-sdk ([#1442](https://github.com/paritytech/subxt/pull/1442))
- Handle errors when fetching storage keys from Unstablebackend ([#1440](https://github.com/paritytech/subxt/pull/1440))
- Swap type aliases around to be semantically correct ([#1441](https://github.com/paritytech/subxt/pull/1441))
## [0.34.0] - 2024-01-23
This release introduces a bunch of features that make subxt easier to use. Let's look at a few of them.
### Codegen - Integrating [`scale-typegen`](https://github.com/paritytech/scale-typegen) and adding type aliases ([#1249](https://github.com/paritytech/subxt/pull/1249))
@@ -23,7 +130,7 @@ If you provide an invalid type path, the macro will tell you so. It also suggest
```rust
#[subxt::subxt(
runtime_metadata_path = "metadata.scale",
runtime_metadata_path = "metadata.scale",
derive_for_type(path = "Junctions", derive = "Clone")
)]
pub mod polkadot {}
@@ -34,7 +141,7 @@ This gives you a compile-time error like this:
```md
Type `Junctions` does not exist at path `Junctions`
A type with the same name is present at:
A type with the same name is present at:
xcm::v3::junctions::Junctions
xcm::v2::multilocation::Junctions
```
@@ -78,7 +185,7 @@ Our CLI tool now allows you to explore runtime APIs and events ([#1290](https://
# Show details about a runtime API call:
subxt explore --url wss://westend-rpc.polkadot.io api StakingAPI nominations_quota
# Execute a runtime API call from the CLI:
subxt explore --url wss://westend-rpc.polkadot.io api core version -e
subxt explore --url wss://westend-rpc.polkadot.io api core version -e
# Discover what events a pallet can emit:
subxt explore --url wss://westend-rpc.polkadot.io pallet Balances events
```
Generated
+732 -488
View File
File diff suppressed because it is too large Load Diff
+61 -49
View File
@@ -2,6 +2,7 @@
members = [
"cli",
"codegen",
"core",
"lightclient",
"testing/substrate-runner",
"testing/test-runtime",
@@ -18,13 +19,20 @@ members = [
# We exclude any crates that would depend on non mutually
# exclusive feature flags and thus can't compile with the
# workspace:
exclude = ["testing/wasm-rpc-tests", "testing/wasm-lightclient-tests", "signer/wasm-tests", "examples/wasm-example", "examples/parachain-example"]
exclude = [
"testing/no-std-tests",
"testing/wasm-rpc-tests",
"testing/wasm-lightclient-tests",
"signer/wasm-tests",
"examples/wasm-example",
"examples/parachain-example"
]
resolver = "2"
[workspace.package]
authors = ["Parity Technologies <admin@parity.io>"]
edition = "2021"
version = "0.34.0"
version = "0.35.0"
rust-version = "1.74.0"
license = "Apache-2.0 OR GPL-3.0"
repository = "https://github.com/paritytech/subxt"
@@ -51,53 +59,56 @@ type_complexity = "allow"
all = "deny"
[workspace.dependencies]
async-trait = "0.1.74"
async-trait = "0.1.79"
assert_matches = "1.5.0"
base58 = { version = "0.2.0" }
bitvec = { version = "1", default-features = false }
blake2 = { version = "0.10.6", default-features = false }
clap = { version = "4.4.18", features = ["derive", "cargo"] }
clap = { version = "4.5.3", features = ["derive", "cargo"] }
cfg-if = "1.0.0"
criterion = "0.4"
codec = { package = "parity-scale-codec", version = "3.4.0", default-features = false }
color-eyre = "0.6.1"
codec = { package = "parity-scale-codec", version = "3.6.9", default-features = false }
color-eyre = "0.6.3"
console_error_panic_hook = "0.1.7"
darling = "0.20.3"
derivative = "2.2.0"
either = "1.9.0"
frame-metadata = { version = "16.0.0", default-features = false, features = ["current", "std"] }
darling = "0.20.8"
derive-where = "1.2.7"
derive_more = "0.99.17"
either = { version = "1.10.0", default-features = false }
frame-metadata = { version = "16.0.0", default-features = false }
futures = { version = "0.3.30", default-features = false, features = ["std"] }
getrandom = { version = "0.2", default-features = false }
hex = "0.4.3"
hashbrown = "0.14.3"
hex = { version = "0.4.3", default-features = false }
heck = "0.4.1"
impl-serde = { version = "0.4.0" }
impl-serde = { version = "0.4.0", default-features = false }
indoc = "2"
jsonrpsee = { version = "0.21" }
jsonrpsee = { version = "0.22" }
pretty_assertions = "1.4.0"
primitive-types = { version = "0.12.2", default-features = false, features = ["codec", "scale-info", "serde"] }
primitive-types = { version = "0.12.2", default-features = false }
proc-macro-error = "1.0.4"
proc-macro2 = "1.0.78"
proc-macro2 = "1.0.79"
quote = "1.0.35"
regex = "1.10.3"
scale-info = "2.10.0"
scale-value = "0.13.0"
scale-bits = "0.4.0"
scale-decode = "0.10.0"
scale-encode = "0.5.0"
serde = { version = "1.0.196" }
serde_json = { version = "1.0.113" }
regex = { version = "1.10.3", default-features = false }
scale-info = { version = "2.11.0", default-features = false }
scale-value = { version = "0.14.1", default-features = false }
scale-bits = { version = "0.5.0", default-features = false }
scale-decode = { version = "0.11.1", default-features = false }
scale-encode = { version = "0.6.0", default-features = false }
scale-typegen = "0.4.2"
scale-typegen-description = "0.4.2"
serde = { version = "1.0.197", default-features = false, features = ["derive"] }
serde_json = { version = "1.0.114", default-features = false }
syn = { version = "2.0.15", features = ["full", "extra-traits"] }
thiserror = "1.0.53"
tokio = { version = "1.35", default-features = false }
tracing = "0.1.40"
thiserror = "1.0.58"
tokio = { version = "1.36", default-features = false }
tracing = { version = "0.1.40", default-features = false }
tracing-wasm = "0.2.1"
tracing-subscriber = "0.3.18"
trybuild = "1.0.89"
trybuild = "1.0.90"
url = "2.5.0"
wabt = "0.10.0"
wasm-bindgen-test = "0.3.24"
which = "5.0.0"
scale-typegen-description = "0.1.0"
scale-typegen = "0.1.1"
strip-ansi-escapes = "0.2.0"
# Light client support:
@@ -106,42 +117,43 @@ smoldot-light = { version = "0.15.0", default-features = false }
tokio-stream = "0.1.14"
futures-util = "0.3.30"
rand = "0.8.5"
pin-project = "1.1.4"
pin-project = "1.1.5"
# Light client wasm:
web-sys = { version = "0.3.67", features = ["BinaryType", "CloseEvent", "MessageEvent", "WebSocket"] }
wasm-bindgen = "0.2.90"
web-sys = { version = "0.3.69", features = ["BinaryType", "CloseEvent", "MessageEvent", "WebSocket"] }
wasm-bindgen = "0.2.92"
send_wrapper = "0.6.0"
js-sys = "0.3.67"
wasm-bindgen-futures = "0.4.38"
js-sys = "0.3.69"
wasm-bindgen-futures = "0.4.42"
futures-timer = "3"
instant = { version = "0.1.12", default-features = false }
tokio-util = "0.7.10"
# Substrate crates:
sp-core = { version = "28.0.0", default-features = false }
sp-core-hashing = { version = "15.0.0", default-features = false }
sp-runtime = "31.0.0"
sp-keyring = "31.0.0"
sp-core = { version = "31.0.0", default-features = false }
sp-crypto-hashing = { version = "0.1.0", default-features = false }
sp-runtime = "34.0.0"
sp-keyring = "34.0.0"
# Subxt workspace crates:
subxt = { version = "0.34.0", path = "subxt", default-features = false }
subxt-macro = { version = "0.34.0", path = "macro" }
subxt-metadata = { version = "0.34.0", path = "metadata" }
subxt-codegen = { version = "0.34.0", path = "codegen" }
subxt-signer = { version = "0.34.0", path = "signer" }
subxt-lightclient = { version = "0.34.0", path = "lightclient", default-features = false }
subxt = { version = "0.35.0", path = "subxt", default-features = false }
subxt-core = { version = "0.35.0", path = "core", default-features = false }
subxt-macro = { version = "0.35.0", path = "macro" }
subxt-metadata = { version = "0.35.0", path = "metadata", default-features = false }
subxt-codegen = { version = "0.35.0", path = "codegen" }
subxt-signer = { version = "0.35.0", path = "signer", default-features = false }
subxt-lightclient = { version = "0.35.0", path = "lightclient", default-features = false }
test-runtime = { path = "testing/test-runtime" }
substrate-runner = { path = "testing/substrate-runner" }
# subxt-signer deps that I expect aren't useful anywhere else:
bip39 = "2.0.0"
hmac = "0.12.1"
bip39 = { version = "2.0.0", default-features = false }
hmac = { version = "0.12.1", default-features = false }
pbkdf2 = { version = "0.12.2", default-features = false }
schnorrkel = "0.11.4"
secp256k1 = "0.28.1"
schnorrkel = { version = "0.11.4", default-features = false }
secp256k1 = { version = "0.28.2", default-features = false }
secrecy = "0.8.0"
sha2 = "0.10.8"
sha2 = { version = "0.10.8", default-features = false }
zeroize = { version = "1", default-features = false }
[profile.dev.package.smoldot-light]
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
Binary file not shown.
+1
View File
@@ -15,6 +15,7 @@ description = "Command line utilities for working with subxt codegen"
[[bin]]
name = "subxt"
path = "src/main.rs"
doc = false
[lints]
workspace = true
+1 -1
View File
@@ -46,7 +46,7 @@ pub struct Opts {
#[clap(long = "substitute-type", value_parser = substitute_type_parser)]
substitute_types: Vec<(String, String)>,
/// The `subxt` crate access path in the generated code.
/// Defaults to `::subxt`.
/// Defaults to `::subxt::ext::subxt_core`.
#[clap(long = "crate")]
crate_path: Option<String>,
/// Do not generate documentation for the runtime API code.
+1 -4
View File
@@ -154,10 +154,7 @@ fn mocked_offline_client(metadata: Metadata) -> OfflineClient<SubstrateConfig> {
H256::from_str("91b171bb158e2d3848fa23a9f1c25182fb8e20313b2c1eb49219da7a70ce90c3")
.expect("Valid hash; qed");
let runtime_version = subxt::backend::RuntimeVersion {
spec_version: 9370,
transaction_version: 20,
};
let runtime_version = subxt::client::RuntimeVersion::new(9370, 20);
OfflineClient::<SubstrateConfig>::new(genesis_hash, runtime_version, metadata)
}
@@ -64,8 +64,11 @@ pub fn explore_constants(
.highlight();
// value
let value =
scale_value::scale::decode_as_type(&mut constant.value(), constant.ty(), metadata.types())?;
let value = scale_value::scale::decode_as_type(
&mut constant.value(),
&constant.ty(),
metadata.types(),
)?;
let value = format_scale_value(&value).indent(4);
writedoc!(
+1 -1
View File
@@ -169,7 +169,7 @@ pub async fn explore_storage(
{value_str}
"}?;
let key_bytes = value.encode_as_type(type_id, metadata.types())?;
let key_bytes = value.encode_as_type(&type_id, metadata.types())?;
let bytes_composite = Value::from_bytes(key_bytes);
vec![bytes_composite]
}
+3 -4
View File
@@ -19,7 +19,7 @@ use subxt_metadata::RuntimeApiMetadata;
/// Runs for a specified runtime API trait.
/// Cases to consider:
/// ```norun
/// ```txt
/// method is:
/// None => Show pallet docs + available methods
/// Some (invalid) => Show Error + available methods
@@ -161,9 +161,8 @@ pub async fn run<'a>(
{value_str}
"}?;
// encode, then decode. This ensures that the scale value is of the correct shape for the param:
let bytes = value.encode_as_type(ty.ty, metadata.types())?;
let value = Value::decode_as_type(&mut &bytes[..], ty.ty, metadata.types())?
.map_context(|_| ());
let bytes = value.encode_as_type(&ty.ty, metadata.types())?;
let value = Value::decode_as_type(&mut &bytes[..], &ty.ty, metadata.types())?;
Ok(value)
})
.collect::<color_eyre::Result<Vec<Value>>>()?;
+1 -1
View File
@@ -26,7 +26,7 @@ syn = { workspace = true }
scale-info = { workspace = true }
subxt-metadata = { workspace = true }
jsonrpsee = { workspace = true, features = ["async-client", "client-ws-transport-native-tls", "http-client"], optional = true }
hex = { workspace = true }
hex = { workspace = true, features = ["std"] }
tokio = { workspace = true, features = ["rt-multi-thread"], optional = true }
thiserror = { workspace = true }
scale-typegen = { workspace = true }
+10 -5
View File
@@ -6,6 +6,7 @@ use super::CodegenError;
use heck::{ToSnakeCase as _, ToUpperCamelCase as _};
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
use scale_typegen::typegen::ir::ToTokensWithSettings;
use scale_typegen::{typegen::ir::type_ir::CompositeIRKind, TypeGenerator};
use subxt_metadata::PalletMetadata;
@@ -16,7 +17,7 @@ use subxt_metadata::PalletMetadata;
///
/// - `type_gen` - [`scale_typegen::TypeGenerator`] that contains settings and all types from the runtime metadata.
/// - `pallet` - Pallet metadata from which the calls are generated.
/// - `crate_path` - The crate path under which subxt is located, e.g. `::subxt` when using subxt as a dependency.
/// - `crate_path` - The crate path under which the `subxt-core` crate is located, e.g. `::subxt::ext::subxt_core` when using subxt as a dependency.
pub fn generate_calls(
type_gen: &TypeGenerator,
pallet: &PalletMetadata,
@@ -41,9 +42,9 @@ pub fn generate_calls(
.iter()
.map(|(name, field)| {
// Note: fn_arg_type this is relative the type path of the type alias when prefixed with `types::`, e.g. `set_max_code_size::New`
let fn_arg_type = &field.type_path;
let fn_arg_type = field.type_path.to_token_stream(type_gen.settings());
let call_arg = if field.is_boxed {
quote! { #name: ::std::boxed::Box::new(#name) }
quote! { #name: #crate_path::alloc::boxed::Box::new(#name) }
} else {
quote! { #name }
};
@@ -71,7 +72,9 @@ pub fn generate_calls(
let docs = &var.composite.docs;
// this converts the composite into a full struct type. No Type Parameters needed here.
let struct_def = type_gen.upcast_composite(&var.composite);
let struct_def = type_gen
.upcast_composite(&var.composite)
.to_token_stream(type_gen.settings());
let alias_mod = var.type_alias_mod;
// The call structure's documentation was stripped above.
let call_struct = quote! {
@@ -105,7 +108,9 @@ pub fn generate_calls(
.into_iter()
.unzip();
let call_type = type_gen.resolve_type_path(call_ty)?;
let call_type = type_gen
.resolve_type_path(call_ty)?
.to_token_stream(type_gen.settings());
let call_ty = type_gen.resolve_type(call_ty)?;
let docs = type_gen.docs_from_scale_info(&call_ty.docs);
+5 -2
View File
@@ -5,6 +5,7 @@
use heck::ToSnakeCase as _;
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
use scale_typegen::typegen::ir::ToTokensWithSettings;
use scale_typegen::TypeGenerator;
use subxt_metadata::PalletMetadata;
@@ -31,7 +32,7 @@ use super::CodegenError;
///
/// - `type_gen` - [`scale_typegen::TypeGenerator`] that contains settings and all types from the runtime metadata.
/// - `pallet` - Pallet metadata from which the constants are generated.
/// - `crate_path` - The crate path under which subxt is located, e.g. `::subxt` when using subxt as a dependency.
/// - `crate_path` - The crate path under which the `subxt-core` crate is located, e.g. `::subxt::ext::subxt_core` when using subxt as a dependency.
pub fn generate_constants(
type_gen: &TypeGenerator,
pallet: &PalletMetadata,
@@ -55,7 +56,9 @@ pub fn generate_constants(
));
};
let return_ty = type_gen.resolve_type_path(constant.ty())?;
let return_ty = type_gen
.resolve_type_path(constant.ty())?
.to_token_stream(type_gen.settings());
let docs = constant.docs();
let docs = type_gen
.settings()
+4 -3
View File
@@ -3,12 +3,13 @@
// see LICENSE for license details.
use heck::ToSnakeCase as _;
use scale_typegen::typegen::ir::ToTokensWithSettings;
use scale_typegen::TypeGenerator;
use std::collections::HashSet;
use subxt_metadata::{CustomValueMetadata, Metadata};
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens};
use quote::quote;
/// Generate the custom values mod, if there are any custom values in the metadata. Else returns None.
pub fn generate_custom_values(
@@ -60,8 +61,8 @@ fn generate_custom_value_fn(
let return_ty = type_gen
.resolve_type_path(custom_value.type_id())
.expect("type is in metadata; qed")
.to_token_stream();
let decodable = quote!(#crate_path::custom_values::Yes);
.to_token_stream(type_gen.settings());
let decodable = quote!(#crate_path::utils::Yes);
(return_ty, decodable)
} else {
// if type registry does not contain the type, we can just return the Encoded scale bytes.
+4 -1
View File
@@ -8,6 +8,7 @@ use scale_typegen::TypeGenerator;
use subxt_metadata::PalletMetadata;
use super::CodegenError;
use scale_typegen::typegen::ir::ToTokensWithSettings;
/// Generate error type alias from the provided pallet metadata.
pub fn generate_error_type_alias(
@@ -18,7 +19,9 @@ pub fn generate_error_type_alias(
return Ok(quote!());
};
let error_type = type_gen.resolve_type_path(error_ty)?;
let error_type = type_gen
.resolve_type_path(error_ty)?
.to_token_stream(type_gen.settings());
let error_ty = type_gen.resolve_type(error_ty)?;
let docs = &error_ty.docs;
let docs = type_gen
+9 -5
View File
@@ -2,13 +2,13 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::CodegenError;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use scale_typegen::typegen::ir::ToTokensWithSettings;
use scale_typegen::TypeGenerator;
use subxt_metadata::PalletMetadata;
use super::CodegenError;
/// Generate events from the provided pallet metadata.
///
/// The function creates a new module named `events` under the pallet's module.
@@ -37,7 +37,7 @@ use super::CodegenError;
///
/// - `type_gen` - [`scale_typegen::TypeGenerator`] that contains settings and all types from the runtime metadata.
/// - `pallet` - Pallet metadata from which the events are generated.
/// - `crate_path` - The crate path under which subxt is located, e.g. `::subxt` when using subxt as a dependency.
/// - `crate_path` - The crate path under which the `subxt-core` crate is located, e.g. `::subxt::ext::subxt_core` when using subxt as a dependency.
pub fn generate_events(
type_gen: &TypeGenerator,
pallet: &PalletMetadata,
@@ -56,7 +56,9 @@ pub fn generate_events(
let event_struct_name = &var.composite.name;
let event_name = var.variant_name;
let alias_mod = var.type_alias_mod;
let struct_def = type_gen.upcast_composite(&var.composite);
let struct_def = type_gen
.upcast_composite(&var.composite)
.to_token_stream(type_gen.settings());
quote! {
#struct_def
#alias_mod
@@ -68,7 +70,9 @@ pub fn generate_events(
}
});
let event_type = type_gen.resolve_type_path(event_ty)?;
let event_type = type_gen
.resolve_type_path(event_ty)?
.to_token_stream(type_gen.settings());
let event_ty = type_gen.resolve_type(event_ty)?;
let docs = &event_ty.docs;
let docs = type_gen
+18 -7
View File
@@ -13,6 +13,7 @@ mod runtime_apis;
mod storage;
use scale_typegen::typegen::ir::type_ir::{CompositeFieldIR, CompositeIR, CompositeIRKind};
use scale_typegen::typegen::ir::ToTokensWithSettings;
use scale_typegen::typegen::type_params::TypeParameters;
use scale_typegen::typegen::type_path::TypePath;
use scale_typegen::TypeGenerator;
@@ -44,7 +45,7 @@ impl RuntimeGenerator {
///
/// Supported versions: v14 and v15.
pub fn new(mut metadata: Metadata) -> Self {
metadata.ensure_unique_type_paths();
scale_typegen::utils::ensure_unique_type_paths(metadata.types_mut());
RuntimeGenerator { metadata }
}
@@ -72,7 +73,9 @@ impl RuntimeGenerator {
subxt_type_gen_settings(derives, type_substitutes, &crate_path, should_gen_docs);
let type_gen = TypeGenerator::new(self.metadata.types(), &settings);
let types_mod = type_gen.generate_types_mod()?;
let types_mod = type_gen
.generate_types_mod()?
.to_token_stream(type_gen.settings());
let mod_ident = &item_mod_ir.ident;
let rust_items = item_mod_ir.rust_items();
@@ -121,7 +124,9 @@ impl RuntimeGenerator {
subxt_type_gen_settings(derives, type_substitutes, &crate_path, should_gen_docs);
let type_gen = TypeGenerator::new(self.metadata.types(), &settings);
let types_mod = type_gen.generate_types_mod()?;
let types_mod = type_gen
.generate_types_mod()?
.to_token_stream(type_gen.settings());
let types_mod_ident = type_gen.types_mod_ident();
let pallets_with_mod_names = self
.metadata
@@ -214,9 +219,15 @@ impl RuntimeGenerator {
// Fetch the paths of the outer enums.
// Substrate exposes those under `kitchensink_runtime`, while Polkadot under `polkadot_runtime`.
let call_path = type_gen.resolve_type_path(self.metadata.outer_enums().call_enum_ty())?;
let event_path = type_gen.resolve_type_path(self.metadata.outer_enums().event_enum_ty())?;
let error_path = type_gen.resolve_type_path(self.metadata.outer_enums().error_enum_ty())?;
let call_path = type_gen
.resolve_type_path(self.metadata.outer_enums().call_enum_ty())?
.to_token_stream(type_gen.settings());
let event_path = type_gen
.resolve_type_path(self.metadata.outer_enums().event_enum_ty())?
.to_token_stream(type_gen.settings());
let error_path = type_gen
.resolve_type_path(self.metadata.outer_enums().error_enum_ty())?
.to_token_stream(type_gen.settings());
let custom_values = generate_custom_values(&self.metadata, &type_gen, &crate_path);
@@ -399,7 +410,7 @@ pub fn generate_type_alias_mod(
.expect("composite name in snake_case should be a valid identifier");
let mut modify_field_to_be_type_alias = |field: &mut CompositeFieldIR, alias_name: Ident| {
let type_path = &field.type_path;
let type_path = field.type_path.to_token_stream(type_gen.settings());
aliases.push(quote!(pub type #alias_name = #type_path;));
let type_alias_path: syn::Path = parse_quote!(#alias_mod_name::#alias_name);
+3 -2
View File
@@ -7,6 +7,7 @@ use std::collections::HashSet;
use heck::ToSnakeCase as _;
use heck::ToUpperCamelCase as _;
use scale_typegen::typegen::ir::ToTokensWithSettings;
use scale_typegen::TypeGenerator;
use subxt_metadata::{Metadata, RuntimeApiMetadata};
@@ -77,7 +78,7 @@ fn generate_runtime_api(
// Generate alias for runtime type.
let ty = type_gen
.resolve_type_path(input.ty)
.expect("runtime api input type is in metadata; qed");
.expect("runtime api input type is in metadata; qed").to_token_stream(type_gen.settings());
let aliased_param = quote!( pub type #alias_name = #ty; );
// Structures are placed on the same level as the alias module.
@@ -96,7 +97,7 @@ fn generate_runtime_api(
let type_aliases = inputs.iter().map(|(_, _, _, aliased_param)| aliased_param);
let types_mod_ident = type_gen.types_mod_ident();
let output = type_gen.resolve_type_path(method.output_ty())?;
let output = type_gen.resolve_type_path(method.output_ty())?.to_token_stream(type_gen.settings());
let aliased_module = quote!(
pub mod #method_name {
use super::#types_mod_ident;
+138 -32
View File
@@ -8,11 +8,13 @@ use quote::{format_ident, quote};
use scale_info::TypeDef;
use scale_typegen::{typegen::type_path::TypePath, TypeGenerator};
use subxt_metadata::{
PalletMetadata, StorageEntryMetadata, StorageEntryModifier, StorageEntryType,
PalletMetadata, StorageEntryMetadata, StorageEntryModifier, StorageEntryType, StorageHasher,
};
use super::CodegenError;
use scale_typegen::typegen::ir::ToTokensWithSettings;
/// Generate functions which create storage addresses from the provided pallet's metadata.
/// These addresses can be used to access and iterate over storage values.
///
@@ -20,7 +22,7 @@ use super::CodegenError;
///
/// - `type_gen` - [`scale_typegen::TypeGenerator`] that contains settings and all types from the runtime metadata.
/// - `pallet` - Pallet metadata from which the storage items are generated.
/// - `crate_path` - The crate path under which subxt is located, e.g. `::subxt` when using subxt as a dependency.
/// - `crate_path` - The crate path under which the `subxt-core` crate is located, e.g. `::subxt::ext::subxt_core` when using subxt as a dependency.
pub fn generate_storage(
type_gen: &TypeGenerator,
pallet: &PalletMetadata,
@@ -69,49 +71,90 @@ fn generate_storage_entry_fns(
let storage_entry_ty = storage_entry.entry_type().value_ty();
let storage_entry_value_ty = type_gen
.resolve_type_path(storage_entry_ty)
.expect("storage type is in metadata; qed");
.expect("storage type is in metadata; qed")
.to_token_stream(type_gen.settings());
let alias_name = format_ident!("{}", storage_entry.name().to_upper_camel_case());
let alias_module_name = format_ident!("{snake_case_name}");
let alias_storage_path = quote!( types::#alias_module_name::#alias_name );
let storage_entry_map = |idx, id| {
let ident: Ident = format_ident!("_{}", idx);
struct MapEntryKey {
arg_name: Ident,
alias_type_def: TokenStream,
alias_type_path: TokenStream,
hasher: StorageHasher,
}
let map_entry_key = |idx, id, hasher| -> MapEntryKey {
let arg_name: Ident = format_ident!("_{}", idx);
let ty_path = type_gen
.resolve_type_path(id)
.expect("type is in metadata; qed");
let alias_name = format_ident!("Param{}", idx);
let alias_type = primitive_type_alias(&ty_path);
let alias_type = primitive_type_alias(&ty_path, type_gen.settings());
let alias_type = quote!( pub type #alias_name = #alias_type; );
let path_to_alias = quote!( types::#alias_module_name::#alias_name );
let alias_type_def = quote!( pub type #alias_name = #alias_type; );
let alias_type_path = quote!( types::#alias_module_name::#alias_name );
(ident, alias_type, path_to_alias)
MapEntryKey {
arg_name,
alias_type_def,
alias_type_path,
hasher,
}
};
let keys: Vec<(Ident, TokenStream, TokenStream)> = match storage_entry.entry_type() {
let keys: Vec<MapEntryKey> = match storage_entry.entry_type() {
StorageEntryType::Plain(_) => vec![],
StorageEntryType::Map { key_ty, .. } => {
StorageEntryType::Map {
key_ty, hashers, ..
} => {
match &type_gen
.resolve_type(*key_ty)
.expect("key type should be present")
.type_def
{
// An N-map; return each of the keys separately.
TypeDef::Tuple(tuple) => tuple
.fields
.iter()
.enumerate()
.map(|(idx, f)| storage_entry_map(idx, f.id))
.collect::<Vec<_>>(),
TypeDef::Tuple(tuple) => {
let key_count = tuple.fields.len();
let hasher_count = hashers.len();
if hasher_count != 1 && hasher_count != key_count {
return Err(CodegenError::InvalidStorageHasherCount {
storage_entry_name: storage_entry.name().to_owned(),
key_count,
hasher_count,
});
}
let mut map_entry_keys: Vec<MapEntryKey> = vec![];
for (idx, field) in tuple.fields.iter().enumerate() {
// Note: these are in bounds because of the checks above, qed;
let hasher = if idx >= hasher_count {
hashers[0]
} else {
hashers[idx]
};
map_entry_keys.push(map_entry_key(idx, field.id, hasher));
}
map_entry_keys
}
// A map with a single key; return the single key.
_ => {
vec![storage_entry_map(0, *key_ty)]
let Some(hasher) = hashers.first() else {
return Err(CodegenError::InvalidStorageHasherCount {
storage_entry_name: storage_entry.name().to_owned(),
key_count: 1,
hasher_count: 0,
});
};
vec![map_entry_key(0, *key_ty, *hasher)]
}
}
}
};
let pallet_name = pallet.name();
let storage_name = storage_entry.name();
let Some(storage_hash) = pallet.storage_hash(storage_name) else {
@@ -129,10 +172,14 @@ fn generate_storage_entry_fns(
.unwrap_or_default();
let is_defaultable_type = match storage_entry.modifier() {
StorageEntryModifier::Default => quote!(#crate_path::storage::address::Yes),
StorageEntryModifier::Default => quote!(#crate_path::utils::Yes),
StorageEntryModifier::Optional => quote!(()),
};
// Note: putting `#crate_path::storage::address::StaticStorageKey` into this variable is necessary
// to get the line width below a certain limit. If not done, rustfmt will refuse to format the following big expression.
// for more information see [this post](https://users.rust-lang.org/t/rustfmt-silently-fails-to-work/75485/4).
let static_storage_key: TokenStream = quote!(#crate_path::storage::address::StaticStorageKey);
let all_fns = (0..=keys.len()).map(|n_keys| {
let keys_slice = &keys[..n_keys];
let (fn_name, is_fetchable, is_iterable) = if n_keys == keys.len() {
@@ -146,12 +193,65 @@ fn generate_storage_entry_fns(
};
(fn_name, false, true)
};
let is_fetchable_type = is_fetchable.then_some(quote!(#crate_path::storage::address::Yes)).unwrap_or(quote!(()));
let is_iterable_type = is_iterable.then_some(quote!(#crate_path::storage::address::Yes)).unwrap_or(quote!(()));
let key_impls = keys_slice.iter().map(|(field_name, _, _)| quote!( #crate_path::storage::address::make_static_storage_map_key(#field_name.borrow()) ));
let key_args = keys_slice.iter().map(|(field_name, _, path_to_alias )| {
quote!( #field_name: impl ::std::borrow::Borrow<#path_to_alias> )
});
let is_fetchable_type = is_fetchable
.then_some(quote!(#crate_path::utils::Yes))
.unwrap_or(quote!(()));
let is_iterable_type = is_iterable
.then_some(quote!(#crate_path::utils::Yes))
.unwrap_or(quote!(()));
let (keys, keys_type) = match keys_slice.len() {
0 => (quote!(()), quote!(())),
1 => {
let key = &keys_slice[0];
if key.hasher.ends_with_key() {
let arg = &key.arg_name;
let keys = quote!(#static_storage_key::new(#arg.borrow()));
let path = &key.alias_type_path;
let path = quote!(#static_storage_key<#path>);
(keys, path)
} else {
(quote!(()), quote!(()))
}
}
_ => {
let keys_iter = keys_slice.iter().map(
|MapEntryKey {
arg_name, hasher, ..
}| {
if hasher.ends_with_key() {
quote!( #static_storage_key::new(#arg_name.borrow()) )
} else {
quote!(())
}
},
);
let keys = quote!( (#(#keys_iter,)*) );
let paths_iter = keys_slice.iter().map(
|MapEntryKey {
alias_type_path,
hasher,
..
}| {
if hasher.ends_with_key() {
quote!( #static_storage_key<#alias_type_path> )
} else {
quote!(())
}
},
);
let paths = quote!( (#(#paths_iter,)*) );
(keys, paths)
}
};
let key_args = keys_slice.iter().map(
|MapEntryKey {
arg_name,
alias_type_path,
..
}| quote!( #arg_name: impl ::core::borrow::Borrow<#alias_type_path> ),
);
quote!(
#docs
@@ -159,7 +259,7 @@ fn generate_storage_entry_fns(
&self,
#(#key_args,)*
) -> #crate_path::storage::address::Address::<
#crate_path::storage::address::StaticStorageMapKey,
#keys_type,
#alias_storage_path,
#is_fetchable_type,
#is_defaultable_type,
@@ -168,14 +268,16 @@ fn generate_storage_entry_fns(
#crate_path::storage::address::Address::new_static(
#pallet_name,
#storage_name,
vec![#(#key_impls,)*],
#keys,
[#(#storage_hash,)*]
)
}
)
});
let alias_types = keys.iter().map(|(_, alias_type, _)| alias_type);
let alias_types = keys
.iter()
.map(|MapEntryKey { alias_type_def, .. }| alias_type_def);
let types_mod_ident = type_gen.types_mod_ident();
// Generate type alias for the return type only, since
@@ -198,16 +300,20 @@ fn generate_storage_entry_fns(
))
}
fn primitive_type_alias(type_path: &TypePath) -> TokenStream {
fn primitive_type_alias(
type_path: &TypePath,
settings: &scale_typegen::TypeGeneratorSettings,
) -> TokenStream {
// Vec<T> is cast to [T]
if let Some(ty) = type_path.vec_type_param() {
let ty = ty.to_token_stream(settings);
return quote!([#ty]);
}
// String is cast to str
if type_path.is_string() {
return quote!(::core::primitive::str);
}
quote!(#type_path)
type_path.to_token_stream(settings)
}
#[cfg(test)]
@@ -231,7 +337,7 @@ mod tests {
name,
modifier: v15::StorageEntryModifier::Optional,
ty: v15::StorageEntryType::Map {
hashers: vec![],
hashers: vec![v15::StorageHasher::Blake2_128Concat],
key,
value: meta_type::<bool>(),
},
@@ -325,7 +431,7 @@ mod tests {
let expected_storage_constructor = quote!(
fn #name_ident(
&self,
_0: impl ::std::borrow::Borrow<types::#name_ident::Param0>,
_0: impl ::core::borrow::Borrow<types::#name_ident::Param0>,
)
);
dbg!(&generated_str);
+12 -6
View File
@@ -39,15 +39,21 @@ pub enum CodegenError {
#[error("Call variant for type {0} must have all named fields. Make sure you are providing a valid substrate-based metadata")]
InvalidCallVariant(u32),
/// Type should be an variant/enum.
#[error(
"{0} type should be an variant/enum type. Make sure you are providing a valid substrate-based metadata"
)]
#[error("{0} type should be an variant/enum type. Make sure you are providing a valid substrate-based metadata")]
InvalidType(String),
/// Extrinsic call type could not be found.
#[error(
"Extrinsic call type could not be found. Make sure you are providing a valid substrate-based metadata"
)]
#[error("Extrinsic call type could not be found. Make sure you are providing a valid substrate-based metadata")]
MissingCallType,
/// There are too many or too few hashers.
#[error("Could not generate functions for storage entry {storage_entry_name}. There are {key_count} keys, but only {hasher_count} hashers. The number of hashers must equal the number of keys or be exactly 1.")]
InvalidStorageHasherCount {
/// The name of the storage entry
storage_entry_name: String,
/// Number of keys
key_count: usize,
/// Number of hashers
hasher_count: usize,
},
/// Cannot generate types.
#[error("Type Generation failed: {0}")]
TypeGeneration(#[from] TypegenError),
+10 -5
View File
@@ -25,6 +25,7 @@ use getrandom as _;
use api::RuntimeGenerator;
use proc_macro2::TokenStream as TokenStream2;
use scale_typegen::typegen::settings::AllocCratePath;
use scale_typegen::{
typegen::settings::substitutes::absolute_path, DerivesRegistry, TypeGeneratorSettings,
TypeSubstitutes, TypegenError,
@@ -77,7 +78,7 @@ pub struct CodegenBuilder {
impl Default for CodegenBuilder {
fn default() -> Self {
CodegenBuilder {
crate_path: syn::parse_quote!(::subxt),
crate_path: syn::parse_quote!(::subxt::ext::subxt_core),
use_default_derives: true,
use_default_substitutions: true,
generate_docs: true,
@@ -222,12 +223,12 @@ impl CodegenBuilder {
self.item_mod = item_mod;
}
/// Set the path to the `subxt` crate. By default, we expect it to be at `::subxt`.
/// Set the path to the `subxt` crate. By default, we expect it to be at `::subxt::ext::subxt_core`.
pub fn set_subxt_crate_path(&mut self, crate_path: syn::Path) {
self.crate_path = crate_path;
}
/// Generate an interface, assuming that the default path to the `subxt` crate is `::subxt`.
/// Generate an interface, assuming that the default path to the `subxt` crate is `::subxt::ext::subxt_core`.
/// If the `subxt` crate is not available as a top level dependency, use `generate` and provide
/// a valid path to the `subxt¦ crate.
pub fn generate(self, metadata: Metadata) -> Result<TokenStream2, CodegenError> {
@@ -295,7 +296,7 @@ impl CodegenBuilder {
/// The default [`scale_typegen::TypeGeneratorSettings`], subxt is using for generating code.
/// Useful for emulating subxt's code generation settings from e.g. subxt-explorer.
pub fn default_subxt_type_gen_settings() -> TypeGeneratorSettings {
let crate_path: syn::Path = parse_quote!(::subxt);
let crate_path: syn::Path = parse_quote!(::subxt::ext::subxt_core);
let derives = default_derives(&crate_path);
let substitutes = default_substitutes(&crate_path);
subxt_type_gen_settings(derives, substitutes, &crate_path, true)
@@ -316,6 +317,7 @@ fn subxt_type_gen_settings(
compact_as_type_path: Some(parse_quote!(#crate_path::ext::codec::CompactAs)),
compact_type_path: Some(parse_quote!(#crate_path::ext::codec::Compact)),
insert_codec_attributes: true,
alloc_crate_path: AllocCratePath::Custom(parse_quote!(#crate_path::alloc)),
}
}
@@ -387,7 +389,10 @@ fn default_substitutes(crate_path: &syn::Path) -> TypeSubstitutes {
parse_quote!(BTreeMap),
parse_quote!(#crate_path::utils::KeyedVec),
),
(parse_quote!(BTreeSet), parse_quote!(::std::vec::Vec)),
(
parse_quote!(BTreeSet),
parse_quote!(#crate_path::alloc::vec::Vec),
),
// The `UncheckedExtrinsic(pub Vec<u8>)` is part of the runtime API calls.
// The inner bytes represent the encoded extrinsic, however when deriving the
// `EncodeAsType` the bytes would be re-encoded. This leads to the bytes
+82
View File
@@ -0,0 +1,82 @@
[package]
name = "subxt-core"
version.workspace = true
authors.workspace = true
edition.workspace = true
rust-version.workspace = true
publish = true
license.workspace = true
readme = "README.md"
repository.workspace = true
documentation.workspace = true
homepage.workspace = true
description = "A no-std compatible subset of Subxt's functionality"
keywords = ["parity", "subxt", "extrinsic", "no-std"]
[features]
default = ["std"]
std = [
"codec/std",
"scale-info/std",
"scale-value/std",
"scale-bits/std",
"scale-decode/std",
"scale-encode/std",
"frame-metadata/std",
"subxt-metadata/std",
"hex/std",
"serde/std",
"serde_json/std",
"tracing/std",
"impl-serde/std",
"primitive-types/std",
]
substrate-compat = ["sp-core", "sp-runtime"]
[dependencies]
codec = { package = "parity-scale-codec", workspace = true, default-features = false, features = ["derive"] }
scale-info = { workspace = true, default-features = false, features = ["bit-vec"] }
scale-value = { workspace = true, default-features = false }
scale-bits = { workspace = true, default-features = false }
scale-decode = { workspace = true, default-features = false, features = ["derive", "primitive-types"] }
scale-encode = { workspace = true, default-features = false, features = ["derive", "primitive-types", "bits"] }
frame-metadata = { workspace = true, default-features = false }
subxt-metadata = { workspace = true, default-features = false }
derive-where = { workspace = true }
derive_more = { workspace = true }
hex = { workspace = true, default-features = false, features = ["alloc"] }
serde = { workspace = true, default-features = false, features = ["derive"] }
serde_json = { workspace = true, default-features = false, features = ["raw_value", "alloc"] }
hashbrown = { workspace = true }
# For ss58 encoding AccountId32 to serialize them properly:
base58 = { workspace = true }
blake2 = { workspace = true }
# Provides some deserialization, types like U256/H256 and hashing impls like twox/blake256:
impl-serde = { workspace = true, default-features = false }
primitive-types = { workspace = true, default-features = false, features = ["codec", "serde_no_std", "scale-info"] }
sp-crypto-hashing = { workspace = true }
# Included if the "substrate-compat" feature is enabled.
sp-core = { workspace = true, optional = true }
sp-runtime = { workspace = true, optional = true }
tracing = { workspace = true, default-features = false }
[dev-dependencies]
bitvec = { workspace = true }
codec = { workspace = true, features = ["derive", "bit-vec"] }
sp-core = { workspace = true }
sp-keyring = { workspace = true }
sp-runtime = { workspace = true }
[package.metadata.docs.rs]
defalt-features = true
rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.playground]
defalt-features = true
+3
View File
@@ -0,0 +1,3 @@
# Subxt-Core
This library provides a no-std compatible subset of functionality that `subxt` and `subxt-signer` rely on.
+19
View File
@@ -0,0 +1,19 @@
use scale_decode::DecodeAsFields;
/// Trait to uniquely identify the extrinsic's identity from the runtime metadata.
///
/// Generated API structures that represent an extrinsic implement this trait.
///
/// The trait is utilized to decode emitted extrinsics from a block, via obtaining the
/// form of the `Extrinsic` from the metadata.
pub trait StaticExtrinsic: DecodeAsFields {
/// Pallet name.
const PALLET: &'static str;
/// Call name.
const CALL: &'static str;
/// Returns true if the given pallet and call names match this extrinsic.
fn is_extrinsic(pallet: &str, call: &str) -> bool {
Self::PALLET == pallet && Self::CALL == call
}
}
+71
View File
@@ -0,0 +1,71 @@
use crate::{config::Config, metadata::Metadata};
use derive_where::derive_where;
/// Each client should be able to provide access to the following fields
/// - runtime version
/// - genesis hash
/// - metadata
#[derive_where(Clone, Debug)]
pub struct ClientState<C: Config> {
genesis_hash: C::Hash,
runtime_version: RuntimeVersion,
metadata: Metadata,
}
impl<C: Config> ClientState<C> {
pub fn new(genesis_hash: C::Hash, runtime_version: RuntimeVersion, metadata: Metadata) -> Self {
Self {
genesis_hash,
runtime_version,
metadata,
}
}
pub fn metadata(&self) -> Metadata {
self.metadata.clone()
}
pub fn runtime_version(&self) -> RuntimeVersion {
self.runtime_version
}
pub fn genesis_hash(&self) -> C::Hash {
self.genesis_hash
}
}
/// Runtime version information needed to submit transactions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct RuntimeVersion {
spec_version: u32,
transaction_version: u32,
}
impl RuntimeVersion {
pub fn new(spec_version: u32, transaction_version: u32) -> Self {
RuntimeVersion {
spec_version,
transaction_version,
}
}
/// Version of the runtime specification. A full-node will not attempt to use its native
/// runtime in substitute for the on-chain Wasm runtime unless all of `spec_name`,
/// `spec_version` and `authoring_version` are the same between Wasm and native.
pub fn spec_version(&self) -> u32 {
self.spec_version
}
/// All existing dispatches are fully compatible when this number doesn't change. If this
/// number changes, then `spec_version` must change, also.
///
/// This number must change when an existing dispatchable (module ID, dispatch ID) is changed,
/// either through an alteration in its user-level semantics, a parameter
/// added/removed/changed, a dispatchable being removed, a module being removed, or a
/// dispatchable/module changing its index.
///
/// It need *not* change when a new module is added or when a dispatchable is added.
pub fn transaction_version(&self) -> u32 {
self.transaction_version
}
}
@@ -2,6 +2,7 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::signed_extensions::CheckNonceParams;
use super::{signed_extensions, ExtrinsicParams};
use super::{Config, Header};
@@ -20,12 +21,14 @@ pub type DefaultExtrinsicParams<T> = signed_extensions::AnyOf<
),
>;
/// A builder that outputs the set of [`super::ExtrinsicParams::OtherParams`] required for
/// A builder that outputs the set of [`super::ExtrinsicParams::Params`] required for
/// [`DefaultExtrinsicParams`]. This may expose methods that aren't applicable to the current
/// chain; such values will simply be ignored if so.
pub struct DefaultExtrinsicParamsBuilder<T: Config> {
/// `None` means the tx will be immortal.
mortality: Option<Mortality<T::Hash>>,
/// `None` means the nonce will be automatically set.
nonce: Option<u64>,
/// `None` means we'll use the native token.
tip_of_asset_id: Option<T::AssetId>,
tip: u128,
@@ -49,6 +52,7 @@ impl<T: Config> Default for DefaultExtrinsicParamsBuilder<T> {
tip: 0,
tip_of: 0,
tip_of_asset_id: None,
nonce: None,
}
}
}
@@ -72,6 +76,12 @@ impl<T: Config> DefaultExtrinsicParamsBuilder<T> {
self
}
/// Provide a specific nonce for the submitter of the extrinsic
pub fn nonce(mut self, nonce: u64) -> Self {
self.nonce = Some(nonce);
self
}
/// Make the transaction mortal, given a block number and block hash (which must both point to
/// the same block) that it should be mortal from, and the number of blocks (roughly; it'll be
/// rounded to a power of two) that it will be mortal for.
@@ -111,7 +121,7 @@ impl<T: Config> DefaultExtrinsicParamsBuilder<T> {
}
/// Build the extrinsic parameters.
pub fn build(self) -> <DefaultExtrinsicParams<T> as ExtrinsicParams<T>>::OtherParams {
pub fn build(self) -> <DefaultExtrinsicParams<T> as ExtrinsicParams<T>>::Params {
let check_mortality_params = if let Some(mortality) = self.mortality {
signed_extensions::CheckMortalityParams::mortal(
mortality.period,
@@ -131,10 +141,12 @@ impl<T: Config> DefaultExtrinsicParamsBuilder<T> {
let charge_transaction_params =
signed_extensions::ChargeTransactionPaymentParams::tip(self.tip);
let check_nonce_params = CheckNonceParams(self.nonce);
(
(),
(),
(),
check_nonce_params,
(),
check_mortality_params,
charge_asset_tx_params,
@@ -7,44 +7,9 @@
//! [`crate::config::DefaultExtrinsicParams`] provides a general-purpose
//! implementation of this that will work in many cases.
use crate::{client::OfflineClientT, Config};
use core::fmt::Debug;
/// An error that can be emitted when trying to construct an instance of [`ExtrinsicParams`],
/// encode data from the instance, or match on signed extensions.
#[derive(thiserror::Error, Debug)]
#[non_exhaustive]
pub enum ExtrinsicParamsError {
/// Cannot find a type id in the metadata. The context provides some additional
/// information about the source of the error (eg the signed extension name).
#[error("Cannot find type id '{type_id} in the metadata (context: {context})")]
MissingTypeId {
/// Type ID.
type_id: u32,
/// Some arbitrary context to help narrow the source of the error.
context: &'static str,
},
/// A signed extension in use on some chain was not provided.
#[error("The chain expects a signed extension with the name {0}, but we did not provide one")]
UnknownSignedExtension(String),
/// Some custom error.
#[error("Error constructing extrinsic parameters: {0}")]
Custom(CustomExtrinsicParamsError),
}
/// A custom error.
pub type CustomExtrinsicParamsError = Box<dyn std::error::Error + Send + Sync + 'static>;
impl From<std::convert::Infallible> for ExtrinsicParamsError {
fn from(value: std::convert::Infallible) -> Self {
match value {}
}
}
impl From<CustomExtrinsicParamsError> for ExtrinsicParamsError {
fn from(value: CustomExtrinsicParamsError) -> Self {
ExtrinsicParamsError::Custom(value)
}
}
use super::refine_params::RefineParams;
use crate::{client::ClientState, error::ExtrinsicParamsError, Config};
use alloc::vec::Vec;
/// This trait allows you to configure the "signed extra" and
/// "additional" parameters that are a part of the transaction payload
@@ -53,14 +18,10 @@ pub trait ExtrinsicParams<T: Config>: ExtrinsicParamsEncoder + Sized + 'static {
/// These parameters can be provided to the constructor along with
/// some default parameters that `subxt` understands, in order to
/// help construct your [`ExtrinsicParams`] object.
type OtherParams;
type Params: RefineParams<T>;
/// Construct a new instance of our [`ExtrinsicParams`].
fn new<Client: OfflineClientT<T>>(
nonce: u64,
client: Client,
other_params: Self::OtherParams,
) -> Result<Self, ExtrinsicParamsError>;
fn new(client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError>;
}
/// This trait is expected to be implemented for any [`ExtrinsicParams`], and
@@ -1,4 +1,4 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
@@ -10,6 +10,7 @@
mod default_extrinsic_params;
mod extrinsic_params;
mod refine_params;
pub mod polkadot;
pub mod signed_extensions;
@@ -23,8 +24,9 @@ use scale_encode::EncodeAsType;
use serde::{de::DeserializeOwned, Serialize};
pub use default_extrinsic_params::{DefaultExtrinsicParams, DefaultExtrinsicParamsBuilder};
pub use extrinsic_params::{ExtrinsicParams, ExtrinsicParamsEncoder, ExtrinsicParamsError};
pub use extrinsic_params::{ExtrinsicParams, ExtrinsicParamsEncoder};
pub use polkadot::{PolkadotConfig, PolkadotExtrinsicParams, PolkadotExtrinsicParamsBuilder};
pub use refine_params::{RefineParams, RefineParamsData};
pub use signed_extensions::SignedExtension;
pub use substrate::{SubstrateConfig, SubstrateExtrinsicParams, SubstrateExtrinsicParamsBuilder};
@@ -60,7 +62,7 @@ pub trait Config: Sized + Send + Sync + 'static {
}
/// given some [`Config`], this return the other params needed for its `ExtrinsicParams`.
pub type OtherParamsFor<T> = <<T as Config>::ExtrinsicParams as ExtrinsicParams<T>>::OtherParams;
pub type ParamsFor<T> = <<T as Config>::ExtrinsicParams as ExtrinsicParams<T>>::Params;
/// Block hashes must conform to a bunch of things to be used in Subxt.
pub trait BlockHash:
@@ -75,7 +77,7 @@ pub trait BlockHash:
+ Encode
+ PartialEq
+ Eq
+ std::hash::Hash
+ core::hash::Hash
{
}
impl<T> BlockHash for T where
@@ -90,7 +92,7 @@ impl<T> BlockHash for T where
+ Encode
+ PartialEq
+ Eq
+ std::hash::Hash
+ core::hash::Hash
{
}
@@ -6,8 +6,8 @@
use super::{Config, DefaultExtrinsicParams, DefaultExtrinsicParamsBuilder};
use crate::config::SubstrateConfig;
pub use crate::utils::{AccountId32, MultiAddress, MultiSignature};
use crate::SubstrateConfig;
pub use primitive_types::{H256, U256};
/// Default set of commonly used types by Polkadot nodes.
+87
View File
@@ -0,0 +1,87 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Refining params with values fetched from the chain
use crate::Config;
/// Data that can be used to refine the params of signed extensions.
pub struct RefineParamsData<T: Config> {
    // Private snapshot of chain state; exposed read-only via the accessors below.
    nonce: u64,
    number: u64,
    hash: T::Hash,
}

impl<T: Config> RefineParamsData<T> {
    #[doc(hidden)]
    /// Creates a new [`RefineParamsData`] instance. Called from `subxt` when refining signed extensions.
    pub fn new(account_nonce: u64, block_number: u64, block_hash: T::Hash) -> Self {
        Self {
            nonce: account_nonce,
            number: block_number,
            hash: block_hash,
        }
    }

    /// Account nonce for the extrinsic author.
    pub fn account_nonce(&self) -> u64 {
        self.nonce
    }

    /// Latest finalized block number.
    pub fn block_number(&self) -> u64 {
        self.number
    }

    /// Latest finalized block hash.
    pub fn block_hash(&self) -> T::Hash {
        self.hash
    }
}
/// Types implementing [`RefineParams`] can be modified to reflect live information from the chain.
pub trait RefineParams<T: Config> {
    /// Refine params to an extrinsic. There is usually some notion of 'the param is already set/unset' in types implementing this trait.
    /// The refinement should most likely not affect cases where a param is in a 'is already set by the user' state.
    ///
    /// The default implementation is a no-op, so implementors only override this
    /// when they actually have something to fill in from the chain data.
    fn refine(&mut self, _data: &RefineParamsData<T>) {}
}

// The unit type carries no params, so the default no-op `refine` applies.
impl<T: Config> RefineParams<T> for () {}
// Implements `RefineParams` for tuples of refinable params: refining a tuple
// refines each element in turn, passing the same chain data to every one.
macro_rules! impl_tuples {
    ($($ident:ident $index:tt),+) => {
        impl <T: Config, $($ident : RefineParams<T>),+> RefineParams<T> for ($($ident,)+){
            fn refine(&mut self, data: &RefineParamsData<T>) {
                // Forward the refinement to each tuple element by index.
                $(self.$index.refine(data);)+
            }
        }
    }
}

#[rustfmt::skip]
const _: () = {
    // Tuple arities 1..=21. Note the generic letters skip `T` (…S, U, V)
    // because `T` is already taken by the `Config` type parameter.
    impl_tuples!(A 0);
    impl_tuples!(A 0, B 1);
    impl_tuples!(A 0, B 1, C 2);
    impl_tuples!(A 0, B 1, C 2, D 3);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, U 19);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, U 19, V 20);
};
@@ -7,17 +7,24 @@
//! [`AnyOf`] to configure the set of signed extensions which are known about
//! when interacting with a chain.
use super::extrinsic_params::{ExtrinsicParams, ExtrinsicParamsEncoder, ExtrinsicParamsError};
use super::extrinsic_params::ExtrinsicParams;
use super::refine_params::RefineParamsData;
use super::RefineParams;
use crate::client::ClientState;
use crate::config::ExtrinsicParamsEncoder;
use crate::error::ExtrinsicParamsError;
use crate::utils::Era;
use crate::{client::OfflineClientT, Config};
use crate::Config;
use alloc::borrow::ToOwned;
use alloc::boxed::Box;
use alloc::vec::Vec;
use codec::{Compact, Encode};
use core::fmt::Debug;
use derivative::Derivative;
use derive_where::derive_where;
use hashbrown::HashMap;
use scale_decode::DecodeAsType;
use scale_info::PortableRegistry;
use std::collections::HashMap;
/// A single [`SignedExtension`] has a unique name, but is otherwise the
/// same as [`ExtrinsicParams`] in describing how to encode the extra and
/// additional data.
@@ -37,14 +44,10 @@ pub trait SignedExtension<T: Config>: ExtrinsicParams<T> {
pub struct CheckSpecVersion(u32);
impl<T: Config> ExtrinsicParams<T> for CheckSpecVersion {
type OtherParams = ();
type Params = ();
fn new<Client: OfflineClientT<T>>(
_nonce: u64,
client: Client,
_other_params: Self::OtherParams,
) -> Result<Self, ExtrinsicParamsError> {
Ok(CheckSpecVersion(client.runtime_version().spec_version))
fn new(client: &ClientState<T>, _params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
Ok(CheckSpecVersion(client.runtime_version().spec_version()))
}
}
@@ -65,13 +68,11 @@ impl<T: Config> SignedExtension<T> for CheckSpecVersion {
pub struct CheckNonce(Compact<u64>);
impl<T: Config> ExtrinsicParams<T> for CheckNonce {
type OtherParams = ();
type Params = CheckNonceParams;
fn new<Client: OfflineClientT<T>>(
nonce: u64,
_client: Client,
_other_params: Self::OtherParams,
) -> Result<Self, ExtrinsicParamsError> {
fn new(_client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
// If no nonce is set (nor by user nor refinement), use a nonce of 0.
let nonce = params.0.unwrap_or(0);
Ok(CheckNonce(Compact(nonce)))
}
}
@@ -89,18 +90,28 @@ impl<T: Config> SignedExtension<T> for CheckNonce {
}
}
/// Params for [`CheckNonce`]
#[derive(Debug, Clone, Default)]
pub struct CheckNonceParams(pub Option<u64>);

impl<T: Config> RefineParams<T> for CheckNonceParams {
    fn refine(&mut self, data: &RefineParamsData<T>) {
        // Respect a nonce that was set explicitly by the user.
        if self.0.is_some() {
            return;
        }
        // Otherwise, fill in the account nonce fetched from the chain.
        self.0 = Some(data.account_nonce());
    }
}
/// The [`CheckTxVersion`] signed extension.
pub struct CheckTxVersion(u32);
impl<T: Config> ExtrinsicParams<T> for CheckTxVersion {
type OtherParams = ();
type Params = ();
fn new<Client: OfflineClientT<T>>(
_nonce: u64,
client: Client,
_other_params: Self::OtherParams,
) -> Result<Self, ExtrinsicParamsError> {
Ok(CheckTxVersion(client.runtime_version().transaction_version))
fn new(client: &ClientState<T>, _params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
Ok(CheckTxVersion(
client.runtime_version().transaction_version(),
))
}
}
@@ -121,13 +132,9 @@ impl<T: Config> SignedExtension<T> for CheckTxVersion {
pub struct CheckGenesis<T: Config>(T::Hash);
impl<T: Config> ExtrinsicParams<T> for CheckGenesis<T> {
type OtherParams = ();
type Params = ();
fn new<Client: OfflineClientT<T>>(
_nonce: u64,
client: Client,
_other_params: Self::OtherParams,
) -> Result<Self, ExtrinsicParamsError> {
fn new(client: &ClientState<T>, _params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
Ok(CheckGenesis(client.genesis_hash()))
}
}
@@ -152,16 +159,25 @@ pub struct CheckMortality<T: Config> {
}
/// Parameters to configure the [`CheckMortality`] signed extension.
pub struct CheckMortalityParams<T: Config> {
pub struct CheckMortalityParams<T: Config>(Option<CheckMortalityParamsInner<T>>);
struct CheckMortalityParamsInner<T: Config> {
era: Era,
checkpoint: Option<T::Hash>,
}
impl<T: Config> Default for CheckMortalityParams<T> {
fn default() -> Self {
Self {
era: Default::default(),
checkpoint: Default::default(),
CheckMortalityParams(None)
}
}
impl<T: Config> RefineParams<T> for CheckMortalityParams<T> {
fn refine(&mut self, data: &RefineParamsData<T>) {
if self.0.is_none() {
// By default we refine the params to have a mortal transaction valid for 32 blocks.
const TX_VALID_FOR: u64 = 32;
*self =
CheckMortalityParams::mortal(TX_VALID_FOR, data.block_number(), data.block_hash());
}
}
}
@@ -172,32 +188,36 @@ impl<T: Config> CheckMortalityParams<T> {
/// `block_hash` should both point to the same block, and are the block that
/// the transaction is mortal from.
pub fn mortal(period: u64, block_number: u64, block_hash: T::Hash) -> Self {
CheckMortalityParams {
Self(Some(CheckMortalityParamsInner {
era: Era::mortal(period, block_number),
checkpoint: Some(block_hash),
}
}))
}
/// An immortal transaction.
pub fn immortal() -> Self {
CheckMortalityParams {
Self(Some(CheckMortalityParamsInner {
era: Era::Immortal,
checkpoint: None,
}
}))
}
}
impl<T: Config> ExtrinsicParams<T> for CheckMortality<T> {
type OtherParams = CheckMortalityParams<T>;
type Params = CheckMortalityParams<T>;
fn new<Client: OfflineClientT<T>>(
_nonce: u64,
client: Client,
other_params: Self::OtherParams,
) -> Result<Self, ExtrinsicParamsError> {
Ok(CheckMortality {
era: other_params.era,
checkpoint: other_params.checkpoint.unwrap_or(client.genesis_hash()),
})
fn new(client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
let check_mortality = if let Some(params) = params.0 {
CheckMortality {
era: params.era,
checkpoint: params.checkpoint.unwrap_or(client.genesis_hash()),
}
} else {
CheckMortality {
era: Era::Immortal,
checkpoint: client.genesis_hash(),
}
};
Ok(check_mortality)
}
}
@@ -218,8 +238,8 @@ impl<T: Config> SignedExtension<T> for CheckMortality<T> {
}
/// The [`ChargeAssetTxPayment`] signed extension.
#[derive(Derivative, DecodeAsType)]
#[derivative(Clone(bound = "T::AssetId: Clone"), Debug(bound = "T::AssetId: Debug"))]
#[derive(DecodeAsType)]
#[derive_where(Clone, Debug; T::AssetId)]
#[decode_as_type(trait_bounds = "T::AssetId: DecodeAsType")]
pub struct ChargeAssetTxPayment<T: Config> {
tip: Compact<u128>,
@@ -278,20 +298,18 @@ impl<T: Config> ChargeAssetTxPaymentParams<T> {
}
impl<T: Config> ExtrinsicParams<T> for ChargeAssetTxPayment<T> {
type OtherParams = ChargeAssetTxPaymentParams<T>;
type Params = ChargeAssetTxPaymentParams<T>;
fn new<Client: OfflineClientT<T>>(
_nonce: u64,
_client: Client,
other_params: Self::OtherParams,
) -> Result<Self, ExtrinsicParamsError> {
fn new(_client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
Ok(ChargeAssetTxPayment {
tip: Compact(other_params.tip),
asset_id: other_params.asset_id,
tip: Compact(params.tip),
asset_id: params.asset_id,
})
}
}
impl<T: Config> RefineParams<T> for ChargeAssetTxPaymentParams<T> {}
impl<T: Config> ExtrinsicParamsEncoder for ChargeAssetTxPayment<T> {
fn encode_extra_to(&self, v: &mut Vec<u8>) {
(self.tip, &self.asset_id).encode_to(v);
@@ -336,19 +354,17 @@ impl ChargeTransactionPaymentParams {
}
impl<T: Config> ExtrinsicParams<T> for ChargeTransactionPayment {
type OtherParams = ChargeTransactionPaymentParams;
type Params = ChargeTransactionPaymentParams;
fn new<Client: OfflineClientT<T>>(
_nonce: u64,
_client: Client,
other_params: Self::OtherParams,
) -> Result<Self, ExtrinsicParamsError> {
fn new(_client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
Ok(ChargeTransactionPayment {
tip: Compact(other_params.tip),
tip: Compact(params.tip),
})
}
}
impl<T: Config> RefineParams<T> for ChargeTransactionPaymentParams {}
impl ExtrinsicParamsEncoder for ChargeTransactionPayment {
fn encode_extra_to(&self, v: &mut Vec<u8>) {
self.tip.encode_to(v);
@@ -367,7 +383,7 @@ impl<T: Config> SignedExtension<T> for ChargeTransactionPayment {
/// is a sensible default, and allows for a single configuration to work across multiple chains.
pub struct AnyOf<T, Params> {
params: Vec<Box<dyn ExtrinsicParamsEncoder>>,
_marker: std::marker::PhantomData<(T, Params)>,
_marker: core::marker::PhantomData<(T, Params)>,
}
macro_rules! impl_tuples {
@@ -380,12 +396,11 @@ macro_rules! impl_tuples {
T: Config,
$($ident: SignedExtension<T>,)+
{
type OtherParams = ($($ident::OtherParams,)+);
type Params = ($($ident::Params,)+);
fn new<Client: OfflineClientT<T>>(
nonce: u64,
client: Client,
other_params: Self::OtherParams,
fn new(
client: &ClientState<T>,
params: Self::Params,
) -> Result<Self, ExtrinsicParamsError> {
let metadata = client.metadata();
let types = metadata.types();
@@ -401,7 +416,7 @@ macro_rules! impl_tuples {
}
// Break and record as soon as we find a match:
if $ident::matches(e.identifier(), e.extra_ty(), types) {
let ext = $ident::new(nonce, client.clone(), other_params.$index)?;
let ext = $ident::new(client, params.$index)?;
let boxed_ext: Box<dyn ExtrinsicParamsEncoder> = Box::new(ext);
exts_by_index.insert(idx, boxed_ext);
break
@@ -424,7 +439,7 @@ macro_rules! impl_tuples {
Ok(AnyOf {
params,
_marker: std::marker::PhantomData
_marker: core::marker::PhantomData
})
}
}
@@ -5,6 +5,8 @@
//! Substrate specific configuration
use super::{Config, DefaultExtrinsicParams, DefaultExtrinsicParamsBuilder, Hasher, Header};
use alloc::format;
use alloc::vec::Vec;
use codec::{Decode, Encode};
use serde::{Deserialize, Serialize};
@@ -42,7 +44,7 @@ pub struct BlakeTwo256;
impl Hasher for BlakeTwo256 {
type Output = H256;
fn hash(s: &[u8]) -> Self::Output {
sp_core_hashing::blake2_256(s).into()
sp_crypto_hashing::blake2_256(s).into()
}
}
@@ -225,12 +227,69 @@ where
{
// At the time of writing, Smoldot gives back block numbers in numeric rather
// than hex format. So let's support deserializing from both here:
use crate::backend::legacy::rpc_methods::NumberOrHex;
let number_or_hex = NumberOrHex::deserialize(d)?;
let u256 = number_or_hex.into_u256();
TryFrom::try_from(u256).map_err(|_| serde::de::Error::custom("Try from failed"))
}
/// A number type that can be serialized both as a number or a string that encodes a number in a
/// string.
///
/// We allow two representations of the block number as input. Either we deserialize to the type
/// that is specified in the block type or we attempt to parse given hex value.
///
/// The primary motivation for having this type is to avoid overflows when using big integers in
/// JavaScript (which we consider as an important RPC API consumer).
#[derive(Copy, Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]
#[serde(untagged)]
pub enum NumberOrHex {
/// The number represented directly.
Number(u64),
/// Hex representation of the number.
Hex(U256),
}
impl NumberOrHex {
/// Converts this number into an U256.
pub fn into_u256(self) -> U256 {
match self {
NumberOrHex::Number(n) => n.into(),
NumberOrHex::Hex(h) => h,
}
}
}
// Convenience conversion so callers can take a `U256` from either representation.
impl From<NumberOrHex> for U256 {
    fn from(num_or_hex: NumberOrHex) -> U256 {
        num_or_hex.into_u256()
    }
}

// Integer types that always fit in a `u64` become the `Number` variant.
macro_rules! into_number_or_hex {
    ($($t: ty)+) => {
        $(
            impl From<$t> for NumberOrHex {
                fn from(x: $t) -> Self {
                    NumberOrHex::Number(x.into())
                }
            }
        )+
    }
}
into_number_or_hex!(u8 u16 u32 u64);

// Values wider than 64 bits are carried in the `Hex` (`U256`) variant.
impl From<u128> for NumberOrHex {
    fn from(n: u128) -> Self {
        NumberOrHex::Hex(n.into())
    }
}

impl From<U256> for NumberOrHex {
    fn from(n: U256) -> Self {
        NumberOrHex::Hex(n)
    }
}
#[cfg(test)]
mod test {
use super::*;
@@ -2,9 +2,11 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::{dynamic::DecodedValueThunk, metadata::DecodeWithMetadata};
use derivative::Derivative;
use std::borrow::Cow;
use crate::dynamic::DecodedValueThunk;
use crate::metadata::DecodeWithMetadata;
use alloc::borrow::Cow;
use alloc::string::String;
use derive_where::derive_where;
/// This represents a constant address. Anything implementing this trait
/// can be used to fetch constants.
@@ -27,13 +29,12 @@ pub trait ConstantAddress {
}
/// This represents the address of a constant.
#[derive(Derivative)]
#[derivative(Clone(bound = ""), Debug(bound = ""))]
#[derive_where(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct Address<ReturnTy> {
pallet_name: Cow<'static, str>,
constant_name: Cow<'static, str>,
constant_hash: Option<[u8; 32]>,
_marker: std::marker::PhantomData<ReturnTy>,
_marker: core::marker::PhantomData<ReturnTy>,
}
/// The type of address typically used to return dynamic constant values.
@@ -46,7 +47,7 @@ impl<ReturnTy> Address<ReturnTy> {
pallet_name: Cow::Owned(pallet_name.into()),
constant_name: Cow::Owned(constant_name.into()),
constant_hash: None,
_marker: std::marker::PhantomData,
_marker: core::marker::PhantomData,
}
}
@@ -62,7 +63,7 @@ impl<ReturnTy> Address<ReturnTy> {
pallet_name: Cow::Borrowed(pallet_name),
constant_name: Cow::Borrowed(constant_name),
constant_hash: Some(hash),
_marker: std::marker::PhantomData,
_marker: core::marker::PhantomData,
}
}
+59
View File
@@ -0,0 +1,59 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Types associated with accessing constants.
mod constant_address;
pub use constant_address::{dynamic, Address, ConstantAddress, DynamicAddress};
use alloc::borrow::ToOwned;
use crate::{
error::MetadataError,
metadata::{DecodeWithMetadata, MetadataExt},
Error, Metadata,
};
/// Run validation logic against some constant address you'd like to access. Returns `Ok(())`
/// if the address is valid (or if it's not possible to check since the address has no validation hash).
/// Return an error if the address was not valid or something went wrong trying to validate it (ie
/// the pallet or constant in question do not exist at all).
pub fn validate_constant<Address: ConstantAddress>(
    metadata: &subxt_metadata::Metadata,
    address: &Address,
) -> Result<(), Error> {
    // No validation hash means there's nothing to check; treat as valid.
    let actual_hash = match address.validation_hash() {
        Some(hash) => hash,
        None => return Ok(()),
    };

    let pallet = metadata.pallet_by_name_err(address.pallet_name())?;
    let expected_hash = pallet
        .constant_hash(address.constant_name())
        .ok_or_else(|| MetadataError::ConstantNameNotFound(address.constant_name().to_owned()))?;

    if actual_hash == expected_hash {
        Ok(())
    } else {
        Err(MetadataError::IncompatibleCodegen.into())
    }
}
/// Fetch the constant pointed to by `address` from the given metadata, first
/// validating it against the address's validation hash (if present) and then
/// decoding its SCALE bytes into the address's target type.
pub fn get_constant<Address: ConstantAddress>(
    metadata: &Metadata,
    address: &Address,
) -> Result<Address::Target, Error> {
    // 1. Validate constant shape if hash given:
    validate_constant(metadata, address)?;

    // 2. Attempt to decode the constant into the type given:
    let pallet = metadata.pallet_by_name_err(address.pallet_name())?;
    let constant = pallet
        .constant_by_name(address.constant_name())
        .ok_or_else(|| MetadataError::ConstantNameNotFound(address.constant_name().to_owned()))?;

    let decoded = <Address::Target as DecodeWithMetadata>::decode_with_metadata(
        &mut constant.value(),
        constant.ty(),
        metadata,
    )?;
    Ok(decoded)
}
@@ -1,8 +1,8 @@
use derivative::Derivative;
use std::marker::PhantomData;
use derive_where::derive_where;
use crate::dynamic::DecodedValueThunk;
use crate::metadata::DecodeWithMetadata;
use crate::utils::Yes;
/// This represents the address of a custom value in in the metadata.
/// Anything, that implements the [CustomValueAddress] trait can be used, to fetch
@@ -33,16 +33,12 @@ impl CustomValueAddress for str {
}
}
/// Used to signal whether a [`CustomValueAddress`] can be decoded.
pub struct Yes;
/// A static address to a custom value.
#[derive(Derivative)]
#[derivative(Clone(bound = ""), Debug(bound = ""))]
#[derive_where(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct StaticAddress<ReturnTy, IsDecodable> {
name: &'static str,
hash: Option<[u8; 32]>,
phantom: PhantomData<(ReturnTy, IsDecodable)>,
phantom: core::marker::PhantomData<(ReturnTy, IsDecodable)>,
}
impl<ReturnTy, IsDecodable> StaticAddress<ReturnTy, IsDecodable> {
@@ -52,7 +48,7 @@ impl<ReturnTy, IsDecodable> StaticAddress<ReturnTy, IsDecodable> {
StaticAddress::<ReturnTy, IsDecodable> {
name,
hash: Some(hash),
phantom: PhantomData,
phantom: core::marker::PhantomData,
}
}
+156
View File
@@ -0,0 +1,156 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Types associated with accessing custom types
mod custom_value_address;
use crate::utils::Yes;
pub use custom_value_address::{CustomValueAddress, StaticAddress};
use crate::{
error::MetadataError,
metadata::{DecodeWithMetadata, MetadataExt},
Error, Metadata,
};
use alloc::vec::Vec;
/// Run the validation logic against some custom value address you'd like to access. Returns `Ok(())`
/// if the address is valid (or if it's not possible to check since the address has no validation hash).
/// Returns an error if the address was not valid (wrong name, type or raw bytes)
pub fn validate_custom_value<Address: CustomValueAddress + ?Sized>(
    metadata: &Metadata,
    address: &Address,
) -> Result<(), Error> {
    // Look the custom value up once, rather than performing the same
    // lookup a second time for the existence check below.
    let custom = metadata.custom();
    let custom_value = custom.get(address.name());

    if let Some(actual_hash) = address.validation_hash() {
        // With a validation hash: a missing name is reported as such, and a
        // hash mismatch means the generated code is incompatible.
        let custom_value = custom_value
            .ok_or_else(|| MetadataError::CustomValueNameNotFound(address.name().into()))?;
        if actual_hash != custom_value.hash() {
            return Err(MetadataError::IncompatibleCodegen.into());
        }
    } else if custom_value.is_none() {
        // Without a validation hash we can only check that the value exists.
        return Err(MetadataError::IncompatibleCodegen.into());
    }
    Ok(())
}
/// Access a custom value by the address it is registered under. This can be just a [str] to get back a dynamic value,
/// or a static address from the generated static interface to get a value of a static type returned.
pub fn get_custom_value<Address: CustomValueAddress<IsDecodable = Yes> + ?Sized>(
    metadata: &Metadata,
    address: &Address,
) -> Result<Address::Target, Error> {
    // 1. Validate custom value shape if hash given:
    validate_custom_value(metadata, address)?;

    // 2. Attempt to decode custom value:
    let entry = metadata.custom_value_by_name_err(address.name())?;
    let decoded = <Address::Target as DecodeWithMetadata>::decode_with_metadata(
        &mut entry.bytes(),
        entry.type_id(),
        metadata,
    )?;
    Ok(decoded)
}
/// Access the bytes of a custom value by the address it is registered under.
pub fn get_custom_value_bytes<Address: CustomValueAddress + ?Sized>(
    metadata: &Metadata,
    address: &Address,
) -> Result<Vec<u8>, Error> {
    // 1. Validate custom value shape if hash given:
    validate_custom_value(metadata, address)?;

    // 2. Return a copy of the underlying SCALE bytes:
    let entry = metadata.custom_value_by_name_err(address.name())?;
    Ok(entry.bytes().to_vec())
}
#[cfg(test)]
mod tests {
    use alloc::collections::BTreeMap;
    use codec::Encode;
    use scale_decode::DecodeAsType;
    use scale_info::form::PortableForm;
    use scale_info::TypeInfo;

    use alloc::borrow::ToOwned;
    use alloc::string::String;
    use alloc::vec;

    use crate::custom_values::get_custom_value;
    use crate::Metadata;

    // Payload type for the mock custom value; both SCALE-encodable and decodable.
    #[derive(Debug, Clone, PartialEq, Eq, Encode, TypeInfo, DecodeAsType)]
    pub struct Person {
        age: u16,
        name: String,
    }

    /// Build a minimal [`Metadata`] containing a single custom value keyed
    /// "Mr. Robot", whose bytes are a SCALE-encoded `Person`. All other
    /// metadata fields are stubbed out with the unit type.
    fn mock_metadata() -> Metadata {
        // Register the types we need into a portable registry.
        let person_ty = scale_info::MetaType::new::<Person>();
        let unit = scale_info::MetaType::new::<()>();
        let mut types = scale_info::Registry::new();
        let person_ty_id = types.register_type(&person_ty);
        let unit_id = types.register_type(&unit);
        let types: scale_info::PortableRegistry = types.into();

        let person = Person {
            age: 42,
            name: "Neo".into(),
        };

        // The custom value: the person's type id plus its encoded bytes.
        let person_value_metadata: frame_metadata::v15::CustomValueMetadata<PortableForm> =
            frame_metadata::v15::CustomValueMetadata {
                ty: person_ty_id,
                value: person.encode(),
            };

        let frame_metadata = frame_metadata::v15::RuntimeMetadataV15 {
            types,
            pallets: vec![],
            extrinsic: frame_metadata::v15::ExtrinsicMetadata {
                version: 0,
                address_ty: unit_id,
                call_ty: unit_id,
                signature_ty: unit_id,
                extra_ty: unit_id,
                signed_extensions: vec![],
            },
            ty: unit_id,
            apis: vec![],
            outer_enums: frame_metadata::v15::OuterEnums {
                call_enum_ty: unit_id,
                event_enum_ty: unit_id,
                error_enum_ty: unit_id,
            },
            custom: frame_metadata::v15::CustomMetadata {
                map: BTreeMap::from_iter([("Mr. Robot".to_owned(), person_value_metadata)]),
            },
        };

        let metadata: subxt_metadata::Metadata = frame_metadata.try_into().unwrap();
        Metadata::new(metadata)
    }

    #[test]
    fn test_decoding() {
        let metadata = mock_metadata();

        // Unknown names must be rejected.
        assert!(get_custom_value(&metadata, "Invalid Address").is_err());

        // A known name decodes back into the original `Person`.
        let person_decoded_value_thunk = get_custom_value(&metadata, "Mr. Robot").unwrap();
        let person: Person = person_decoded_value_thunk.as_type().unwrap();
        assert_eq!(
            person,
            Person {
                age: 42,
                name: "Neo".into()
            }
        )
    }
}
+8 -11
View File
@@ -5,19 +5,16 @@
//! This module provides the entry points to create dynamic
//! transactions, storage and constant lookups.
use crate::{
error::Error,
metadata::{DecodeWithMetadata, Metadata},
};
use crate::metadata::{DecodeWithMetadata, Metadata};
use alloc::vec::Vec;
use scale_decode::DecodeAsType;
pub use scale_value::{At, Value};
/// A [`scale_value::Value`] type endowed with contextual information
/// regarding what type was used to decode each part of it. This implements
/// [`crate::metadata::DecodeWithMetadata`], and is used as a return type
/// for dynamic requests.
pub type DecodedValue = scale_value::Value<scale_value::scale::TypeId>;
pub type DecodedValue = scale_value::Value<u32>;
// Submit dynamic transactions.
pub use crate::tx::dynamic as tx;
@@ -45,7 +42,7 @@ impl DecodeWithMetadata for DecodedValueThunk {
bytes: &mut &[u8],
type_id: u32,
metadata: &Metadata,
) -> Result<Self, Error> {
) -> Result<Self, scale_decode::Error> {
let mut v = Vec::with_capacity(bytes.len());
v.extend_from_slice(bytes);
*bytes = &[];
@@ -67,10 +64,10 @@ impl DecodedValueThunk {
&self.scale_bytes
}
/// Decode the SCALE encoded storage entry into a dynamic [`DecodedValue`] type.
pub fn to_value(&self) -> Result<DecodedValue, Error> {
let val = DecodedValue::decode_as_type(
pub fn to_value(&self) -> Result<DecodedValue, scale_decode::Error> {
let val = scale_value::scale::decode_as_type(
&mut &*self.scale_bytes,
self.type_id,
&self.type_id,
self.metadata.types(),
)?;
Ok(val)
@@ -79,7 +76,7 @@ impl DecodedValueThunk {
pub fn as_type<T: DecodeAsType>(&self) -> Result<T, scale_decode::Error> {
T::decode_as_type(
&mut &self.scale_bytes[..],
self.type_id,
&self.type_id,
self.metadata.types(),
)
}
+184
View File
@@ -0,0 +1,184 @@
use alloc::boxed::Box;
use alloc::string::String;
use derive_more::{Display, From};
use subxt_metadata::StorageHasher;
/// The crate's top-level error type. Each variant wraps one of the more
/// specific error kinds below; `From` conversions are derived so `?` works
/// directly with any of the wrapped types.
#[derive(Debug, Display, From)]
pub enum Error {
    /// Codec error.
    #[display(fmt = "Scale codec error: {_0}")]
    Codec(codec::Error),
    /// Something went wrong trying to access details in the metadata.
    #[display(fmt = "Metadata Error: {_0}")]
    Metadata(MetadataError),
    /// Something went wrong trying to encode or decode a storage address.
    #[display(fmt = "Storage Error: {_0}")]
    StorageAddress(StorageAddressError),
    /// Error decoding to a [`crate::dynamic::Value`].
    #[display(fmt = "Error decoding into dynamic value: {_0}")]
    Decode(scale_decode::Error),
    /// Error encoding from a [`crate::dynamic::Value`].
    #[display(fmt = "Error encoding from dynamic value: {_0}")]
    Encode(scale_encode::Error),
    /// Error constructing the appropriate extrinsic params.
    #[display(fmt = "Extrinsic params error: {_0}")]
    ExtrinsicParams(ExtrinsicParamsError),
}
// Route low-level decode-visitor errors into the `Decode` variant via
// `scale_decode::Error` (the derived `From` only covers the outer error type).
impl From<scale_decode::visitor::DecodeError> for Error {
    fn from(value: scale_decode::visitor::DecodeError) -> Self {
        Error::Decode(value.into())
    }
}

// `std::error::Error` is gated on the "std" feature; elsewhere only `alloc`
// is assumed (see the `alloc::` imports at the top of this file).
#[cfg(feature = "std")]
impl std::error::Error for Error {}
/// Something went wrong trying to access details in the metadata.
// `non_exhaustive` allows adding variants later without a breaking change.
#[derive(Clone, Debug, PartialEq, Display)]
#[non_exhaustive]
pub enum MetadataError {
    /// The DispatchError type isn't available in the metadata
    #[display(fmt = "The DispatchError type isn't available")]
    DispatchErrorNotFound,
    /// Type not found in metadata.
    #[display(fmt = "Type with ID {_0} not found")]
    TypeNotFound(u32),
    /// Pallet not found (index).
    #[display(fmt = "Pallet with index {_0} not found")]
    PalletIndexNotFound(u8),
    /// Pallet not found (name).
    #[display(fmt = "Pallet with name {_0} not found")]
    PalletNameNotFound(String),
    /// Variant not found.
    #[display(fmt = "Variant with index {_0} not found")]
    VariantIndexNotFound(u8),
    /// Constant not found.
    #[display(fmt = "Constant with name {_0} not found")]
    ConstantNameNotFound(String),
    /// Call not found.
    #[display(fmt = "Call with name {_0} not found")]
    CallNameNotFound(String),
    /// Runtime trait not found.
    #[display(fmt = "Runtime trait with name {_0} not found")]
    RuntimeTraitNotFound(String),
    /// Runtime method not found.
    #[display(fmt = "Runtime method with name {_0} not found")]
    RuntimeMethodNotFound(String),
    /// Call type not found in metadata.
    #[display(fmt = "Call type not found in pallet with index {_0}")]
    CallTypeNotFoundInPallet(u8),
    /// Event type not found in metadata.
    #[display(fmt = "Event type not found in pallet with index {_0}")]
    EventTypeNotFoundInPallet(u8),
    /// Storage details not found in metadata.
    #[display(fmt = "Storage details not found in pallet with name {_0}")]
    StorageNotFoundInPallet(String),
    /// Storage entry not found.
    #[display(fmt = "Storage entry {_0} not found")]
    StorageEntryNotFound(String),
    /// The generated interface used is not compatible with the node.
    #[display(fmt = "The generated code is not compatible with the node")]
    IncompatibleCodegen,
    /// Custom value not found.
    #[display(fmt = "Custom value with name {_0} not found")]
    CustomValueNameNotFound(String),
}

#[cfg(feature = "std")]
impl std::error::Error for MetadataError {}
/// Something went wrong trying to encode or decode a storage address.
///
/// Marked `#[non_exhaustive]`: new variants may be added without a
/// breaking change, so downstream matches need a wildcard arm.
#[derive(Clone, Debug, Display)]
#[non_exhaustive]
pub enum StorageAddressError {
    /// Storage lookup does not have the expected number of keys.
    #[display(fmt = "Storage lookup requires {expected} keys but more keys have been provided.")]
    TooManyKeys {
        /// The number of keys provided in the storage address.
        expected: usize,
    },
    /// This storage entry in the metadata does not have the correct number of hashers to fields.
    #[display(
        fmt = "Storage entry in metadata does not have the correct number of hashers to fields"
    )]
    WrongNumberOfHashers {
        /// The number of hashers in the metadata for this storage entry.
        hashers: usize,
        /// The number of fields in the metadata for this storage entry.
        fields: usize,
    },
    /// We weren't given enough bytes to decode the storage address/key.
    #[display(fmt = "Not enough remaining bytes to decode the storage address/key")]
    NotEnoughBytes,
    /// We have leftover bytes after decoding the storage address.
    #[display(fmt = "We have leftover bytes after decoding the storage address")]
    TooManyBytes,
    /// The bytes of a storage address are not the expected address for decoding the storage keys of the address.
    #[display(
        fmt = "Storage address bytes are not the expected format. Addresses need to be at least 16 bytes (pallet ++ entry) and follow a structure given by the hashers defined in the metadata"
    )]
    UnexpectedAddressBytes,
    /// An invalid hasher was used to reconstruct a value from a chunk of bytes that is part of a storage address. Hashers where the hash does not contain the original value are invalid for this purpose.
    #[display(
        fmt = "An invalid hasher was used to reconstruct a value with type ID {ty_id} from a hash formed by a {hasher:?} hasher. This is only possible for concat-style hashers or the identity hasher"
    )]
    HasherCannotReconstructKey {
        /// Type id of the key's type.
        ty_id: u32,
        /// The invalid hasher that caused this error.
        hasher: StorageHasher,
    },
}

// std-only marker impl; `Display`/`Debug` above provide the formatting.
#[cfg(feature = "std")]
impl std::error::Error for StorageAddressError {}
/// An error that can be emitted when trying to construct an instance of [`crate::config::ExtrinsicParams`],
/// encode data from the instance, or match on signed extensions.
#[derive(Display, Debug)]
#[non_exhaustive]
pub enum ExtrinsicParamsError {
/// Cannot find a type id in the metadata. The context provides some additional
/// information about the source of the error (eg the signed extension name).
#[display(fmt = "Cannot find type id '{type_id} in the metadata (context: {context})")]
MissingTypeId {
/// Type ID.
type_id: u32,
/// Some arbitrary context to help narrow the source of the error.
context: &'static str,
},
/// A signed extension in use on some chain was not provided.
#[display(
fmt = "The chain expects a signed extension with the name {_0}, but we did not provide one"
)]
UnknownSignedExtension(String),
/// Some custom error.
#[display(fmt = "Error constructing extrinsic parameters: {_0}")]
Custom(Box<dyn CustomError>),
}
/// Anything implementing this trait can be used in [`ExtrinsicParamsError::Custom`].
#[cfg(feature = "std")]
pub trait CustomError: std::error::Error + Send + Sync + 'static {}
// Blanket impl: any thread-safe std error can be boxed into
// `ExtrinsicParamsError::Custom` without a manual impl.
#[cfg(feature = "std")]
impl<T: std::error::Error + Send + Sync + 'static> CustomError for T {}

/// Anything implementing this trait can be used in [`ExtrinsicParamsError::Custom`].
// no_std fallback: `std::error::Error` is unavailable, so require only
// `Debug + Display` plus thread safety.
#[cfg(not(feature = "std"))]
pub trait CustomError: core::fmt::Debug + core::fmt::Display + Send + Sync + 'static {}
#[cfg(not(feature = "std"))]
impl<T: core::fmt::Debug + core::fmt::Display + Send + Sync + 'static> CustomError for T {}

#[cfg(feature = "std")]
impl std::error::Error for ExtrinsicParamsError {}

// `Infallible` has no values, so this conversion can never actually run;
// it exists to let `?` be used on infallible operations.
impl From<core::convert::Infallible> for ExtrinsicParamsError {
    fn from(value: core::convert::Infallible) -> Self {
        match value {}
    }
}

impl From<Box<dyn CustomError>> for ExtrinsicParamsError {
    fn from(value: Box<dyn CustomError>) -> Self {
        ExtrinsicParamsError::Custom(value)
    }
}
@@ -1,42 +1,48 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use alloc::sync::Arc;
use alloc::vec::Vec;
use codec::{Compact, Decode, Encode};
use derive_where::derive_where;
use scale_decode::{DecodeAsFields, DecodeAsType};
use subxt_metadata::PalletMetadata;
//! A representation of a block of events.
use crate::{error::MetadataError, Config, Error, Metadata};
use super::{Phase, StaticEvent};
use crate::{
client::OnlineClientT,
error::{Error, MetadataError},
events::events_client::get_event_bytes,
metadata::types::PalletMetadata,
Config, Metadata,
};
use codec::{Compact, Decode};
use derivative::Derivative;
use scale_decode::DecodeAsType;
use std::sync::Arc;
/// Trait to uniquely identify the events's identity from the runtime metadata.
///
/// Generated API structures that represent an event implement this trait.
///
/// The trait is utilized to decode emitted events from a block, via obtaining the
/// form of the `Event` from the metadata.
pub trait StaticEvent: DecodeAsFields {
/// Pallet name.
const PALLET: &'static str;
/// Event name.
const EVENT: &'static str;
/// Returns true if the given pallet and event names match this event.
fn is_event(pallet: &str, event: &str) -> bool {
Self::PALLET == pallet && Self::EVENT == event
}
}
/// A collection of events obtained from a block, bundled with the necessary
/// information needed to decode and iterate over them.
#[derive(Derivative)]
#[derivative(Clone(bound = ""))]
#[derive_where(Clone)]
pub struct Events<T: Config> {
metadata: Metadata,
block_hash: T::Hash,
// Note; raw event bytes are prefixed with a Compact<u32> containing
// the number of events to be decoded. The start_idx reflects that, so
// that we can skip over those bytes when decoding them
event_bytes: Arc<[u8]>,
start_idx: usize,
num_events: u32,
marker: core::marker::PhantomData<T>,
}
// Ignore the Metadata when debug-logging events; it's big and distracting.
impl<T: Config> std::fmt::Debug for Events<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl<T: Config> core::fmt::Debug for Events<T> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("Events")
.field("block_hash", &self.block_hash)
.field("event_bytes", &self.event_bytes)
.field("start_idx", &self.start_idx)
.field("num_events", &self.num_events)
@@ -45,7 +51,8 @@ impl<T: Config> std::fmt::Debug for Events<T> {
}
impl<T: Config> Events<T> {
pub(crate) fn new(metadata: Metadata, block_hash: T::Hash, event_bytes: Vec<u8>) -> Self {
/// Create a new [`Events`] instance from the given bytes.
pub fn decode_from(metadata: Metadata, event_bytes: Vec<u8>) -> Self {
// event_bytes is a SCALE encoded vector of events. So, pluck the
// compact encoded length from the front, leaving the remaining bytes
// for our iterating to decode.
@@ -60,34 +67,13 @@ impl<T: Config> Events<T> {
Self {
metadata,
block_hash,
event_bytes: event_bytes.into(),
start_idx,
num_events,
marker: core::marker::PhantomData,
}
}
/// Obtain the events from a block hash given custom metadata and a client.
///
/// # Notes
///
/// - Prefer to use [`crate::events::EventsClient::at`] to obtain the events.
/// - Subxt may fail to decode things that aren't from a runtime using the
/// latest metadata version.
/// - The client may not be able to obtain the block at the given hash. Only
/// archive nodes keep hold of all past block information.
pub async fn new_from_client<Client>(
metadata: Metadata,
block_hash: T::Hash,
client: Client,
) -> Result<Self, Error>
where
Client: OnlineClientT<T>,
{
let event_bytes = get_event_bytes(client.backend(), block_hash).await?;
Ok(Events::new(metadata, block_hash, event_bytes))
}
/// The number of events.
pub fn len(&self) -> u32 {
self.num_events
@@ -99,11 +85,6 @@ impl<T: Config> Events<T> {
self.num_events == 0
}
/// Return the block hash that these events are from.
pub fn block_hash(&self) -> T::Hash {
self.block_hash
}
/// Iterate over all of the events, using metadata to dynamically
/// decode them as we go, and returning the raw bytes and other associated
/// details. If an error occurs, all subsequent iterations return `None`.
@@ -119,7 +100,7 @@ impl<T: Config> Events<T> {
let mut pos = self.start_idx;
let mut index = 0;
std::iter::from_fn(move || {
core::iter::from_fn(move || {
if event_bytes.len() <= pos || num_events == index {
None
} else {
@@ -172,6 +153,17 @@ impl<T: Config> Events<T> {
}
}
/// A phase of a block's execution.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Decode, Encode)]
pub enum Phase {
/// Applying an extrinsic.
ApplyExtrinsic(u32),
/// Finalizing the block.
Finalization,
/// Initializing the block.
Initialization,
}
/// The event details.
#[derive(Debug, Clone)]
pub struct EventDetails<T: Config> {
@@ -194,7 +186,7 @@ pub struct EventDetails<T: Config> {
}
impl<T: Config> EventDetails<T> {
// Attempt to dynamically decode a single event from our events input.
/// Attempt to dynamically decode a single event from our events input.
fn decode_from(
metadata: Metadata,
all_bytes: Arc<[u8]>,
@@ -228,9 +220,9 @@ impl<T: Config> EventDetails<T> {
// Skip over the bytes for this field:
scale_decode::visitor::decode_with_visitor(
input,
field_metadata.ty.id,
&field_metadata.ty.id,
metadata.types(),
scale_decode::visitor::IgnoreVisitor,
scale_decode::visitor::IgnoreVisitor::new(),
)
.map_err(scale_decode::Error::from)?;
}
@@ -284,12 +276,12 @@ impl<T: Config> EventDetails<T> {
/// The name of the pallet from whence the Event originated.
pub fn pallet_name(&self) -> &str {
self.event_metadata().pallet.name()
self.event_metadata().pallet().name()
}
/// The name of the event (ie the name of the variant that it corresponds to).
pub fn variant_name(&self) -> &str {
&self.event_metadata().variant.name
&self.event_metadata().variant().name
}
/// Fetch details from the metadata for this event.
@@ -321,9 +313,7 @@ impl<T: Config> EventDetails<T> {
/// Decode and provide the event fields back in the form of a [`scale_value::Composite`]
/// type which represents the named or unnamed fields that were present in the event.
pub fn field_values(
&self,
) -> Result<scale_value::Composite<scale_value::scale::TypeId>, Error> {
pub fn field_values(&self) -> Result<scale_value::Composite<u32>, Error> {
let bytes = &mut self.field_bytes();
let event_metadata = self.event_metadata();
@@ -331,14 +321,10 @@ impl<T: Config> EventDetails<T> {
.variant
.fields
.iter()
.map(|f| scale_decode::Field::new(f.ty.id, f.name.as_deref()));
.map(|f| scale_decode::Field::new(&f.ty.id, f.name.as_deref()));
use scale_decode::DecodeAsFields;
let decoded = <scale_value::Composite<scale_value::scale::TypeId>>::decode_as_fields(
bytes,
&mut fields,
self.metadata.types(),
)?;
let decoded =
scale_value::scale::decode_as_fields(bytes, &mut fields, self.metadata.types())?;
Ok(decoded)
}
@@ -352,7 +338,7 @@ impl<T: Config> EventDetails<T> {
.variant
.fields
.iter()
.map(|f| scale_decode::Field::new(f.ty.id, f.name.as_deref()));
.map(|f| scale_decode::Field::new(&f.ty.id, f.name.as_deref()));
let decoded =
E::decode_as_fields(&mut self.field_bytes(), &mut fields, self.metadata.types())?;
Ok(Some(decoded))
@@ -369,7 +355,7 @@ impl<T: Config> EventDetails<T> {
let decoded = E::decode_as_type(
&mut &bytes[..],
self.metadata.outer_enums().event_enum_ty(),
&self.metadata.outer_enums().event_enum_ty(),
self.metadata.types(),
)?;
@@ -384,15 +370,24 @@ impl<T: Config> EventDetails<T> {
/// Details for the given event plucked from the metadata.
pub struct EventMetadataDetails<'a> {
pub pallet: PalletMetadata<'a>,
pub variant: &'a scale_info::Variant<scale_info::form::PortableForm>,
pallet: PalletMetadata<'a>,
variant: &'a scale_info::Variant<scale_info::form::PortableForm>,
}
impl<'a> EventMetadataDetails<'a> {
pub fn pallet(&self) -> PalletMetadata<'a> {
self.pallet
}
pub fn variant(&self) -> &'a scale_info::Variant<scale_info::form::PortableForm> {
self.variant
}
}
/// Event related test utilities used outside this module.
#[cfg(test)]
pub(crate) mod test_utils {
use super::*;
use crate::{Config, SubstrateConfig};
use crate::config::{Config, SubstrateConfig};
use codec::Encode;
use frame_metadata::{
v15::{
@@ -537,11 +532,7 @@ pub(crate) mod test_utils {
// Prepend compact encoded length to event bytes:
let mut all_event_bytes = Compact(num_events).encode();
all_event_bytes.extend(event_bytes);
Events::new(
metadata,
<SubstrateConfig as Config>::Hash::default(),
all_event_bytes,
)
Events::decode_from(metadata, all_event_bytes)
}
}
@@ -551,7 +542,8 @@ mod tests {
test_utils::{event_record, events, events_raw, AllEvents, EventRecord},
*,
};
use crate::SubstrateConfig;
use crate::config::SubstrateConfig;
use crate::events::Phase;
use codec::Encode;
use primitive_types::H256;
use scale_info::TypeInfo;
+42
View File
@@ -0,0 +1,42 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.

//! # Subxt-core
//!
//! `#[no_std]` compatible core crate for subxt.

#![cfg_attr(not(feature = "std"), no_std)]

// Re-exported so downstream/generated code can reach `alloc` types
// regardless of whether the "std" feature is enabled.
pub extern crate alloc;

pub mod blocks;
pub mod client;
pub mod config;
pub mod constants;
pub mod custom_values;
pub mod dynamic;
pub mod error;
pub mod events;
pub mod metadata;
pub mod runtime_api;
pub mod storage;
pub mod tx;
pub mod utils;

// Commonly used types re-exported at the crate root for convenience.
pub use config::Config;
pub use error::Error;
pub use metadata::Metadata;

// `#[macro_use]` makes the `cfg_*` helper macros visible crate-wide.
#[macro_use]
mod macros;

/// Dependency crates re-exported for use by generated code.
pub mod ext {
    pub use codec;
    pub use scale_decode;
    pub use scale_encode;

    cfg_substrate_compat! {
        pub use sp_runtime;
        pub use sp_core;
    }
}
+17
View File
@@ -0,0 +1,17 @@
/// Expand each item only when the given cargo feature is enabled, and
/// annotate it for docs.rs so the feature requirement shows in rustdoc.
macro_rules! cfg_feature {
    ($feature:literal, $($item:item)*) => {
        $(
            #[cfg(feature = $feature)]
            #[cfg_attr(docsrs, doc(cfg(feature = $feature)))]
            $item
        )*
    }
}

/// Shorthand: gate items behind the "substrate-compat" feature.
macro_rules! cfg_substrate_compat {
    ($($item:item)*) => {
        crate::macros::cfg_feature!("substrate-compat", $($item)*);
    };
}

// Re-export so the macros can also be invoked by path (`crate::macros::…`).
pub(crate) use {cfg_feature, cfg_substrate_compat};
@@ -3,7 +3,8 @@
// see LICENSE for license details.
use super::Metadata;
use crate::error::Error;
use alloc::vec::Vec;
/// This trait is implemented for all types that also implement [`scale_decode::DecodeAsType`].
pub trait DecodeWithMetadata: Sized {
@@ -12,7 +13,7 @@ pub trait DecodeWithMetadata: Sized {
bytes: &mut &[u8],
type_id: u32,
metadata: &Metadata,
) -> Result<Self, Error>;
) -> Result<Self, scale_decode::Error>;
}
impl<T: scale_decode::DecodeAsType> DecodeWithMetadata for T {
@@ -20,8 +21,8 @@ impl<T: scale_decode::DecodeAsType> DecodeWithMetadata for T {
bytes: &mut &[u8],
type_id: u32,
metadata: &Metadata,
) -> Result<T, Error> {
let val = T::decode_as_type(bytes, type_id, metadata.types())?;
) -> Result<T, scale_decode::Error> {
let val = T::decode_as_type(bytes, &type_id, metadata.types())?;
Ok(val)
}
}
@@ -34,7 +35,7 @@ pub trait EncodeWithMetadata {
type_id: u32,
metadata: &Metadata,
bytes: &mut Vec<u8>,
) -> Result<(), Error>;
) -> Result<(), scale_encode::Error>;
}
impl<T: scale_encode::EncodeAsType> EncodeWithMetadata for T {
@@ -44,8 +45,8 @@ impl<T: scale_encode::EncodeAsType> EncodeWithMetadata for T {
type_id: u32,
metadata: &Metadata,
bytes: &mut Vec<u8>,
) -> Result<(), Error> {
self.encode_as_type_to(type_id, metadata.types(), bytes)?;
) -> Result<(), scale_encode::Error> {
self.encode_as_type_to(&type_id, metadata.types(), bytes)?;
Ok(())
}
}
+137
View File
@@ -0,0 +1,137 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::error::MetadataError;
use alloc::borrow::ToOwned;
use alloc::sync::Arc;
/// A cheaply clone-able representation of the runtime metadata received from a node.
#[derive(Clone, Debug)]
pub struct Metadata {
    // Arc makes cloning cheap: all clones share the one parsed metadata.
    inner: Arc<subxt_metadata::Metadata>,
}

// Deref to the wrapped `subxt_metadata::Metadata` so all of its methods
// are callable directly on this wrapper.
impl core::ops::Deref for Metadata {
    type Target = subxt_metadata::Metadata;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl Metadata {
    /// Wrap the given parsed metadata in an `Arc`, making clones cheap.
    pub fn new(md: subxt_metadata::Metadata) -> Self {
        Metadata {
            inner: Arc::new(md),
        }
    }

    /// Identical to `metadata.pallet_by_name()`, but returns an error if the pallet is not found.
    pub fn pallet_by_name_err(
        &self,
        name: &str,
    ) -> Result<subxt_metadata::PalletMetadata, MetadataError> {
        self.pallet_by_name(name)
            .ok_or_else(|| MetadataError::PalletNameNotFound(name.to_owned()))
    }

    /// Identical to `metadata.pallet_by_index()`, but returns an error if the pallet is not found.
    pub fn pallet_by_index_err(
        &self,
        index: u8,
    ) -> Result<subxt_metadata::PalletMetadata, MetadataError> {
        self.pallet_by_index(index)
            .ok_or(MetadataError::PalletIndexNotFound(index))
    }

    /// Identical to `metadata.runtime_api_trait_by_name()`, but returns an error if the trait is not found.
    pub fn runtime_api_trait_by_name_err(
        &self,
        name: &str,
    ) -> Result<subxt_metadata::RuntimeApiMetadata, MetadataError> {
        self.runtime_api_trait_by_name(name)
            .ok_or_else(|| MetadataError::RuntimeTraitNotFound(name.to_owned()))
    }
}
impl From<subxt_metadata::Metadata> for Metadata {
    fn from(md: subxt_metadata::Metadata) -> Self {
        Metadata::new(md)
    }
}

// Parse raw prefixed runtime metadata into our wrapped representation.
impl TryFrom<frame_metadata::RuntimeMetadataPrefixed> for Metadata {
    type Error = subxt_metadata::TryFromError;
    fn try_from(value: frame_metadata::RuntimeMetadataPrefixed) -> Result<Self, Self::Error> {
        subxt_metadata::Metadata::try_from(value).map(Metadata::from)
    }
}

// Allow decoding `Metadata` straight from SCALE-encoded bytes.
impl codec::Decode for Metadata {
    fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
        subxt_metadata::Metadata::decode(input).map(Metadata::new)
    }
}
/// Some extension methods on [`subxt_metadata::Metadata`] that return Errors instead of Options.
pub trait MetadataExt {
    /// Like `pallet_by_name()`, but returns an error if the pallet is not found.
    fn pallet_by_name_err(
        &self,
        name: &str,
    ) -> Result<subxt_metadata::PalletMetadata, MetadataError>;
    /// Like `pallet_by_index()`, but returns an error if the pallet is not found.
    fn pallet_by_index_err(
        &self,
        index: u8,
    ) -> Result<subxt_metadata::PalletMetadata, MetadataError>;
    /// Like `runtime_api_trait_by_name()`, but returns an error if the trait is not found.
    fn runtime_api_trait_by_name_err(
        &self,
        name: &str,
    ) -> Result<subxt_metadata::RuntimeApiMetadata, MetadataError>;
    /// Like `custom().get()`, but returns an error if the custom value is not found.
    fn custom_value_by_name_err(
        &self,
        name: &str,
    ) -> Result<subxt_metadata::CustomValueMetadata, MetadataError>;
}

impl MetadataExt for subxt_metadata::Metadata {
    /// Identical to `metadata.pallet_by_name()`, but returns an error if the pallet is not found.
    fn pallet_by_name_err(
        &self,
        name: &str,
    ) -> Result<subxt_metadata::PalletMetadata, MetadataError> {
        self.pallet_by_name(name)
            .ok_or_else(|| MetadataError::PalletNameNotFound(name.to_owned()))
    }

    /// Identical to `metadata.pallet_by_index()`, but returns an error if the pallet is not found.
    fn pallet_by_index_err(
        &self,
        index: u8,
    ) -> Result<subxt_metadata::PalletMetadata, MetadataError> {
        self.pallet_by_index(index)
            .ok_or(MetadataError::PalletIndexNotFound(index))
    }

    /// Identical to `metadata.runtime_api_trait_by_name()`, but returns an error if the trait is not found.
    fn runtime_api_trait_by_name_err(
        &self,
        name: &str,
    ) -> Result<subxt_metadata::RuntimeApiMetadata, MetadataError> {
        self.runtime_api_trait_by_name(name)
            .ok_or_else(|| MetadataError::RuntimeTraitNotFound(name.to_owned()))
    }

    /// Identical to `metadata.custom().get()`, but returns an error if the custom value is not found.
    fn custom_value_by_name_err(
        &self,
        name: &str,
    ) -> Result<subxt_metadata::CustomValueMetadata, MetadataError> {
        self.custom()
            .get(name)
            .ok_or_else(|| MetadataError::CustomValueNameNotFound(name.to_owned()))
    }
}
@@ -8,7 +8,7 @@ mod decode_encode_traits;
mod metadata_type;
pub use decode_encode_traits::{DecodeWithMetadata, EncodeWithMetadata};
pub use metadata_type::Metadata;
pub use metadata_type::{Metadata, MetadataExt};
// Expose metadata types under a sub module in case somebody needs to reference them:
pub use subxt_metadata as types;
@@ -2,15 +2,20 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use alloc::borrow::Cow;
use alloc::borrow::ToOwned;
use alloc::string::String;
use alloc::vec::Vec;
use core::marker::PhantomData;
use derivative::Derivative;
use derive_where::derive_where;
use scale_encode::EncodeAsFields;
use scale_value::Composite;
use std::borrow::Cow;
use crate::dynamic::DecodedValueThunk;
use crate::error::MetadataError;
use crate::{metadata::DecodeWithMetadata, Error, Metadata};
use crate::Error;
use crate::metadata::{DecodeWithMetadata, Metadata};
/// This represents a runtime API payload that can call into the runtime of node.
///
@@ -66,11 +71,7 @@ pub trait RuntimeApiPayload {
///
/// This can be created from static values (ie those generated
/// via the `subxt` macro) or dynamic values via [`dynamic`].
#[derive(Derivative)]
#[derivative(
Clone(bound = "ArgsData: Clone"),
Debug(bound = "ArgsData: std::fmt::Debug")
)]
#[derive_where(Clone, Debug, Eq, Ord, PartialEq, PartialOrd; ArgsData)]
pub struct Payload<ArgsData, ReturnTy> {
trait_name: Cow<'static, str>,
method_name: Cow<'static, str>,
@@ -99,7 +100,7 @@ impl<ArgsData: EncodeAsFields, ReturnTy: DecodeWithMetadata> RuntimeApiPayload
.ok_or_else(|| MetadataError::RuntimeMethodNotFound((*self.method_name).to_owned()))?;
let mut fields = api_method
.inputs()
.map(|input| scale_encode::Field::named(input.ty, &input.name));
.map(|input| scale_encode::Field::named(&input.ty, &input.name));
self.args_data
.encode_as_fields_to(&mut fields, metadata.types(), out)?;
@@ -146,7 +147,7 @@ impl<ReturnTy, ArgsData> Payload<ArgsData, ReturnTy> {
method_name: Cow::Borrowed(method_name),
args_data,
validation_hash: Some(hash),
_marker: std::marker::PhantomData,
_marker: core::marker::PhantomData,
}
}
+22
View File
@@ -0,0 +1,22 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.

//! Types associated with accessing and working with storage items.

mod storage_address;
mod storage_key;

pub mod utils;

/// Types representing an address which describes where a storage
/// entry lives and how to properly decode it.
pub mod address {
    pub use super::storage_address::{dynamic, Address, DynamicAddress, StorageAddress};
    pub use super::storage_key::{StaticStorageKey, StorageHashers, StorageKey};
}

pub use storage_key::StorageKey;

// For consistency with other modules, also expose
// the basic address stuff at the root of the module.
pub use storage_address::{dynamic, Address, DynamicAddress, StorageAddress};
+174
View File
@@ -0,0 +1,174 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::{
dynamic::DecodedValueThunk,
error::{Error, MetadataError},
metadata::{DecodeWithMetadata, Metadata},
utils::Yes,
};
use derive_where::derive_where;
use alloc::borrow::{Cow, ToOwned};
use alloc::string::String;
use alloc::vec::Vec;
use super::{storage_key::StorageHashers, StorageKey};
/// This represents a storage address. Anything implementing this trait
/// can be used to fetch and iterate over storage entries.
pub trait StorageAddress {
    /// The target type of the value that lives at this address.
    type Target: DecodeWithMetadata;
    /// The keys type used to construct this address.
    type Keys: StorageKey;
    /// Can an entry be fetched from this address?
    /// Set this type to [`Yes`] to enable the corresponding calls to be made.
    type IsFetchable;
    /// Can a default entry be obtained from this address?
    /// Set this type to [`Yes`] to enable the corresponding calls to be made.
    type IsDefaultable;
    /// Can this address be iterated over?
    /// Set this type to [`Yes`] to enable the corresponding calls to be made.
    type IsIterable;

    /// The name of the pallet that the entry lives under.
    fn pallet_name(&self) -> &str;

    /// The name of the entry in a given pallet that the item is at.
    fn entry_name(&self) -> &str;

    /// Output the non-prefix bytes; that is, any additional bytes that need
    /// to be appended to the key to dig into maps.
    fn append_entry_bytes(&self, metadata: &Metadata, bytes: &mut Vec<u8>) -> Result<(), Error>;

    /// An optional hash which, if present, will be checked against
    /// the node metadata to confirm that the return type matches what
    /// we are expecting.
    fn validation_hash(&self) -> Option<[u8; 32]> {
        // Default: no validation is performed.
        None
    }
}
/// A concrete storage address. This can be created from static values (ie those generated
/// via the `subxt` macro) or dynamic values via [`dynamic`].
// `derive_where` bounds only `Keys` (after the `;`), since the remaining
// generics live solely in PhantomData.
#[derive_where(Clone, Debug, Eq, Ord, PartialEq, PartialOrd; Keys)]
pub struct Address<Keys: StorageKey, ReturnTy, Fetchable, Defaultable, Iterable> {
    pallet_name: Cow<'static, str>,
    entry_name: Cow<'static, str>,
    keys: Keys,
    // If set, checked against the node metadata before use.
    validation_hash: Option<[u8; 32]>,
    _marker: core::marker::PhantomData<(ReturnTy, Fetchable, Defaultable, Iterable)>,
}

/// A typical storage address constructed at runtime rather than via the `subxt` macro; this
/// has no restriction on what it can be used for (since we don't statically know).
pub type DynamicAddress<Keys> = Address<Keys, DecodedValueThunk, Yes, Yes, Yes>;

impl<Keys: StorageKey> DynamicAddress<Keys> {
    /// Creates a new dynamic address. As `Keys` you can use a `Vec<scale_value::Value>`
    pub fn new(pallet_name: impl Into<String>, entry_name: impl Into<String>, keys: Keys) -> Self {
        Self {
            pallet_name: Cow::Owned(pallet_name.into()),
            entry_name: Cow::Owned(entry_name.into()),
            keys,
            // Dynamic addresses can't be validated against generated hashes.
            validation_hash: None,
            _marker: core::marker::PhantomData,
        }
    }
}
// Consolidation: the original had two adjacent `impl` blocks with identical
// generic parameters and bounds; they are merged into one.
impl<Keys, ReturnTy, Fetchable, Defaultable, Iterable>
    Address<Keys, ReturnTy, Fetchable, Defaultable, Iterable>
where
    Keys: StorageKey,
    ReturnTy: DecodeWithMetadata,
{
    /// Create a new [`Address`] using static strings for the pallet and call name.
    /// This is only expected to be used from codegen.
    #[doc(hidden)]
    pub fn new_static(
        pallet_name: &'static str,
        entry_name: &'static str,
        keys: Keys,
        hash: [u8; 32],
    ) -> Self {
        Self {
            pallet_name: Cow::Borrowed(pallet_name),
            entry_name: Cow::Borrowed(entry_name),
            keys,
            validation_hash: Some(hash),
            _marker: core::marker::PhantomData,
        }
    }

    /// Do not validate this storage entry prior to accessing it.
    pub fn unvalidated(self) -> Self {
        Self {
            validation_hash: None,
            ..self
        }
    }

    /// Return bytes representing the root of this storage entry (a hash of the pallet and entry name).
    pub fn to_root_bytes(&self) -> Vec<u8> {
        super::utils::storage_address_root_bytes(self)
    }
}
impl<Keys, ReturnTy, Fetchable, Defaultable, Iterable> StorageAddress
    for Address<Keys, ReturnTy, Fetchable, Defaultable, Iterable>
where
    Keys: StorageKey,
    ReturnTy: DecodeWithMetadata,
{
    type Target = ReturnTy;
    type Keys = Keys;
    type IsFetchable = Fetchable;
    type IsDefaultable = Defaultable;
    type IsIterable = Iterable;

    fn pallet_name(&self) -> &str {
        &self.pallet_name
    }

    fn entry_name(&self) -> &str {
        &self.entry_name
    }

    fn append_entry_bytes(&self, metadata: &Metadata, bytes: &mut Vec<u8>) -> Result<(), Error> {
        // Locate the storage entry for this pallet/entry pair in the metadata...
        let pallet = metadata.pallet_by_name_err(self.pallet_name())?;
        let storage = pallet
            .storage()
            .ok_or_else(|| MetadataError::StorageNotFoundInPallet(self.pallet_name().to_owned()))?;
        let entry = storage
            .entry_by_name(self.entry_name())
            .ok_or_else(|| MetadataError::StorageEntryNotFound(self.entry_name().to_owned()))?;

        // ...then encode our keys into `bytes` using the hashers it declares.
        let hashers = StorageHashers::new(entry.entry_type(), metadata.types())?;
        self.keys
            .encode_storage_key(bytes, &mut hashers.iter(), metadata.types())?;
        Ok(())
    }

    fn validation_hash(&self) -> Option<[u8; 32]> {
        self.validation_hash
    }
}
/// Construct a new dynamic storage lookup.
///
/// Convenience wrapper around [`DynamicAddress::new`].
pub fn dynamic<Keys: StorageKey>(
    pallet_name: impl Into<String>,
    entry_name: impl Into<String>,
    storage_entry_keys: Keys,
) -> DynamicAddress<Keys> {
    // Resolve the string conversions up front, then build the address.
    let pallet = pallet_name.into();
    let entry = entry_name.into();
    DynamicAddress::new(pallet, entry, storage_entry_keys)
}
+475
View File
@@ -0,0 +1,475 @@
use super::utils::hash_bytes;
use crate::{
error::{Error, MetadataError, StorageAddressError},
utils::{Encoded, Static},
};
use alloc::vec;
use alloc::vec::Vec;
use derive_where::derive_where;
use scale_decode::visitor::IgnoreVisitor;
use scale_encode::EncodeAsType;
use scale_info::{PortableRegistry, TypeDef};
use scale_value::Value;
use subxt_metadata::{StorageEntryType, StorageHasher};
/// A collection of storage hashers paired with the type ids of the types they should hash.
/// Can be created for each storage entry in the metadata via [`StorageHashers::new()`].
#[derive(Debug)]
pub struct StorageHashers {
    hashers_and_ty_ids: Vec<(StorageHasher, u32)>,
}

impl StorageHashers {
    /// Creates new [`StorageHashers`] from a storage entry. Looks at the [`StorageEntryType`] and
    /// assigns a hasher to each type id that makes up the key.
    pub fn new(storage_entry: &StorageEntryType, types: &PortableRegistry) -> Result<Self, Error> {
        let mut hashers_and_ty_ids = vec![];

        // Plain (non-map) entries have no keys, so no hashers are recorded.
        if let StorageEntryType::Map {
            hashers, key_ty, ..
        } = storage_entry
        {
            let ty = types
                .resolve(*key_ty)
                .ok_or(MetadataError::TypeNotFound(*key_ty))?;

            if let TypeDef::Tuple(tuple) = &ty.type_def {
                if hashers.len() == 1 {
                    // use the same hasher for all fields, if only 1 hasher present:
                    let hasher = hashers[0];
                    for f in tuple.fields.iter() {
                        hashers_and_ty_ids.push((hasher, f.id));
                    }
                } else if hashers.len() < tuple.fields.len() {
                    // Fewer hashers than tuple fields is an error.
                    return Err(StorageAddressError::WrongNumberOfHashers {
                        hashers: hashers.len(),
                        fields: tuple.fields.len(),
                    }
                    .into());
                } else {
                    // One hasher per field; any surplus hashers are ignored.
                    for (i, f) in tuple.fields.iter().enumerate() {
                        hashers_and_ty_ids.push((hashers[i], f.id));
                    }
                }
            } else {
                // Non-tuple key type: exactly one hasher must be declared.
                if hashers.len() != 1 {
                    return Err(StorageAddressError::WrongNumberOfHashers {
                        hashers: hashers.len(),
                        fields: 1,
                    }
                    .into());
                }
                hashers_and_ty_ids.push((hashers[0], *key_ty));
            };
        }

        Ok(Self { hashers_and_ty_ids })
    }

    /// Creates an iterator over the storage hashers and type ids.
    pub fn iter(&self) -> StorageHashersIter<'_> {
        StorageHashersIter {
            hashers: self,
            idx: 0,
        }
    }
}
/// An iterator over all type ids of the key and the respective hashers.
/// See [`StorageHashers::iter()`].
#[derive(Debug)]
pub struct StorageHashersIter<'a> {
    hashers: &'a StorageHashers,
    // Index of the next (hasher, type id) pair to yield.
    idx: usize,
}

impl<'a> StorageHashersIter<'a> {
    /// Like `next()`, but converts exhaustion into a `TooManyKeys` error
    /// (more keys were supplied than the storage entry declares).
    fn next_or_err(&mut self) -> Result<(StorageHasher, u32), Error> {
        self.next().ok_or_else(|| {
            StorageAddressError::TooManyKeys {
                expected: self.hashers.hashers_and_ty_ids.len(),
            }
            .into()
        })
    }
}
impl<'a> Iterator for StorageHashersIter<'a> {
    type Item = (StorageHasher, u32);

    /// Yield the next (hasher, type id) pair, advancing the cursor.
    fn next(&mut self) -> Option<Self::Item> {
        match self.hashers.hashers_and_ty_ids.get(self.idx) {
            Some(entry) => {
                self.idx += 1;
                Some(*entry)
            }
            None => None,
        }
    }
}

impl<'a> ExactSizeIterator for StorageHashersIter<'a> {
    /// Remaining length: total pairs minus how many we've already yielded.
    fn len(&self) -> usize {
        let total = self.hashers.hashers_and_ty_ids.len();
        total - self.idx
    }
}
/// This trait should be implemented by anything that can be used as one or multiple storage keys.
pub trait StorageKey {
    /// Encodes the storage key into some bytes
    ///
    /// `hashers` yields one (hasher, type id) pair per key part; `types`
    /// resolves the type ids.
    fn encode_storage_key(
        &self,
        bytes: &mut Vec<u8>,
        hashers: &mut StorageHashersIter,
        types: &PortableRegistry,
    ) -> Result<(), Error>;

    /// Attempts to decode the StorageKey given some bytes and a set of hashers and type IDs that they are meant to represent.
    /// The bytes passed to `decode` should start with:
    /// - 1. some fixed size hash (for all hashers except `Identity`)
    /// - 2. the plain key value itself (for `Identity`, `Blake2_128Concat` and `Twox64Concat` hashers)
    fn decode_storage_key(
        bytes: &mut &[u8],
        hashers: &mut StorageHashersIter,
        types: &PortableRegistry,
    ) -> Result<Self, Error>
    where
        Self: Sized + 'static;
}
/// Implement `StorageKey` for `()` which can be used for keyless storage entries,
/// or to otherwise just ignore some entry.
impl StorageKey for () {
    fn encode_storage_key(
        &self,
        _bytes: &mut Vec<u8>,
        hashers: &mut StorageHashersIter,
        _types: &PortableRegistry,
    ) -> Result<(), Error> {
        // Consume one hasher entry if one remains, but write nothing; a
        // missing hasher is deliberately not an error for `()`.
        let _ = hashers.next_or_err();
        Ok(())
    }

    fn decode_storage_key(
        bytes: &mut &[u8],
        hashers: &mut StorageHashersIter,
        types: &PortableRegistry,
    ) -> Result<Self, Error> {
        match hashers.next_or_err() {
            // Skip over the hash (and any trailing key bytes) for this entry.
            Ok((hasher, ty_id)) => {
                consume_hash_returning_key_bytes(bytes, hasher, ty_id, types)?;
                Ok(())
            }
            // No hashers left and no bytes left: nothing to skip.
            Err(_) if bytes.is_empty() => Ok(()),
            Err(err) => Err(err),
        }
    }
}
/// A storage key for static encoded values.
/// The original value is only present at construction, but can be decoded from the contained bytes.
#[derive_where(Clone, Debug, PartialOrd, PartialEq, Eq)]
pub struct StaticStorageKey<K: ?Sized> {
    /// The SCALE-encoded bytes of the original key value.
    bytes: Static<Encoded>,
    /// Ties the key type `K` to this struct without storing a `K`.
    _marker: core::marker::PhantomData<K>,
}
impl<K: codec::Encode + ?Sized> StaticStorageKey<K> {
    /// Creates a new static storage key by SCALE-encoding the given value.
    pub fn new(key: &K) -> Self {
        let encoded = key.encode();
        Self {
            bytes: Static(Encoded(encoded)),
            _marker: core::marker::PhantomData,
        }
    }
}
impl<K: codec::Decode + ?Sized> StaticStorageKey<K> {
    /// Decodes the encoded inner bytes into the type `K`.
    pub fn decoded(&self) -> Result<K, Error> {
        let mut cursor = self.bytes();
        Ok(K::decode(&mut cursor)?)
    }
}
impl<K: ?Sized> StaticStorageKey<K> {
    /// Returns the scale-encoded bytes that make up this key
    pub fn bytes(&self) -> &[u8] {
        self.bytes.0 .0.as_slice()
    }
}
// Note: The ?Sized bound is necessary to support e.g. `StorageKey<[u8]>`.
impl<K: ?Sized> StorageKey for StaticStorageKey<K> {
    fn encode_storage_key(
        &self,
        bytes: &mut Vec<u8>,
        hashers: &mut StorageHashersIter,
        types: &PortableRegistry,
    ) -> Result<(), Error> {
        // Re-encode the stored bytes as the type the metadata expects, then
        // hash them into the output with the hasher it asks for.
        let (hasher, ty_id) = hashers.next_or_err()?;
        let encoded = self.bytes.encode_as_type(&ty_id, types)?;
        hash_bytes(&encoded, hasher, bytes);
        Ok(())
    }

    fn decode_storage_key(
        bytes: &mut &[u8],
        hashers: &mut StorageHashersIter,
        types: &PortableRegistry,
    ) -> Result<Self, Error>
    where
        Self: Sized + 'static,
    {
        let (hasher, ty_id) = hashers.next_or_err()?;
        match consume_hash_returning_key_bytes(bytes, hasher, ty_id, types)? {
            // The hasher appended the plain key bytes; keep a copy of them.
            Some(key_bytes) => Ok(StaticStorageKey {
                bytes: Static(Encoded(key_bytes.to_vec())),
                _marker: core::marker::PhantomData::<K>,
            }),
            // The hasher did not preserve the key, so it cannot be recovered.
            None => {
                Err(StorageAddressError::HasherCannotReconstructKey { ty_id, hasher }.into())
            }
        }
    }
}
impl StorageKey for Vec<scale_value::Value> {
    fn encode_storage_key(
        &self,
        bytes: &mut Vec<u8>,
        hashers: &mut StorageHashersIter,
        types: &PortableRegistry,
    ) -> Result<(), Error> {
        // Each value consumes one hasher; running out of hashers is an error.
        for value in self {
            let (hasher, ty_id) = hashers.next_or_err()?;
            let encoded = value.encode_as_type(&ty_id, types)?;
            hash_bytes(&encoded, hasher, bytes);
        }
        Ok(())
    }

    fn decode_storage_key(
        bytes: &mut &[u8],
        hashers: &mut StorageHashersIter,
        types: &PortableRegistry,
    ) -> Result<Self, Error>
    where
        Self: Sized + 'static,
    {
        let mut values = Vec::new();
        while let Some((hasher, ty_id)) = hashers.next() {
            let value = match consume_hash_returning_key_bytes(bytes, hasher, ty_id, types)? {
                // The key bytes were preserved; decode them into a Value.
                Some(value_bytes) => {
                    scale_value::scale::decode_as_type(&mut &*value_bytes, &ty_id, types)?
                        .remove_context()
                }
                // The key cannot be recovered from this hasher; stand in
                // with an empty composite value.
                None => Value::unnamed_composite([]),
            };
            values.push(value);
        }
        // We've consumed all of the hashers, so we expect to also consume all of the bytes:
        if !bytes.is_empty() {
            return Err(StorageAddressError::TooManyBytes.into());
        }
        Ok(values)
    }
}
// Skip over the hash bytes (including any key at the end), returning bytes
// representing the key if one exists, or None if the hasher has no key appended.
fn consume_hash_returning_key_bytes<'a>(
    bytes: &mut &'a [u8],
    hasher: StorageHasher,
    ty_id: u32,
    types: &PortableRegistry,
) -> Result<Option<&'a [u8]>, Error> {
    // First, consume the fixed-size hash portion.
    let hash_len = hasher.len_excluding_key();
    if bytes.len() < hash_len {
        return Err(StorageAddressError::NotEnoughBytes.into());
    }
    *bytes = &bytes[hash_len..];

    if !hasher.ends_with_key() {
        // There are no key bytes, so return None.
        return Ok(None);
    }

    // Decode (and discard) the key value to learn how many bytes it spans,
    // advancing the input cursor past them in the process.
    let before_key = *bytes;
    scale_decode::visitor::decode_with_visitor(
        bytes,
        &ty_id,
        types,
        IgnoreVisitor::<PortableRegistry>::new(),
    )
    .map_err(|err| Error::Decode(err.into()))?;
    let consumed = before_key.len() - bytes.len();
    Ok(Some(&before_key[..consumed]))
}
/// Generates StorageKey implementations for tuples
///
/// Each tuple element must itself implement [`StorageKey`]; encoding and
/// decoding simply delegate to the elements in order, each consuming its
/// share of the hashers and bytes.
macro_rules! impl_tuples {
    ($($ty:ident $n:tt),+) => {{
        impl<$($ty: StorageKey),+> StorageKey for ($( $ty ),+) {
            fn encode_storage_key(
                &self,
                bytes: &mut Vec<u8>,
                hashers: &mut StorageHashersIter,
                types: &PortableRegistry,
            ) -> Result<(), Error> {
                // Encode each element in tuple order.
                $( self.$n.encode_storage_key(bytes, hashers, types)?; )+
                Ok(())
            }

            fn decode_storage_key(
                bytes: &mut &[u8],
                hashers: &mut StorageHashersIter,
                types: &PortableRegistry,
            ) -> Result<Self, Error>
            where
                Self: Sized + 'static,
            {
                // Decode each element in tuple order from the shared cursor.
                Ok( ( $( $ty::decode_storage_key(bytes, hashers, types)?, )+ ) )
            }
        }
    }};
}
// Implement `StorageKey` for tuples of 2 up to 8 elements.
#[rustfmt::skip]
const _: () = {
    impl_tuples!(A 0, B 1);
    impl_tuples!(A 0, B 1, C 2);
    impl_tuples!(A 0, B 1, C 2, D 3);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7);
};
#[cfg(test)]
mod tests {
    use codec::Encode;
    use scale_info::{meta_type, PortableRegistry, Registry, TypeInfo};
    use subxt_metadata::StorageHasher;

    use crate::utils::Era;
    use alloc::string::String;
    use alloc::vec;
    use alloc::vec::Vec;

    use super::{StaticStorageKey, StorageKey};

    /// Helper for building up, one entry at a time: a type registry, the raw
    /// storage key bytes a node would return, and the matching
    /// `(hasher, type id)` pairs.
    struct KeyBuilder {
        registry: Registry,
        bytes: Vec<u8>,
        hashers_and_ty_ids: Vec<(StorageHasher, u32)>,
    }

    impl KeyBuilder {
        fn new() -> KeyBuilder {
            KeyBuilder {
                registry: Registry::new(),
                bytes: vec![],
                hashers_and_ty_ids: vec![],
            }
        }

        /// Registers `T` in the registry, records its `(hasher, type id)`
        /// pair, and appends `hasher.len_excluding_key()` zero bytes
        /// (standing in for the hash) followed by the SCALE-encoded value.
        fn add<T: TypeInfo + Encode + 'static>(mut self, value: T, hasher: StorageHasher) -> Self {
            let id = self.registry.register_type(&meta_type::<T>()).id;

            self.hashers_and_ty_ids.push((hasher, id));
            // Decoding never inspects the hash bytes themselves, so zeroes
            // are a fine stand-in for the real hash here.
            for _i in 0..hasher.len_excluding_key() {
                self.bytes.push(0);
            }
            value.encode_to(&mut self.bytes);
            self
        }

        fn build(self) -> (PortableRegistry, Vec<u8>, Vec<(StorageHasher, u32)>) {
            (self.registry.into(), self.bytes, self.hashers_and_ty_ids)
        }
    }

    /// Decode the same 4-part key via three differently-nested tuple shapes
    /// and check that all of them recover the same values, across every
    /// combination of hashers (the first part is `()` so any hasher works;
    /// the rest need key-preserving hashers to be recoverable).
    #[test]
    fn storage_key_decoding_fuzz() {
        let hashers = [
            StorageHasher::Blake2_128,
            StorageHasher::Blake2_128Concat,
            StorageHasher::Blake2_256,
            StorageHasher::Identity,
            StorageHasher::Twox128,
            StorageHasher::Twox256,
            StorageHasher::Twox64Concat,
        ];

        // Only these hashers append the plain key, so only they allow the
        // original value to be reconstructed when decoding.
        let key_preserving_hashers = [
            StorageHasher::Blake2_128Concat,
            StorageHasher::Identity,
            StorageHasher::Twox64Concat,
        ];

        type T4A = (
            (),
            StaticStorageKey<u32>,
            StaticStorageKey<String>,
            StaticStorageKey<Era>,
        );
        type T4B = (
            (),
            (StaticStorageKey<u32>, StaticStorageKey<String>),
            StaticStorageKey<Era>,
        );
        type T4C = (
            ((), StaticStorageKey<u32>),
            (StaticStorageKey<String>, StaticStorageKey<Era>),
        );

        let era = Era::Immortal;
        for h0 in hashers {
            for h1 in key_preserving_hashers {
                for h2 in key_preserving_hashers {
                    for h3 in key_preserving_hashers {
                        let (types, bytes, hashers_and_ty_ids) = KeyBuilder::new()
                            .add((), h0)
                            .add(13u32, h1)
                            .add("Hello", h2)
                            .add(era, h3)
                            .build();

                        let hashers = super::StorageHashers { hashers_and_ty_ids };

                        // All three nestings should decode from the same bytes.
                        let keys_a =
                            T4A::decode_storage_key(&mut &bytes[..], &mut hashers.iter(), &types)
                                .unwrap();

                        let keys_b =
                            T4B::decode_storage_key(&mut &bytes[..], &mut hashers.iter(), &types)
                                .unwrap();

                        let keys_c =
                            T4C::decode_storage_key(&mut &bytes[..], &mut hashers.iter(), &types)
                                .unwrap();

                        // ...and all should recover identical values.
                        assert_eq!(keys_a.1.decoded().unwrap(), 13);
                        assert_eq!(keys_b.1 .0.decoded().unwrap(), 13);
                        assert_eq!(keys_c.0 .1.decoded().unwrap(), 13);
                        assert_eq!(keys_a.2.decoded().unwrap(), "Hello");
                        assert_eq!(keys_b.1 .1.decoded().unwrap(), "Hello");
                        assert_eq!(keys_c.1 .0.decoded().unwrap(), "Hello");

                        assert_eq!(keys_a.3.decoded().unwrap(), era);
                        assert_eq!(keys_b.2.decoded().unwrap(), era);
                        assert_eq!(keys_c.1 .1.decoded().unwrap(), era);
                    }
                }
            }
        }
    }
}
+118
View File
@@ -0,0 +1,118 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! These utility methods complement the [`StorageAddress`] trait, but
//! aren't things that should ever be overridden, and so don't exist on
//! the trait itself.
use crate::error::MetadataError;
use crate::metadata::{DecodeWithMetadata, MetadataExt};
use alloc::vec::Vec;
use subxt_metadata::PalletMetadata;
use subxt_metadata::{StorageEntryMetadata, StorageHasher};
use super::StorageAddress;
use crate::{error::Error, metadata::Metadata};
use alloc::borrow::ToOwned;
/// Return the root of a given [`StorageAddress`]: hash the pallet name and entry name
/// and append those bytes to the output.
pub fn write_storage_address_root_bytes<Address: StorageAddress>(
    addr: &Address,
    out: &mut Vec<u8>,
) {
    // The storage root is twox128(pallet name) ++ twox128(entry name).
    for name in [addr.pallet_name(), addr.entry_name()] {
        out.extend(sp_crypto_hashing::twox_128(name.as_bytes()));
    }
}
/// Outputs the [`storage_address_root_bytes`] as well as any additional bytes that represent
/// a lookup in a storage map at that location.
pub fn storage_address_bytes<Address: StorageAddress>(
    addr: &Address,
    metadata: &Metadata,
) -> Result<Vec<u8>, Error> {
    // Root bytes (pallet + entry hashes) first, then any map-key bytes.
    let mut out = Vec::new();
    write_storage_address_root_bytes(addr, &mut out);
    addr.append_entry_bytes(metadata, &mut out)?;
    Ok(out)
}
/// Outputs a vector containing the bytes written by [`write_storage_address_root_bytes`].
pub fn storage_address_root_bytes<Address: StorageAddress>(addr: &Address) -> Vec<u8> {
    let mut out = Vec::new();
    write_storage_address_root_bytes(addr, &mut out);
    out
}
/// Take some SCALE encoded bytes and a [`StorageHasher`] and hash the bytes accordingly.
///
/// The "Concat" hashers append the plain input after the hash, which is what
/// makes the original key recoverable when decoding; `Identity` appends the
/// input with no hash at all.
pub fn hash_bytes(input: &[u8], hasher: StorageHasher, bytes: &mut Vec<u8>) {
    match hasher {
        // No hashing: the input itself is the key material.
        StorageHasher::Identity => bytes.extend(input),
        StorageHasher::Blake2_128 => bytes.extend(sp_crypto_hashing::blake2_128(input)),
        // Hash followed by the plain input (key-preserving).
        StorageHasher::Blake2_128Concat => {
            bytes.extend(sp_crypto_hashing::blake2_128(input));
            bytes.extend(input);
        }
        StorageHasher::Blake2_256 => bytes.extend(sp_crypto_hashing::blake2_256(input)),
        StorageHasher::Twox128 => bytes.extend(sp_crypto_hashing::twox_128(input)),
        StorageHasher::Twox256 => bytes.extend(sp_crypto_hashing::twox_256(input)),
        // Hash followed by the plain input (key-preserving).
        StorageHasher::Twox64Concat => {
            bytes.extend(sp_crypto_hashing::twox_64(input));
            bytes.extend(input);
        }
    }
}
/// Return details about the given storage entry.
///
/// Errors if the pallet is unknown, has no storage, or has no entry with
/// the given name.
pub fn lookup_entry_details<'a>(
    pallet_name: &str,
    entry_name: &str,
    metadata: &'a subxt_metadata::Metadata,
) -> Result<(PalletMetadata<'a>, &'a StorageEntryMetadata), Error> {
    let pallet = metadata.pallet_by_name_err(pallet_name)?;
    let entry = pallet
        .storage()
        .ok_or_else(|| MetadataError::StorageNotFoundInPallet(pallet_name.to_owned()))?
        .entry_by_name(entry_name)
        .ok_or_else(|| MetadataError::StorageEntryNotFound(entry_name.to_owned()))?;
    Ok((pallet, entry))
}
/// Validate a storage address against the metadata.
pub fn validate_storage_address<Address: StorageAddress>(
    address: &Address,
    pallet: PalletMetadata<'_>,
) -> Result<(), Error> {
    // Addresses without a validation hash are accepted as-is.
    match address.validation_hash() {
        Some(hash) => validate_storage(pallet, address.entry_name(), hash),
        None => Ok(()),
    }
}
/// Validate a storage entry against the metadata.
///
/// An unknown entry and a hash mismatch are reported identically: the
/// statically generated code is incompatible with the node's metadata.
fn validate_storage(
    pallet: PalletMetadata<'_>,
    storage_name: &str,
    hash: [u8; 32],
) -> Result<(), Error> {
    match pallet.storage_hash(storage_name) {
        Some(expected) if expected == hash => Ok(()),
        _ => Err(MetadataError::IncompatibleCodegen.into()),
    }
}
/// Given some bytes, a pallet and storage name, decode the response.
pub fn decode_storage_with_metadata<T: DecodeWithMetadata>(
    bytes: &mut &[u8],
    metadata: &Metadata,
    storage_metadata: &StorageEntryMetadata,
) -> Result<T, Error> {
    // The entry's value type tells us how to interpret the raw bytes.
    let value_ty = storage_metadata.entry_type().value_ty();
    Ok(T::decode_with_metadata(bytes, value_ty, metadata)?)
}
@@ -5,15 +5,19 @@
//! This module contains the trait and types used to represent
//! transactions that can be submitted.
use crate::{
dynamic::Value,
error::{Error, MetadataError},
metadata::Metadata,
};
use crate::error::MetadataError;
use crate::metadata::Metadata;
use crate::Error;
use alloc::borrow::{Cow, ToOwned};
use alloc::string::String;
use alloc::vec::Vec;
use codec::Encode;
use scale_encode::EncodeAsFields;
use scale_value::{Composite, ValueDef, Variant};
use std::{borrow::Cow, sync::Arc};
use scale_value::{Composite, Value, ValueDef, Variant};
pub mod signer;
pub use signer::Signer;
/// This represents a transaction payload that can be submitted
/// to a node.
@@ -48,7 +52,7 @@ pub struct ValidationDetails<'a> {
}
/// A transaction payload containing some generic `CallData`.
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct Payload<CallData> {
pallet_name: Cow<'static, str>,
call_name: Cow<'static, str>,
@@ -56,10 +60,6 @@ pub struct Payload<CallData> {
validation_hash: Option<[u8; 32]>,
}
/// A boxed transaction payload.
// Dev Note: Arc used to enable easy cloning (given that we can't have dyn Clone).
pub type BoxedPayload = Payload<Arc<dyn EncodeAsFields + Send + Sync + 'static>>;
/// The type of a payload typically used for dynamic transaction payloads.
pub type DynamicPayload = Payload<Composite<()>>;
@@ -95,19 +95,6 @@ impl<CallData> Payload<CallData> {
}
}
/// Box the payload.
pub fn boxed(self) -> BoxedPayload
where
CallData: EncodeAsFields + Send + Sync + 'static,
{
BoxedPayload {
pallet_name: self.pallet_name,
call_name: self.call_name,
call_data: Arc::new(self.call_data),
validation_hash: self.validation_hash,
}
}
/// Do not validate this call prior to submitting it.
pub fn unvalidated(self) -> Self {
Self {
@@ -165,10 +152,11 @@ impl<CallData: EncodeAsFields> TxPayload for Payload<CallData> {
let mut fields = call
.fields
.iter()
.map(|f| scale_encode::Field::new(f.ty.id, f.name.as_deref()));
.map(|f| scale_encode::Field::new(&f.ty.id, f.name.as_deref()));
self.call_data
.encode_as_fields_to(&mut fields, metadata.types(), out)?;
.encode_as_fields_to(&mut fields, metadata.types(), out)
.expect("The fields are valid types from the metadata, qed;");
Ok(())
}
@@ -6,7 +6,12 @@
//! This doesn't contain much functionality itself, but is easy to convert to/from an `sp_core::AccountId32`
//! for instance, to gain functionality without forcing a dependency on Substrate crates here.
use alloc::format;
use alloc::string::String;
use alloc::vec;
use alloc::vec::Vec;
use codec::{Decode, Encode};
use derive_more::Display;
use serde::{Deserialize, Serialize};
/// A 32-byte cryptographic identifier. This is a simplified version of Substrate's
@@ -100,19 +105,22 @@ impl AccountId32 {
}
/// An error obtained from trying to interpret an SS58 encoded string into an AccountId32
#[derive(thiserror::Error, Clone, Copy, Eq, PartialEq, Debug)]
#[derive(Clone, Copy, Eq, PartialEq, Debug, Display)]
#[allow(missing_docs)]
pub enum FromSs58Error {
#[error("Base 58 requirement is violated")]
#[display(fmt = "Base 58 requirement is violated")]
BadBase58,
#[error("Length is bad")]
#[display(fmt = "Length is bad")]
BadLength,
#[error("Invalid checksum")]
#[display(fmt = "Invalid checksum")]
InvalidChecksum,
#[error("Invalid SS58 prefix byte.")]
#[display(fmt = "Invalid SS58 prefix byte.")]
InvalidPrefix,
}
#[cfg(feature = "std")]
impl std::error::Error for FromSs58Error {}
// We do this just to get a checksum to help verify the validity of the address in to_ss58check
fn ss58hash(data: &[u8]) -> Vec<u8> {
use blake2::{Blake2b512, Digest};
@@ -142,13 +150,13 @@ impl<'de> Deserialize<'de> for AccountId32 {
}
}
impl std::fmt::Display for AccountId32 {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
impl core::fmt::Display for AccountId32 {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
write!(f, "{}", self.to_ss58check())
}
}
impl std::str::FromStr for AccountId32 {
impl core::str::FromStr for AccountId32 {
type Err = FromSs58Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
AccountId32::from_ss58check(s)
@@ -4,13 +4,15 @@
//! Generic `scale_bits` over `bitvec`-like `BitOrder` and `BitFormat` types.
use alloc::vec;
use alloc::vec::Vec;
use codec::{Compact, Input};
use core::marker::PhantomData;
use scale_bits::{
scale::format::{Format, OrderFormat, StoreFormat},
Bits,
};
use scale_decode::IntoVisitor;
use std::marker::PhantomData;
use scale_decode::{IntoVisitor, TypeResolver};
/// Associates `bitvec::store::BitStore` trait with corresponding, type-erased `scale_bits::StoreFormat` enum.
///
@@ -144,45 +146,43 @@ impl<Store: BitStore, Order: BitOrder> codec::Encode for DecodedBits<Store, Orde
}
#[doc(hidden)]
pub struct DecodedBitsVisitor<S, O>(std::marker::PhantomData<(S, O)>);
impl<Store, Order> scale_decode::Visitor for DecodedBitsVisitor<Store, Order> {
pub struct DecodedBitsVisitor<S, O, R: TypeResolver>(core::marker::PhantomData<(S, O, R)>);
impl<Store, Order, R: TypeResolver> scale_decode::Visitor for DecodedBitsVisitor<Store, Order, R> {
type Value<'scale, 'info> = DecodedBits<Store, Order>;
type Error = scale_decode::Error;
type TypeResolver = R;
fn unchecked_decode_as_type<'scale, 'info>(
self,
input: &mut &'scale [u8],
type_id: scale_decode::visitor::TypeId,
types: &'info scale_info::PortableRegistry,
type_id: &R::TypeId,
types: &'info R,
) -> scale_decode::visitor::DecodeAsTypeResult<
Self,
Result<Self::Value<'scale, 'info>, Self::Error>,
> {
let res = scale_decode::visitor::decode_with_visitor(
input,
type_id.0,
types,
Bits::into_visitor(),
)
.map(|bits| DecodedBits {
bits,
_marker: PhantomData,
});
let res =
scale_decode::visitor::decode_with_visitor(input, type_id, types, Bits::into_visitor())
.map(|bits| DecodedBits {
bits,
_marker: PhantomData,
});
scale_decode::visitor::DecodeAsTypeResult::Decoded(res)
}
}
impl<Store, Order> scale_decode::IntoVisitor for DecodedBits<Store, Order> {
type Visitor = DecodedBitsVisitor<Store, Order>;
fn into_visitor() -> Self::Visitor {
type AnyVisitor<R: scale_decode::TypeResolver> = DecodedBitsVisitor<Store, Order, R>;
fn into_visitor<R: TypeResolver>() -> DecodedBitsVisitor<Store, Order, R> {
DecodedBitsVisitor(PhantomData)
}
}
impl<Store, Order> scale_encode::EncodeAsType for DecodedBits<Store, Order> {
fn encode_as_type_to(
fn encode_as_type_to<R: TypeResolver>(
&self,
type_id: u32,
types: &scale_info::PortableRegistry,
type_id: &R::TypeId,
types: &R,
out: &mut Vec<u8>,
) -> Result<(), scale_encode::Error> {
self.bits.encode_as_type_to(type_id, types, out)
+83
View File
@@ -0,0 +1,83 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Miscellaneous utility helpers.
mod account_id;
pub mod bits;
mod era;
mod multi_address;
mod multi_signature;
mod static_type;
mod unchecked_extrinsic;
mod wrapper_opaque;
use alloc::borrow::ToOwned;
use alloc::format;
use alloc::string::String;
use alloc::vec::Vec;
use codec::{Compact, Decode, Encode};
use derive_where::derive_where;
pub use account_id::AccountId32;
pub use era::Era;
pub use multi_address::MultiAddress;
pub use multi_signature::MultiSignature;
pub use static_type::Static;
pub use unchecked_extrinsic::UncheckedExtrinsic;
pub use wrapper_opaque::WrapperKeepOpaque;
// Used in codegen
#[doc(hidden)]
pub use primitive_types::{H160, H256, H512};
/// Wraps an already encoded byte vector, prevents being encoded as a raw byte vector as part of
/// the transaction payload
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct Encoded(pub Vec<u8>);
impl codec::Encode for Encoded {
fn encode(&self) -> Vec<u8> {
self.0.to_owned()
}
}
/// Decodes a compact encoded value from the beginning of the provided bytes,
/// returning the value and any remaining bytes.
pub fn strip_compact_prefix(bytes: &[u8]) -> Result<(u64, &[u8]), codec::Error> {
let cursor = &mut &*bytes;
let val = <Compact<u64>>::decode(cursor)?;
Ok((val.0, *cursor))
}
/// A version of [`core::marker::PhantomData`] that is also Send and Sync (which is fine
/// because regardless of the generic param, it is always possible to Send + Sync this
/// 0 size type).
#[derive(Encode, Decode, scale_info::TypeInfo)]
#[derive_where(Clone, PartialEq, Debug, Eq, Default, Hash)]
#[scale_info(skip_type_params(T))]
#[doc(hidden)]
pub struct PhantomDataSendSync<T>(core::marker::PhantomData<T>);
impl<T> PhantomDataSendSync<T> {
pub fn new() -> Self {
Self(core::marker::PhantomData)
}
}
unsafe impl<T> Send for PhantomDataSendSync<T> {}
unsafe impl<T> Sync for PhantomDataSendSync<T> {}
/// This represents a key-value collection and is SCALE compatible
/// with collections like BTreeMap. This has the same type params
/// as `BTreeMap` which allows us to easily swap the two during codegen.
pub type KeyedVec<K, V> = Vec<(K, V)>;
/// A unit marker struct signalling that some property is true
pub struct Yes;
/// A quick helper to encode some bytes to hex.
pub fn to_hex(bytes: impl AsRef<[u8]>) -> String {
format!("0x{}", hex::encode(bytes.as_ref()))
}
@@ -6,6 +6,7 @@
//! This doesn't contain much functionality itself, but is easy to convert to/from an `sp_runtime::MultiAddress`
//! for instance, to gain functionality without forcing a dependency on Substrate crates here.
use alloc::vec::Vec;
use codec::{Decode, Encode};
/// A multi-format address wrapper for on-chain accounts. This is a simplified version of Substrate's
@@ -3,9 +3,11 @@
// see LICENSE for license details.
use codec::{Decode, Encode};
use scale_decode::{visitor::DecodeAsTypeResult, IntoVisitor, Visitor};
use scale_decode::{visitor::DecodeAsTypeResult, IntoVisitor, TypeResolver, Visitor};
use scale_encode::EncodeAsType;
use alloc::vec::Vec;
/// If the type inside this implements [`Encode`], this will implement [`scale_encode::EncodeAsType`].
/// If the type inside this implements [`Decode`], this will implement [`scale_decode::DecodeAsType`].
///
@@ -18,10 +20,10 @@ use scale_encode::EncodeAsType;
pub struct Static<T>(pub T);
impl<T: Encode> EncodeAsType for Static<T> {
fn encode_as_type_to(
fn encode_as_type_to<R: TypeResolver>(
&self,
_type_id: u32,
_types: &scale_decode::PortableRegistry,
_type_id: &R::TypeId,
_types: &R,
out: &mut Vec<u8>,
) -> Result<(), scale_encode::Error> {
self.0.encode_to(out);
@@ -29,17 +31,18 @@ impl<T: Encode> EncodeAsType for Static<T> {
}
}
pub struct StaticDecodeAsTypeVisitor<T>(std::marker::PhantomData<T>);
pub struct StaticDecodeAsTypeVisitor<T, R>(core::marker::PhantomData<(T, R)>);
impl<T: Decode> Visitor for StaticDecodeAsTypeVisitor<T> {
impl<T: Decode, R: TypeResolver> Visitor for StaticDecodeAsTypeVisitor<T, R> {
type Value<'scale, 'info> = Static<T>;
type Error = scale_decode::Error;
type TypeResolver = R;
fn unchecked_decode_as_type<'scale, 'info>(
self,
input: &mut &'scale [u8],
_type_id: scale_decode::visitor::TypeId,
_types: &'info scale_info::PortableRegistry,
_type_id: &R::TypeId,
_types: &'info R,
) -> DecodeAsTypeResult<Self, Result<Self::Value<'scale, 'info>, Self::Error>> {
use scale_decode::{visitor::DecodeError, Error};
let decoded = T::decode(input)
@@ -50,9 +53,9 @@ impl<T: Decode> Visitor for StaticDecodeAsTypeVisitor<T> {
}
impl<T: Decode> IntoVisitor for Static<T> {
type Visitor = StaticDecodeAsTypeVisitor<T>;
fn into_visitor() -> Self::Visitor {
StaticDecodeAsTypeVisitor(std::marker::PhantomData)
type AnyVisitor<R: TypeResolver> = StaticDecodeAsTypeVisitor<T, R>;
fn into_visitor<R: TypeResolver>() -> StaticDecodeAsTypeVisitor<T, R> {
StaticDecodeAsTypeVisitor(core::marker::PhantomData)
}
}
@@ -64,14 +67,14 @@ impl<T> From<T> for Static<T> {
}
// Static<T> is just a marker type and should be as transparent as possible:
impl<T> std::ops::Deref for Static<T> {
impl<T> core::ops::Deref for Static<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T> std::ops::DerefMut for Static<T> {
impl<T> core::ops::DerefMut for Static<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
@@ -9,12 +9,13 @@
//! runtime APIs. Deriving `EncodeAsType` would lead to the inner
//! bytes to be re-encoded (length prefixed).
use std::marker::PhantomData;
use core::marker::PhantomData;
use codec::{Decode, Encode};
use scale_decode::{visitor::DecodeAsTypeResult, DecodeAsType, IntoVisitor, Visitor};
use scale_decode::{visitor::DecodeAsTypeResult, DecodeAsType, IntoVisitor, TypeResolver, Visitor};
use super::{Encoded, Static};
use alloc::vec::Vec;
/// The unchecked extrinsic from substrate.
#[derive(Clone, Debug, Eq, PartialEq, Encode)]
@@ -52,10 +53,10 @@ impl<Address, Call, Signature, Extra> Decode
impl<Address, Call, Signature, Extra> scale_encode::EncodeAsType
for UncheckedExtrinsic<Address, Call, Signature, Extra>
{
fn encode_as_type_to(
fn encode_as_type_to<R: TypeResolver>(
&self,
type_id: u32,
types: &scale_info::PortableRegistry,
type_id: &R::TypeId,
types: &R,
out: &mut Vec<u8>,
) -> Result<(), scale_encode::Error> {
self.0.encode_as_type_to(type_id, types, out)
@@ -78,32 +79,35 @@ impl<Address, Call, Signature, Extra> From<UncheckedExtrinsic<Address, Call, Sig
}
}
pub struct UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra>(
PhantomData<(Address, Call, Signature, Extra)>,
pub struct UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra, R: TypeResolver>(
PhantomData<(Address, Call, Signature, Extra, R)>,
);
impl<Address, Call, Signature, Extra> Visitor
for UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra>
impl<Address, Call, Signature, Extra, R: TypeResolver> Visitor
for UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra, R>
{
type Value<'scale, 'info> = UncheckedExtrinsic<Address, Call, Signature, Extra>;
type Error = scale_decode::Error;
type TypeResolver = R;
fn unchecked_decode_as_type<'scale, 'info>(
self,
input: &mut &'scale [u8],
type_id: scale_decode::visitor::TypeId,
types: &'info scale_info::PortableRegistry,
type_id: &R::TypeId,
types: &'info R,
) -> DecodeAsTypeResult<Self, Result<Self::Value<'scale, 'info>, Self::Error>> {
DecodeAsTypeResult::Decoded(Self::Value::decode_as_type(input, type_id.0, types))
DecodeAsTypeResult::Decoded(Self::Value::decode_as_type(input, type_id, types))
}
}
impl<Address, Call, Signature, Extra> IntoVisitor
for UncheckedExtrinsic<Address, Call, Signature, Extra>
{
type Visitor = UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra>;
type AnyVisitor<R: TypeResolver> =
UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra, R>;
fn into_visitor() -> Self::Visitor {
fn into_visitor<R: TypeResolver>(
) -> UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra, R> {
UncheckedExtrinsicDecodeAsTypeVisitor(PhantomData)
}
}
@@ -112,6 +116,8 @@ impl<Address, Call, Signature, Extra> IntoVisitor
pub mod tests {
use super::*;
use alloc::vec;
#[test]
fn unchecked_extrinsic_encoding() {
// A tx is basically some bytes with a compact length prefix; ie an encoded vec:
@@ -4,10 +4,13 @@
use super::PhantomDataSendSync;
use codec::{Compact, Decode, DecodeAll, Encode};
use derivative::Derivative;
use scale_decode::{IntoVisitor, Visitor};
use derive_where::derive_where;
use scale_decode::{ext::scale_type_resolver::visitor, IntoVisitor, TypeResolver, Visitor};
use scale_encode::EncodeAsType;
use alloc::format;
use alloc::vec::Vec;
/// A wrapper for any type `T` which implement encode/decode in a way compatible with `Vec<u8>`.
/// [`WrapperKeepOpaque`] stores the type only in its opaque format, aka as a `Vec<u8>`. To
/// access the real type `T` [`Self::try_decode`] needs to be used.
@@ -18,15 +21,8 @@ use scale_encode::EncodeAsType;
// - However, the TypeInfo describes the type as a composite with first a compact encoded length and next the type itself.
// [`Encode`] and [`Decode`] impls will "just work" to take this into a `Vec<u8>`, but we need a custom [`EncodeAsType`]
// and [`Visitor`] implementation to encode and decode based on TypeInfo.
#[derive(Derivative, Encode, Decode)]
#[derivative(
Debug(bound = ""),
Clone(bound = ""),
PartialEq(bound = ""),
Eq(bound = ""),
Default(bound = ""),
Hash(bound = "")
)]
#[derive(Encode, Decode)]
#[derive_where(Debug, Clone, PartialEq, Eq, Default, Hash)]
pub struct WrapperKeepOpaque<T> {
data: Vec<u8>,
_phantom: PhantomDataSendSync<T>,
@@ -74,57 +70,47 @@ impl<T> WrapperKeepOpaque<T> {
}
impl<T> EncodeAsType for WrapperKeepOpaque<T> {
fn encode_as_type_to(
fn encode_as_type_to<R: TypeResolver>(
&self,
type_id: u32,
types: &scale_info::PortableRegistry,
type_id: &R::TypeId,
types: &R,
out: &mut Vec<u8>,
) -> Result<(), scale_encode::Error> {
use scale_encode::error::{Error, ErrorKind, Kind};
let Some(ty) = types.resolve(type_id) else {
return Err(Error::new(ErrorKind::TypeNotFound(type_id)));
};
// Do a basic check that the target shape lines up.
let scale_info::TypeDef::Composite(_) = &ty.type_def else {
return Err(Error::new(ErrorKind::WrongShape {
let visitor = visitor::new(out, |_, _| {
// Check that the target shape lines up: any other shape but composite is wrong.
Err(Error::new(ErrorKind::WrongShape {
actual: Kind::Struct,
expected: type_id,
}));
};
expected_id: format!("{:?}", type_id),
}))
})
.visit_composite(|out, _fields| {
self.data.encode_to(out);
Ok(())
});
// Check that the name also lines up.
if ty.path.ident().as_deref() != Some("WrapperKeepOpaque") {
return Err(Error::new(ErrorKind::WrongShape {
actual: Kind::Struct,
expected: type_id,
}));
}
// Just blat the bytes out.
self.data.encode_to(out);
Ok(())
types
.resolve_type(type_id, visitor)
.map_err(|_| Error::new(ErrorKind::TypeNotFound(format!("{:?}", type_id))))?
}
}
pub struct WrapperKeepOpaqueVisitor<T>(std::marker::PhantomData<T>);
impl<T> Visitor for WrapperKeepOpaqueVisitor<T> {
pub struct WrapperKeepOpaqueVisitor<T, R>(core::marker::PhantomData<(T, R)>);
impl<T, R: TypeResolver> Visitor for WrapperKeepOpaqueVisitor<T, R> {
type Value<'scale, 'info> = WrapperKeepOpaque<T>;
type Error = scale_decode::Error;
type TypeResolver = R;
fn visit_composite<'scale, 'info>(
self,
value: &mut scale_decode::visitor::types::Composite<'scale, 'info>,
_type_id: scale_decode::visitor::TypeId,
value: &mut scale_decode::visitor::types::Composite<'scale, 'info, R>,
_type_id: &R::TypeId,
) -> Result<Self::Value<'scale, 'info>, Self::Error> {
use scale_decode::error::{Error, ErrorKind};
if value.path().ident().as_deref() != Some("WrapperKeepOpaque") {
return Err(Error::custom_str(
"Type to decode is not 'WrapperTypeKeepOpaque'",
));
}
// TODO: When `scale-type-resolver` [provides struct names](https://github.com/paritytech/scale-type-resolver/issues/4), check that this struct name is `WrapperKeepOpaque`
if value.remaining() != 2 {
return Err(Error::new(ErrorKind::WrongLength {
actual_len: value.remaining(),
@@ -151,9 +137,9 @@ impl<T> Visitor for WrapperKeepOpaqueVisitor<T> {
}
impl<T> IntoVisitor for WrapperKeepOpaque<T> {
type Visitor = WrapperKeepOpaqueVisitor<T>;
fn into_visitor() -> Self::Visitor {
WrapperKeepOpaqueVisitor(std::marker::PhantomData)
type AnyVisitor<R: TypeResolver> = WrapperKeepOpaqueVisitor<T, R>;
fn into_visitor<R: TypeResolver>() -> WrapperKeepOpaqueVisitor<T, R> {
WrapperKeepOpaqueVisitor(core::marker::PhantomData)
}
}
@@ -161,6 +147,8 @@ impl<T> IntoVisitor for WrapperKeepOpaque<T> {
mod test {
use scale_decode::DecodeAsType;
use alloc::vec;
use super::*;
// Copied from https://github.com/paritytech/substrate/blob/master/frame/support/src/traits/misc.rs
@@ -198,14 +186,14 @@ mod test {
+ Encode
+ Decode
+ PartialEq
+ std::fmt::Debug
+ core::fmt::Debug
+ scale_info::TypeInfo
+ 'static,
{
let (type_id, types) = make_type::<T>();
let scale_codec_encoded = t.encode();
let encode_as_type_encoded = t.encode_as_type(type_id, &types).unwrap();
let encode_as_type_encoded = t.encode_as_type(&type_id, &types).unwrap();
assert_eq!(
scale_codec_encoded, encode_as_type_encoded,
@@ -213,7 +201,7 @@ mod test {
);
let decode_as_type_bytes = &mut &*scale_codec_encoded;
let decoded_as_type = T::decode_as_type(decode_as_type_bytes, type_id, &types)
let decoded_as_type = T::decode_as_type(decode_as_type_bytes, &type_id, &types)
.expect("decode-as-type decodes");
let decode_scale_codec_bytes = &mut &*scale_codec_encoded;
+10 -10
View File
@@ -2512,7 +2512,7 @@ dependencies = [
]
[[package]]
name = "sp-core-hashing"
name = "sp-crypto-hashing"
version = "13.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb8524f01591ee58b46cd83c9dbc0fcffd2fd730dabec4f59326cd58a00f17e2"
@@ -2551,7 +2551,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "subxt"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"async-trait",
"base58",
@@ -2573,7 +2573,7 @@ dependencies = [
"scale-value",
"serde",
"serde_json",
"sp-core-hashing",
"sp-crypto-hashing",
"subxt-lightclient",
"subxt-macro",
"subxt-metadata",
@@ -2585,7 +2585,7 @@ dependencies = [
[[package]]
name = "subxt-codegen"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"frame-metadata 16.0.0",
"heck",
@@ -2604,7 +2604,7 @@ dependencies = [
[[package]]
name = "subxt-lightclient"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"futures",
"futures-util",
@@ -2619,7 +2619,7 @@ dependencies = [
[[package]]
name = "subxt-macro"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"darling 0.20.3",
"parity-scale-codec",
@@ -2632,18 +2632,18 @@ dependencies = [
[[package]]
name = "subxt-metadata"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"frame-metadata 16.0.0",
"parity-scale-codec",
"scale-info",
"sp-core-hashing",
"sp-crypto-hashing",
"thiserror",
]
[[package]]
name = "subxt-signer"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"bip39",
"hex",
@@ -2655,7 +2655,7 @@ dependencies = [
"secp256k1",
"secrecy",
"sha2 0.10.8",
"sp-core-hashing",
"sp-crypto-hashing",
"subxt",
"thiserror",
"zeroize",
+114 -174
View File
@@ -466,43 +466,19 @@ dependencies = [
[[package]]
name = "darling"
version = "0.14.4"
version = "0.20.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850"
checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391"
dependencies = [
"darling_core 0.14.4",
"darling_macro 0.14.4",
]
[[package]]
name = "darling"
version = "0.20.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e"
dependencies = [
"darling_core 0.20.3",
"darling_macro 0.20.3",
"darling_core",
"darling_macro",
]
[[package]]
name = "darling_core"
version = "0.14.4"
version = "0.20.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0"
dependencies = [
"fnv",
"ident_case",
"proc-macro2",
"quote",
"strsim",
"syn 1.0.109",
]
[[package]]
name = "darling_core"
version = "0.20.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621"
checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f"
dependencies = [
"fnv",
"ident_case",
@@ -514,22 +490,11 @@ dependencies = [
[[package]]
name = "darling_macro"
version = "0.14.4"
version = "0.20.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e"
checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f"
dependencies = [
"darling_core 0.14.4",
"quote",
"syn 1.0.109",
]
[[package]]
name = "darling_macro"
version = "0.20.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5"
dependencies = [
"darling_core 0.20.3",
"darling_core",
"quote",
"syn 2.0.48",
]
@@ -610,9 +575,9 @@ dependencies = [
[[package]]
name = "either"
version = "1.9.0"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a"
[[package]]
name = "equivalent"
@@ -653,9 +618,6 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534"
dependencies = [
"byteorder",
"rand",
"rustc-hex",
"static_assertions",
]
@@ -694,7 +656,6 @@ dependencies = [
"cfg-if",
"parity-scale-codec",
"scale-info",
"serde",
]
[[package]]
@@ -1227,15 +1188,6 @@ dependencies = [
"unicode-normalization",
]
[[package]]
name = "impl-codec"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f"
dependencies = [
"parity-scale-codec",
]
[[package]]
name = "impl-serde"
version = "0.4.0"
@@ -1329,18 +1281,18 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
[[package]]
name = "js-sys"
version = "0.3.67"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a1d36f1235bc969acba30b7f5990b864423a6068a10f7c90ae8f0112e3a59d1"
checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
dependencies = [
"wasm-bindgen",
]
[[package]]
name = "jsonrpsee"
version = "0.21.0"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9579d0ca9fb30da026bac2f0f7d9576ec93489aeb7cd4971dd5b4617d82c79b2"
checksum = "16fcc9dd231e72d22993f1643d5f7f0db785737dbe3c3d7ca222916ab4280795"
dependencies = [
"jsonrpsee-client-transport",
"jsonrpsee-core",
@@ -1350,9 +1302,9 @@ dependencies = [
[[package]]
name = "jsonrpsee-client-transport"
version = "0.21.0"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9f9ed46590a8d5681975f126e22531698211b926129a40a2db47cbca429220"
checksum = "0476c96eb741b40d39dcb39d0124e3b9be9840ec77653c42a0996563ae2a53f7"
dependencies = [
"futures-channel",
"futures-util",
@@ -1373,9 +1325,9 @@ dependencies = [
[[package]]
name = "jsonrpsee-core"
version = "0.21.0"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "776d009e2f591b78c038e0d053a796f94575d66ca4e77dd84bfc5e81419e436c"
checksum = "b974d8f6139efbe8425f32cb33302aba6d5e049556b5bfc067874e7a0da54a2e"
dependencies = [
"anyhow",
"async-lock",
@@ -1398,9 +1350,9 @@ dependencies = [
[[package]]
name = "jsonrpsee-http-client"
version = "0.21.0"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78b7de9f3219d95985eb77fd03194d7c1b56c19bce1abfcc9d07462574b15572"
checksum = "19dc795a277cff37f27173b3ca790d042afcc0372c34a7ca068d2e76de2cb6d1"
dependencies = [
"async-trait",
"hyper",
@@ -1418,9 +1370,9 @@ dependencies = [
[[package]]
name = "jsonrpsee-types"
version = "0.21.0"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3266dfb045c9174b24c77c2dfe0084914bb23a6b2597d70c9dc6018392e1cd1b"
checksum = "b13dac43c1a9fc2648b37f306b0a5b0e29b2a6e1c36a33b95c1948da2494e9c5"
dependencies = [
"anyhow",
"beef",
@@ -1713,18 +1665,18 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pin-project"
version = "1.1.3"
version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422"
checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
version = "1.1.3"
version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
dependencies = [
"proc-macro2",
"quote",
@@ -1794,9 +1746,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2"
dependencies = [
"fixed-hash",
"impl-codec",
"impl-serde",
"scale-info",
"uint",
]
@@ -1947,12 +1896,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc-hex"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6"
[[package]]
name = "rustc_version"
version = "0.4.0"
@@ -2084,71 +2027,42 @@ checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c"
[[package]]
name = "scale-bits"
version = "0.4.0"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "036575c29af9b6e4866ffb7fa055dbf623fe7a9cc159b33786de6013a6969d89"
checksum = "662d10dcd57b1c2a3c41c9cf68f71fb09747ada1ea932ad961aca7e2ca28315f"
dependencies = [
"parity-scale-codec",
"scale-info",
"scale-type-resolver",
"serde",
]
[[package]]
name = "scale-decode"
version = "0.10.0"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7caaf753f8ed1ab4752c6afb20174f03598c664724e0e32628e161c21000ff76"
checksum = "afc79ba56a1c742f5aeeed1f1801f3edf51f7e818f0a54582cac6f131364ea7b"
dependencies = [
"derive_more",
"parity-scale-codec",
"primitive-types",
"scale-bits",
"scale-decode-derive",
"scale-info",
"scale-type-resolver",
"smallvec",
]
[[package]]
name = "scale-decode-derive"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3475108a1b62c7efd1b5c65974f30109a598b2f45f23c9ae030acb9686966db"
dependencies = [
"darling 0.14.4",
"proc-macro-crate 1.3.1",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "scale-encode"
version = "0.5.0"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d70cb4b29360105483fac1ed567ff95d65224a14dd275b6303ed0a654c78de5"
checksum = "628800925a33794fb5387781b883b5e14d130fece9af5a63613867b8de07c5c7"
dependencies = [
"derive_more",
"parity-scale-codec",
"primitive-types",
"scale-bits",
"scale-encode-derive",
"scale-info",
"scale-type-resolver",
"smallvec",
]
[[package]]
name = "scale-encode-derive"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "995491f110efdc6bea96d6a746140e32bfceb4ea47510750a5467295a4707a25"
dependencies = [
"darling 0.14.4",
"proc-macro-crate 1.3.1",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "scale-info"
version = "2.10.0"
@@ -2176,10 +2090,20 @@ dependencies = [
]
[[package]]
name = "scale-typegen"
name = "scale-type-resolver"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00860983481ac590ac87972062909bef0d6a658013b592ccc0f2feb272feab11"
checksum = "10b800069bfd43374e0f96f653e0d46882a2cb16d6d961ac43bea80f26c76843"
dependencies = [
"scale-info",
"smallvec",
]
[[package]]
name = "scale-typegen"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d6108609f017741c78d35967c7afe4aeaa3999b848282581041428e10d23b63"
dependencies = [
"proc-macro2",
"quote",
@@ -2190,12 +2114,10 @@ dependencies = [
[[package]]
name = "scale-value"
version = "0.13.0"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58223c7691bf0bd46b43c9aea6f0472d1067f378d574180232358d7c6e0a8089"
checksum = "c07ccfee963104335c971aaf8b7b0e749be8569116322df23f1f75c4ca9e4a28"
dependencies = [
"base58",
"blake2",
"derive_more",
"either",
"frame-metadata 15.1.0",
@@ -2204,8 +2126,7 @@ dependencies = [
"scale-decode",
"scale-encode",
"scale-info",
"serde",
"yap",
"scale-type-resolver",
]
[[package]]
@@ -2289,9 +2210,9 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73"
[[package]]
name = "serde"
version = "1.0.195"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02"
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
dependencies = [
"serde_derive",
]
@@ -2318,9 +2239,9 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.195"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c"
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
dependencies = [
"proc-macro2",
"quote",
@@ -2329,9 +2250,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.111"
version = "1.0.114"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4"
checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0"
dependencies = [
"itoa",
"ryu",
@@ -2539,10 +2460,10 @@ dependencies = [
]
[[package]]
name = "sp-core-hashing"
version = "13.0.0"
name = "sp-crypto-hashing"
version = "15.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb8524f01591ee58b46cd83c9dbc0fcffd2fd730dabec4f59326cd58a00f17e2"
checksum = "1e0f4990add7b2cefdeca883c0efa99bb4d912cb2196120e1500c0cc099553b0"
dependencies = [
"blake2b_simd",
"byteorder",
@@ -2578,11 +2499,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "subxt"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"async-trait",
"base58",
"blake2",
"derivative",
"either",
"frame-metadata 16.0.0",
@@ -2601,7 +2520,7 @@ dependencies = [
"scale-value",
"serde",
"serde_json",
"sp-core-hashing",
"sp-crypto-hashing",
"subxt-lightclient",
"subxt-macro",
"subxt-metadata",
@@ -2612,7 +2531,7 @@ dependencies = [
[[package]]
name = "subxt-codegen"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"frame-metadata 16.0.0",
"getrandom",
@@ -2631,8 +2550,34 @@ dependencies = [
]
[[package]]
name = "subxt-lightclient"
name = "subxt-core"
version = "0.34.0"
dependencies = [
"base58",
"blake2",
"derivative",
"derive_more",
"frame-metadata 16.0.0",
"hashbrown 0.14.3",
"hex",
"impl-serde",
"parity-scale-codec",
"primitive-types",
"scale-bits",
"scale-decode",
"scale-encode",
"scale-info",
"scale-value",
"serde",
"serde_json",
"sp-core-hashing",
"subxt-metadata",
"tracing",
]
[[package]]
name = "subxt-lightclient"
version = "0.35.0"
dependencies = [
"futures",
"futures-timer",
@@ -2657,9 +2602,9 @@ dependencies = [
[[package]]
name = "subxt-macro"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"darling 0.20.3",
"darling",
"parity-scale-codec",
"proc-macro-error",
"quote",
@@ -2670,13 +2615,14 @@ dependencies = [
[[package]]
name = "subxt-metadata"
version = "0.34.0"
version = "0.35.0"
dependencies = [
"derive_more",
"frame-metadata 16.0.0",
"hashbrown 0.14.3",
"parity-scale-codec",
"scale-info",
"sp-core-hashing",
"thiserror",
"sp-crypto-hashing",
]
[[package]]
@@ -2709,18 +2655,18 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "thiserror"
version = "1.0.56"
version = "1.0.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad"
checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.56"
version = "1.0.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471"
checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81"
dependencies = [
"proc-macro2",
"quote",
@@ -2744,9 +2690,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.35.1"
version = "1.36.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104"
checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931"
dependencies = [
"backtrace",
"bytes",
@@ -3010,9 +2956,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
version = "0.2.90"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1223296a201415c7fad14792dbefaace9bd52b62d33453ade1c5b5f07555406"
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
@@ -3020,9 +2966,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.90"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcdc935b63408d58a32f8cc9738a0bffd8f05cc7c002086c6ef20b7312ad9dcd"
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
dependencies = [
"bumpalo",
"log",
@@ -3047,9 +2993,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.90"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e4c238561b2d428924c49815533a8b9121c664599558a5d9ec51f8a1740a999"
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -3057,9 +3003,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.90"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7"
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
dependencies = [
"proc-macro2",
"quote",
@@ -3070,9 +3016,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.90"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b"
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
[[package]]
name = "wasm-example"
@@ -3134,9 +3080,9 @@ dependencies = [
[[package]]
name = "web-sys"
version = "0.3.67"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58cd2333b6e0be7a39605f0e255892fd7418a682d8da8fe042fe25128794d2ed"
checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef"
dependencies = [
"js-sys",
"wasm-bindgen",
@@ -3304,12 +3250,6 @@ dependencies = [
"zeroize",
]
[[package]]
name = "yap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff4524214bc4629eba08d78ceb1d6507070cc0bcbbed23af74e19e6e924a24cf"
[[package]]
name = "yew"
version = "0.20.0"
+2 -2
View File
@@ -9,10 +9,10 @@ edition = "2021"
futures = "0.3.28"
subxt = { path = "../../subxt", default-features = false, features = ["jsonrpsee", "web"], target_arch = "wasm32" }
yew = { version = "0.20.0", features = ["csr"] }
web-sys = "0.3.63"
web-sys = "0.3.69"
hex = "0.4.3"
yew-router = "0.17.0"
js-sys = "0.3.63"
js-sys = "0.3.69"
wasm-bindgen = "0.2.86"
wasm-bindgen-futures = "0.4.36"
anyhow = "1.0.71"
+3 -1
View File
@@ -7,6 +7,7 @@ use subxt::ext::codec::{Decode, Encode};
use subxt::tx::SubmittableExtrinsic;
use subxt::tx::TxPayload;
use subxt::utils::{AccountId32, MultiSignature};
use subxt::config::DefaultExtrinsicParamsBuilder;
use crate::services::{extension_signature_for_extrinsic, get_accounts, polkadot, Account};
use web_sys::HtmlInputElement;
@@ -155,7 +156,8 @@ impl Component for SigningExamplesComponent {
return Message::Error(anyhow!("MultiSignature Decoding"));
};
let Ok(partial_signed) = api.tx().create_partial_signed_with_nonce(&remark_call, account_nonce, Default::default()) else {
let params = DefaultExtrinsicParamsBuilder::new().nonce(account_nonce).build();
let Ok(partial_signed) = api.tx().create_partial_signed_offline(&remark_call, params) else {
return Message::Error(anyhow!("PartialExtrinsic creation failed"));
};
+2 -2
View File
@@ -130,8 +130,8 @@ pub async fn extension_signature_for_extrinsic(
) -> Result<Vec<u8>, anyhow::Error> {
let genesis_hash = encode_then_hex(&api.genesis_hash());
// These numbers aren't SCALE encoded; their bytes are just converted to hex:
let spec_version = to_hex(&api.runtime_version().spec_version.to_be_bytes());
let transaction_version = to_hex(&api.runtime_version().transaction_version.to_be_bytes());
let spec_version = to_hex(&api.runtime_version().spec_version().to_be_bytes());
let transaction_version = to_hex(&api.runtime_version().transaction_version().to_be_bytes());
let nonce = to_hex(&account_nonce.to_be_bytes());
// If you construct a mortal transaction, then this block hash needs to correspond
// to the block number passed to `Era::mortal()`.
+9 -21
View File
@@ -24,23 +24,14 @@ default = ["native"]
# Exactly 1 of "web" and "native" is expected.
native = [
"smoldot-light/std",
"tokio-stream",
"tokio/sync",
"tokio/rt",
"futures-util",
]
# Enable this for web/wasm builds.
# Exactly 1 of "web" and "native" is expected.
web = [
"getrandom/js",
"smoldot",
"smoldot/std",
"smoldot-light",
"tokio-stream",
"tokio/sync",
"futures-util",
# For the light-client platform.
"wasm-bindgen-futures",
@@ -56,29 +47,26 @@ web = [
]
[dependencies]
futures = { workspace = true }
futures = { workspace = true, features = ["async-await"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["raw_value"] }
serde_json = { workspace = true, features = ["default", "raw_value"] }
thiserror = { workspace = true }
tracing = { workspace = true }
smoldot-light = { workspace = true }
tokio-stream = { workspace = true }
tokio = { workspace = true, features = ["sync"] }
futures-util = { workspace = true }
# Light client support:
smoldot = { workspace = true, optional = true }
smoldot-light = { workspace = true, optional = true }
either = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }
tokio-stream = { workspace = true, optional = true }
futures-util = { workspace = true, optional = true }
# Only needed for web
js-sys = { workspace = true, optional = true }
send_wrapper = { workspace = true, optional = true }
web-sys = { workspace = true, optional = true }
wasm-bindgen = { workspace = true, optional = true }
wasm-bindgen-futures = { workspace = true, optional = true }
smoldot = { workspace = true, optional = true }
pin-project = { workspace = true, optional = true }
futures-timer = { workspace = true, optional = true }
instant = { workspace = true, optional = true }
pin-project = { workspace = true, optional = true }
# Included if "web" feature is enabled, to enable its js feature.
getrandom = { workspace = true, optional = true }
[package.metadata.docs.rs]
+331 -344
View File
@@ -1,43 +1,47 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use futures::stream::StreamExt;
use futures_util::future::{self, Either};
use serde::Deserialize;
use crate::rpc::RpcResponse;
use crate::shared_client::SharedClient;
use crate::{JsonRpcError, LightClientRpcError};
use futures::{stream::StreamExt, FutureExt};
use serde_json::value::RawValue;
use smoldot_light::platform::PlatformRef;
use std::{collections::HashMap, str::FromStr};
use tokio::sync::{mpsc, oneshot};
use tokio_stream::wrappers::UnboundedReceiverStream;
use crate::client::AddedChain;
const LOG_TARGET: &str = "subxt-light-client-background-task";
use super::LightClientRpcError;
use smoldot_light::ChainId;
const LOG_TARGET: &str = "subxt-light-client-background";
/// The response of an RPC method.
/// Response from [`BackgroundTaskHandle::request()`].
pub type MethodResponse = Result<Box<RawValue>, LightClientRpcError>;
/// Response from [`BackgroundTaskHandle::subscribe()`].
pub type SubscriptionResponse = Result<
(
SubscriptionId,
mpsc::UnboundedReceiver<Result<Box<RawValue>, JsonRpcError>>,
),
LightClientRpcError,
>;
/// Type of subscription IDs we can get back.
pub type SubscriptionId = String;
/// Message protocol between the front-end client that submits the RPC requests
/// and the backend handler that produces responses from the chain.
///
/// The light client uses a single object [`smoldot_light::JsonRpcResponses`] to
/// handle all requests and subscriptions from a chain. A background task is spawned
/// to multiplex the rpc responses and to provide them back to their rightful submitters.
/// and the background task which fetches responses from Smoldot. Hidden behind
/// the [`BackgroundTaskHandle`].
#[derive(Debug)]
pub enum FromSubxt {
enum Message {
/// The RPC method request.
Request {
/// The method of the request.
method: String,
/// The parameters of the request.
params: String,
/// Channel used to send back the result.
params: Option<Box<RawValue>>,
/// Channel used to send back the method response.
sender: oneshot::Sender<MethodResponse>,
/// The ID of the chain used to identify the chain.
chain_id: ChainId,
},
/// The RPC subscription (pub/sub) request.
Subscription {
@@ -46,37 +50,175 @@ pub enum FromSubxt {
/// The method to unsubscribe.
unsubscribe_method: String,
/// The parameters of the request.
params: String,
/// Channel used to send back the subscription ID if successful.
sub_id: oneshot::Sender<MethodResponse>,
/// Channel used to send back the notifications.
sender: mpsc::UnboundedSender<Box<RawValue>>,
/// The ID of the chain used to identify the chain.
chain_id: ChainId,
params: Option<Box<RawValue>>,
/// Channel used to send back the subscription response.
sender: oneshot::Sender<SubscriptionResponse>,
},
}
/// Background task data.
/// A handle to communicate with the background task.
#[derive(Clone, Debug)]
pub struct BackgroundTaskHandle {
to_backend: mpsc::UnboundedSender<Message>,
}
impl BackgroundTaskHandle {
/// Make an RPC request via the background task.
pub async fn request(&self, method: String, params: Option<Box<RawValue>>) -> MethodResponse {
let (tx, rx) = oneshot::channel();
self.to_backend
.send(Message::Request {
method,
params,
sender: tx,
})
.map_err(|_e| LightClientRpcError::BackgroundTaskDropped)?;
match rx.await {
Err(_e) => Err(LightClientRpcError::BackgroundTaskDropped),
Ok(response) => response,
}
}
/// Subscribe to some RPC method via the background task.
pub async fn subscribe(
&self,
method: String,
params: Option<Box<RawValue>>,
unsubscribe_method: String,
) -> SubscriptionResponse {
let (tx, rx) = oneshot::channel();
self.to_backend
.send(Message::Subscription {
method,
params,
unsubscribe_method,
sender: tx,
})
.map_err(|_e| LightClientRpcError::BackgroundTaskDropped)?;
match rx.await {
Err(_e) => Err(LightClientRpcError::BackgroundTaskDropped),
Ok(response) => response,
}
}
}
/// A background task which runs with [`BackgroundTask::run()`] and manages messages
/// coming to/from Smoldot.
#[allow(clippy::type_complexity)]
pub struct BackgroundTask<TPlatform: PlatformRef, TChain> {
/// Smoldot light client implementation that leverages the exposed platform.
client: smoldot_light::Client<TPlatform, TChain>,
/// Generates an unique monotonically increasing ID for each chain.
request_id_per_chain: HashMap<smoldot_light::ChainId, usize>,
channels: BackgroundTaskChannels,
data: BackgroundTaskData<TPlatform, TChain>,
}
impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
/// Constructs a new [`BackgroundTask`].
pub(crate) fn new(
client: SharedClient<TPlatform, TChain>,
chain_id: smoldot_light::ChainId,
from_back: smoldot_light::JsonRpcResponses,
) -> (BackgroundTask<TPlatform, TChain>, BackgroundTaskHandle) {
let (tx, rx) = mpsc::unbounded_channel();
let bg_task = BackgroundTask {
channels: BackgroundTaskChannels {
from_front: UnboundedReceiverStream::new(rx),
from_back,
},
data: BackgroundTaskData {
client,
chain_id,
last_request_id: 0,
pending_subscriptions: HashMap::new(),
requests: HashMap::new(),
subscriptions: HashMap::new(),
},
};
let bg_handle = BackgroundTaskHandle { to_backend: tx };
(bg_task, bg_handle)
}
/// Run the background task, which:
/// - Forwards messages/subscription requests to Smoldot from the front end.
/// - Forwards responses back from Smoldot to the front end.
pub async fn run(self) {
let chain_id = self.data.chain_id;
let mut channels = self.channels;
let mut data = self.data;
loop {
tokio::pin! {
let from_front_fut = channels.from_front.next().fuse();
let from_back_fut = channels.from_back.next().fuse();
}
futures::select! {
// Message coming from the front end/client.
front_message = from_front_fut => {
let Some(message) = front_message else {
tracing::trace!(target: LOG_TARGET, "Subxt channel closed");
break;
};
tracing::trace!(
target: LOG_TARGET,
"Received register message {:?}",
message
);
data.handle_requests(message).await;
},
// Message coming from Smoldot.
back_message = from_back_fut => {
let Some(back_message) = back_message else {
tracing::trace!(target: LOG_TARGET, "Smoldot RPC responses channel closed");
break;
};
tracing::trace!(
target: LOG_TARGET,
"Received smoldot RPC chain {:?} result {:?}",
chain_id, back_message
);
data.handle_rpc_response(back_message);
}
}
}
tracing::trace!(target: LOG_TARGET, "Task closed");
}
}
struct BackgroundTaskChannels {
/// Messages sent into this background task from the front end.
from_front: UnboundedReceiverStream<Message>,
/// Messages sent into the background task from Smoldot.
from_back: smoldot_light::JsonRpcResponses,
}
struct BackgroundTaskData<TPlatform: PlatformRef, TChain> {
/// A smoldot light client that can be shared.
client: SharedClient<TPlatform, TChain>,
/// Knowing the chain ID helps with debugging, but isn't overwise necessary.
chain_id: smoldot_light::ChainId,
/// Know which Id to use next for new requests/subscriptions.
last_request_id: usize,
/// Map the request ID of a RPC method to the frontend `Sender`.
requests: HashMap<(usize, smoldot_light::ChainId), oneshot::Sender<MethodResponse>>,
requests: HashMap<usize, oneshot::Sender<MethodResponse>>,
/// Subscription calls first need to make a plain RPC method
/// request to obtain the subscription ID.
///
/// The RPC method request is made in the background and the response should
/// not be sent back to the user.
/// Map the request ID of a RPC method to the frontend `Sender`.
id_to_subscription: HashMap<(usize, smoldot_light::ChainId), PendingSubscription>,
pending_subscriptions: HashMap<usize, PendingSubscription>,
/// Map the subscription ID to the frontend `Sender`.
///
/// The subscription ID is entirely generated by the node (smoldot). Therefore, it is
/// possible for two distinct subscriptions of different chains to have the same subscription ID.
subscriptions: HashMap<(usize, smoldot_light::ChainId), ActiveSubscription>,
subscriptions: HashMap<String, ActiveSubscription>,
}
/// The state needed to resolve the subscription ID and send
@@ -85,66 +227,49 @@ struct PendingSubscription {
/// Send the method response ID back to the user.
///
/// It contains the subscription ID if successful, or an JSON RPC error object.
sub_id_sender: oneshot::Sender<MethodResponse>,
/// The subscription state that is added to the `subscriptions` map only
/// if the subscription ID is successfully sent back to the user.
subscription_state: ActiveSubscription,
}
impl PendingSubscription {
/// Transforms the pending subscription into an active subscription.
fn into_parts(self) -> (oneshot::Sender<MethodResponse>, ActiveSubscription) {
(self.sub_id_sender, self.subscription_state)
}
}
/// The state of the subscription.
struct ActiveSubscription {
/// Channel to send the subscription notifications back to frontend.
sender: mpsc::UnboundedSender<Box<RawValue>>,
response_sender: oneshot::Sender<SubscriptionResponse>,
/// The unsubscribe method to call when the user drops the receiver
/// part of the channel.
unsubscribe_method: String,
}
impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
/// Constructs a new [`BackgroundTask`].
pub fn new(
client: smoldot_light::Client<TPlatform, TChain>,
) -> BackgroundTask<TPlatform, TChain> {
BackgroundTask {
client,
request_id_per_chain: Default::default(),
requests: Default::default(),
id_to_subscription: Default::default(),
subscriptions: Default::default(),
}
}
/// The state of the subscription.
struct ActiveSubscription {
/// Channel to send the subscription notifications back to frontend.
notification_sender: mpsc::UnboundedSender<Result<Box<RawValue>, JsonRpcError>>,
/// The unsubscribe method to call when the user drops the receiver
/// part of the channel.
unsubscribe_method: String,
}
impl<TPlatform: PlatformRef, TChain> BackgroundTaskData<TPlatform, TChain> {
/// Fetch and increment the request ID.
fn next_id(&mut self, chain_id: smoldot_light::ChainId) -> usize {
let next = self.request_id_per_chain.entry(chain_id).or_insert(1);
let id = *next;
*next = next.wrapping_add(1);
id
fn next_id(&mut self) -> usize {
self.last_request_id = self.last_request_id.wrapping_add(1);
self.last_request_id
}
/// Handle the registration messages received from the user.
async fn handle_requests(&mut self, message: FromSubxt) {
async fn handle_requests(&mut self, message: Message) {
match message {
FromSubxt::Request {
Message::Request {
method,
params,
sender,
chain_id,
} => {
let id = self.next_id(chain_id);
let id = self.next_id();
let chain_id = self.chain_id;
let params = match &params {
Some(params) => params.get(),
None => "null",
};
let request = format!(
r#"{{"jsonrpc":"2.0","id":"{}", "method":"{}","params":{}}}"#,
id, method, params
);
self.requests.insert((id, chain_id), sender);
self.requests.insert(id, sender);
tracing::trace!(target: LOG_TARGET, "Tracking request id={id} chain={chain_id:?}");
let result = self.client.json_rpc_request(request, chain_id);
@@ -157,12 +282,12 @@ impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
let sender = self
.requests
.remove(&(id, chain_id))
.remove(&id)
.expect("Channel is inserted above; qed");
// Send the error back to frontend.
if sender
.send(Err(LightClientRpcError::Request(err.to_string())))
.send(Err(LightClientRpcError::SmoldotError(err.to_string())))
.is_err()
{
tracing::warn!(
@@ -174,32 +299,32 @@ impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
tracing::trace!(target: LOG_TARGET, "Submitted to smoldot request with id={id}");
}
}
FromSubxt::Subscription {
Message::Subscription {
method,
unsubscribe_method,
params,
sub_id,
sender,
chain_id,
} => {
let id = self.next_id();
let chain_id = self.chain_id;
// For subscriptions we need to make a plain RPC request to the subscription method.
// The server will return as a result the subscription ID.
let id = self.next_id(chain_id);
let params = match &params {
Some(params) => params.get(),
None => "null",
};
let request = format!(
r#"{{"jsonrpc":"2.0","id":"{}", "method":"{}","params":{}}}"#,
id, method, params
);
tracing::trace!(target: LOG_TARGET, "Tracking subscription request id={id} chain={chain_id:?}");
let subscription_id_state = PendingSubscription {
sub_id_sender: sub_id,
subscription_state: ActiveSubscription {
sender,
unsubscribe_method,
},
let pending_subscription = PendingSubscription {
response_sender: sender,
unsubscribe_method,
};
self.id_to_subscription
.insert((id, chain_id), subscription_id_state);
self.pending_subscriptions.insert(id, pending_subscription);
let result = self.client.json_rpc_request(request, chain_id);
if let Err(err) = result {
@@ -209,14 +334,14 @@ impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
err.to_string()
);
let subscription_id_state = self
.id_to_subscription
.remove(&(id, chain_id))
.pending_subscriptions
.remove(&id)
.expect("Channels are inserted above; qed");
// Send the error back to frontend.
if subscription_id_state
.sub_id_sender
.send(Err(LightClientRpcError::Request(err.to_string())))
.response_sender
.send(Err(LightClientRpcError::SmoldotError(err.to_string())))
.is_err()
{
tracing::warn!(
@@ -232,19 +357,75 @@ impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
}
/// Parse the response received from the light client and sent it to the appropriate user.
fn handle_rpc_response(&mut self, chain_id: smoldot_light::ChainId, response: String) {
tracing::trace!(target: LOG_TARGET, "Received from smoldot response={response} chain={chain_id:?}");
fn handle_rpc_response(&mut self, response: String) {
let chain_id = self.chain_id;
tracing::trace!(target: LOG_TARGET, "Received from smoldot response='{response}' chain={chain_id:?}");
match RpcResponse::from_str(&response) {
Ok(RpcResponse::Error { id, error }) => {
Ok(RpcResponse::Method { id, result }) => {
let Ok(id) = id.parse::<usize>() else {
tracing::warn!(target: LOG_TARGET, "Cannot send response. Id={id} chain={chain_id:?} is not a valid number");
return;
};
// Send the response back.
if let Some(sender) = self.requests.remove(&id) {
if sender.send(Ok(result)).is_err() {
tracing::warn!(
target: LOG_TARGET,
"Cannot send method response to id={id} chain={chain_id:?}",
);
}
} else if let Some(pending_subscription) = self.pending_subscriptions.remove(&id) {
let Ok(sub_id) = serde_json::from_str::<SubscriptionId>(result.get()) else {
tracing::warn!(
target: LOG_TARGET,
"Subscription id='{result}' chain={chain_id:?} is not a valid string",
);
return;
};
tracing::trace!(target: LOG_TARGET, "Received subscription id={sub_id} chain={chain_id:?}");
let (sub_tx, sub_rx) = mpsc::unbounded_channel();
// Send the method response and a channel to receive notifications back.
if pending_subscription
.response_sender
.send(Ok((sub_id.clone(), sub_rx)))
.is_err()
{
tracing::warn!(
target: LOG_TARGET,
"Cannot send subscription ID response to id={id} chain={chain_id:?}",
);
return;
}
// Store the other end of the notif channel to send future subscription notifications to.
self.subscriptions.insert(
sub_id,
ActiveSubscription {
notification_sender: sub_tx,
unsubscribe_method: pending_subscription.unsubscribe_method,
},
);
} else {
tracing::warn!(
target: LOG_TARGET,
"Response id={id} chain={chain_id:?} is not tracked",
);
}
}
Ok(RpcResponse::MethodError { id, error }) => {
let Ok(id) = id.parse::<usize>() else {
tracing::warn!(target: LOG_TARGET, "Cannot send error. Id={id} chain={chain_id:?} is not a valid number");
return;
};
if let Some(sender) = self.requests.remove(&(id, chain_id)) {
if let Some(sender) = self.requests.remove(&id) {
if sender
.send(Err(LightClientRpcError::Request(error.to_string())))
.send(Err(LightClientRpcError::JsonRpcError(JsonRpcError(error))))
.is_err()
{
tracing::warn!(
@@ -252,12 +433,10 @@ impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
"Cannot send method response to id={id} chain={chain_id:?}",
);
}
} else if let Some(subscription_id_state) =
self.id_to_subscription.remove(&(id, chain_id))
{
} else if let Some(subscription_id_state) = self.pending_subscriptions.remove(&id) {
if subscription_id_state
.sub_id_sender
.send(Err(LightClientRpcError::Request(error.to_string())))
.response_sender
.send(Err(LightClientRpcError::JsonRpcError(JsonRpcError(error))))
.is_err()
{
tracing::warn!(
@@ -267,95 +446,44 @@ impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
}
}
}
Ok(RpcResponse::Method { id, result }) => {
let Ok(id) = id.parse::<usize>() else {
tracing::warn!(target: LOG_TARGET, "Cannot send response. Id={id} chain={chain_id:?} is not a valid number");
return;
};
// Send the response back.
if let Some(sender) = self.requests.remove(&(id, chain_id)) {
if sender.send(Ok(result)).is_err() {
tracing::warn!(
target: LOG_TARGET,
"Cannot send method response to id={id} chain={chain_id:?}",
);
}
} else if let Some(pending_subscription) =
self.id_to_subscription.remove(&(id, chain_id))
{
let Ok(sub_id) = result
.get()
.trim_start_matches('"')
.trim_end_matches('"')
.parse::<usize>()
else {
tracing::warn!(
target: LOG_TARGET,
"Subscription id={result} chain={chain_id:?} is not a valid number",
);
return;
};
tracing::trace!(target: LOG_TARGET, "Received subscription id={sub_id} chain={chain_id:?}");
let (sub_id_sender, active_subscription) = pending_subscription.into_parts();
if sub_id_sender.send(Ok(result)).is_err() {
tracing::warn!(
target: LOG_TARGET,
"Cannot send method response to id={id} chain={chain_id:?}",
);
return;
}
// Track this subscription ID if send is successful.
self.subscriptions
.insert((sub_id, chain_id), active_subscription);
} else {
Ok(RpcResponse::Notification {
method,
subscription_id,
result,
}) => {
let Some(active_subscription) = self.subscriptions.get_mut(&subscription_id) else {
tracing::warn!(
target: LOG_TARGET,
"Response id={id} chain={chain_id:?} is not tracked",
"Subscription response id={subscription_id} chain={chain_id:?} method={method} is not tracked",
);
return;
};
if active_subscription
.notification_sender
.send(Ok(result))
.is_err()
{
self.unsubscribe(&subscription_id, chain_id);
}
}
Ok(RpcResponse::Subscription { method, id, result }) => {
let Ok(id) = id.parse::<usize>() else {
tracing::warn!(target: LOG_TARGET, "Cannot send subscription. Id={id} chain={chain_id:?} is not a valid number");
return;
};
let Some(subscription_state) = self.subscriptions.get_mut(&(id, chain_id)) else {
Ok(RpcResponse::NotificationError {
method,
subscription_id,
error,
}) => {
let Some(active_subscription) = self.subscriptions.get_mut(&subscription_id) else {
tracing::warn!(
target: LOG_TARGET,
"Subscription response id={id} chain={chain_id:?} method={method} is not tracked",
"Subscription error id={subscription_id} chain={chain_id:?} method={method} is not tracked",
);
return;
};
if subscription_state.sender.send(result).is_ok() {
// Nothing else to do, user is informed about the notification.
return;
}
// User dropped the receiver, unsubscribe from the method and remove internal tracking.
let Some(subscription_state) = self.subscriptions.remove(&(id, chain_id)) else {
// State is checked to be some above, so this should never happen.
return;
};
// Make a call to unsubscribe from this method.
let unsub_id = self.next_id(chain_id);
let request = format!(
r#"{{"jsonrpc":"2.0","id":"{}", "method":"{}","params":["{}"]}}"#,
unsub_id, subscription_state.unsubscribe_method, id
);
if let Err(err) = self.client.json_rpc_request(request, chain_id) {
tracing::warn!(
target: LOG_TARGET,
"Failed to unsubscribe id={id:?} chain={chain_id:?} method={:?} err={err:?}", subscription_state.unsubscribe_method
);
} else {
tracing::debug!(target: LOG_TARGET,"Unsubscribe id={id:?} chain={chain_id:?} method={:?}", subscription_state.unsubscribe_method);
if active_subscription
.notification_sender
.send(Err(JsonRpcError(error)))
.is_err()
{
self.unsubscribe(&subscription_id, chain_id);
}
}
Err(err) => {
@@ -364,169 +492,28 @@ impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
}
}
/// Perform the main background task:
/// - receiving requests from subxt RPC method / subscriptions
/// - provides the results from the light client back to users.
pub async fn start_task<TPlat: smoldot_light::platform::PlatformRef>(
&mut self,
from_subxt: mpsc::UnboundedReceiver<FromSubxt>,
from_node: Vec<AddedChain<TPlat>>,
) {
let from_subxt_event = tokio_stream::wrappers::UnboundedReceiverStream::new(from_subxt);
// Unsubscribe from a subscription.
fn unsubscribe(&mut self, subscription_id: &str, chain_id: smoldot_light::ChainId) {
let Some(active_subscription) = self.subscriptions.remove(subscription_id) else {
// Subscription doesn't exist so nothing more to do.
return;
};
let from_node = from_node.into_iter().map(|rpc| {
Box::pin(futures::stream::unfold(rpc, |mut rpc| async move {
let response = rpc.rpc_responses.next().await;
Some(((response, rpc.chain_id), rpc))
}))
});
let stream_combinator = futures::stream::select_all(from_node);
// Build a call to unsubscribe from this method.
let unsub_id = self.next_id();
let request = format!(
r#"{{"jsonrpc":"2.0","id":"{}", "method":"{}","params":["{}"]}}"#,
unsub_id, active_subscription.unsubscribe_method, subscription_id
);
tokio::pin!(from_subxt_event, stream_combinator);
let mut from_subxt_event_fut = from_subxt_event.next();
let mut from_node_event_fut = stream_combinator.next();
loop {
match future::select(from_subxt_event_fut, from_node_event_fut).await {
// Message received from subxt.
Either::Left((subxt_message, previous_fut)) => {
let Some(message) = subxt_message else {
tracing::trace!(target: LOG_TARGET, "Subxt channel closed");
break;
};
tracing::trace!(
target: LOG_TARGET,
"Received register message {:?}",
message
);
self.handle_requests(message).await;
from_subxt_event_fut = from_subxt_event.next();
from_node_event_fut = previous_fut;
}
// Message received from rpc handler: lightclient response.
Either::Right((node_message, previous_fut)) => {
let Some((node_message, chain)) = node_message else {
tracing::trace!(target: LOG_TARGET, "Smoldot closed all RPC channels");
break;
};
// Smoldot returns `None` if the chain has been removed (which subxt does not remove).
let Some(response) = node_message else {
tracing::trace!(target: LOG_TARGET, "Smoldot RPC responses channel closed");
break;
};
tracing::trace!(
target: LOG_TARGET,
"Received smoldot RPC chain {:?} result {:?}",
chain, response
);
self.handle_rpc_response(chain, response);
// Advance backend, save frontend.
from_subxt_event_fut = previous_fut;
from_node_event_fut = stream_combinator.next();
}
}
// Submit it.
if let Err(err) = self.client.json_rpc_request(request, chain_id) {
tracing::warn!(
target: LOG_TARGET,
"Failed to unsubscribe id={subscription_id} chain={chain_id:?} method={:?} err={err:?}", active_subscription.unsubscribe_method
);
} else {
tracing::debug!(target: LOG_TARGET,"Unsubscribe id={subscription_id} chain={chain_id:?} method={:?}", active_subscription.unsubscribe_method);
}
tracing::trace!(target: LOG_TARGET, "Task closed");
}
}
/// The RPC response from the light-client.
/// This can either be a response of a method, or a notification from a subscription.
#[derive(Debug, Clone)]
enum RpcResponse {
Method {
/// Response ID.
id: String,
/// The result of the method call.
result: Box<RawValue>,
},
Subscription {
/// RPC method that generated the notification.
method: String,
/// Subscription ID.
id: String,
/// Result.
result: Box<RawValue>,
},
Error {
/// Response ID.
id: String,
/// Error.
error: Box<RawValue>,
},
}
impl std::str::FromStr for RpcResponse {
type Err = serde_json::Error;
fn from_str(response: &str) -> Result<Self, Self::Err> {
// Helper structures to deserialize from raw RPC strings.
#[derive(Deserialize, Debug)]
struct Response {
/// JSON-RPC version.
#[allow(unused)]
jsonrpc: String,
/// Result.
result: Box<RawValue>,
/// Request ID
id: String,
}
#[derive(Deserialize)]
struct NotificationParams {
/// The ID of the subscription.
subscription: String,
/// Result.
result: Box<RawValue>,
}
#[derive(Deserialize)]
struct ResponseNotification {
/// JSON-RPC version.
#[allow(unused)]
jsonrpc: String,
/// RPC method that generated the notification.
method: String,
/// Result.
params: NotificationParams,
}
#[derive(Deserialize)]
struct ErrorResponse {
/// JSON-RPC version.
#[allow(unused)]
jsonrpc: String,
/// Request ID.
id: String,
/// Error.
error: Box<RawValue>,
}
// Check if the response can be mapped as an RPC method response.
let result: Result<Response, _> = serde_json::from_str(response);
if let Ok(response) = result {
return Ok(RpcResponse::Method {
id: response.id,
result: response.result,
});
}
let result: Result<ResponseNotification, _> = serde_json::from_str(response);
if let Ok(notification) = result {
return Ok(RpcResponse::Subscription {
id: notification.params.subscription,
method: notification.method,
result: notification.params.result,
});
}
let error: ErrorResponse = serde_json::from_str(response)?;
Ok(RpcResponse::Error {
id: error.id,
error: error.error,
})
}
}
+71
View File
@@ -0,0 +1,71 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use serde_json::Value;
use std::borrow::Cow;
/// Something went wrong building chain config.
///
/// Currently this is only returned by [`ChainConfig::set_bootnodes`] when
/// the provided chain spec cannot be parsed as a JSON object.
#[non_exhaustive]
#[derive(thiserror::Error, Debug)]
pub enum ChainConfigError {
    /// The provided chain spec is the wrong shape.
    #[error("Invalid chain spec format")]
    InvalidSpecFormat,
}
/// Configuration to connect to a chain.
pub struct ChainConfig<'a> {
    // The chain spec to use. Borrowed from the caller where possible;
    // becomes owned once it has been modified (eg by `set_bootnodes`).
    chain_spec: Cow<'a, str>,
}
impl<'a> From<&'a str> for ChainConfig<'a> {
fn from(chain_spec: &'a str) -> Self {
ChainConfig::chain_spec(chain_spec)
}
}
impl<'a> From<String> for ChainConfig<'a> {
fn from(chain_spec: String) -> Self {
ChainConfig::chain_spec(chain_spec)
}
}
impl<'a> ChainConfig<'a> {
    /// Construct a chain config from a chain spec.
    pub fn chain_spec(chain_spec: impl Into<Cow<'a, str>>) -> Self {
        Self {
            chain_spec: chain_spec.into(),
        }
    }

    /// Set the bootnodes to the given ones.
    ///
    /// This parses the chain spec as JSON, replaces its `bootNodes` entry
    /// with the provided addresses, and re-serializes the result. Fails with
    /// [`ChainConfigError::InvalidSpecFormat`] if the spec isn't a JSON object.
    pub fn set_bootnodes<S: AsRef<str>>(
        self,
        bootnodes: impl IntoIterator<Item = S>,
    ) -> Result<Self, ChainConfigError> {
        let mut spec: Value = serde_json::from_str(&self.chain_spec)
            .map_err(|_e| ChainConfigError::InvalidSpecFormat)?;

        // The spec must be a JSON object for us to attach bootnodes to it.
        let Some(spec_object) = spec.as_object_mut() else {
            return Err(ChainConfigError::InvalidSpecFormat);
        };

        let nodes: Vec<Value> = bootnodes
            .into_iter()
            .map(|node| Value::String(node.as_ref().to_owned()))
            .collect();
        spec_object.insert("bootNodes".to_string(), Value::Array(nodes));

        Ok(Self {
            chain_spec: Cow::Owned(spec.to_string()),
        })
    }

    // Used internally to fetch the chain spec back out.
    pub(crate) fn as_chain_spec(&self) -> &str {
        &self.chain_spec
    }
}
+239 -32
View File
@@ -2,52 +2,259 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Low level light client implementation for RPC method and
//! subscriptions requests.
//!
//! The client implementation supports both native and wasm
//! environments.
//!
//! This leverages the smoldot crate to connect to the chain.
//! A wrapper around [`smoldot_light`] which provides a light client capable of connecting
//! to Substrate based chains.
#![deny(missing_docs)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[cfg(any(
all(feature = "web", feature = "native"),
not(any(feature = "web", feature = "native"))
))]
compile_error!("subxt: exactly one of the 'web' and 'native' features should be used.");
compile_error!("subxt-lightclient: exactly one of the 'web' and 'native' features should be used.");
mod background;
mod client;
mod platform;
mod shared_client;
// mod receiver;
mod background;
mod chain_config;
mod rpc;
// Used to enable the js feature for wasm.
#[cfg(feature = "web")]
#[allow(unused_imports)]
pub use getrandom as _;
use background::{BackgroundTask, BackgroundTaskHandle};
use futures::Stream;
use platform::DefaultPlatform;
use serde_json::value::RawValue;
use shared_client::SharedClient;
use std::future::Future;
use tokio::sync::mpsc;
pub use client::{AddedChain, LightClientRpc, RawLightClientRpc};
pub use chain_config::{ChainConfig, ChainConfigError};
/// Re-exports of the smoldot related objects.
pub mod smoldot {
pub use smoldot_light::{
platform::PlatformRef, AddChainConfig, AddChainConfigJsonRpc, ChainId, Client,
JsonRpcResponses,
};
#[cfg(feature = "native")]
#[cfg_attr(docsrs, doc(cfg(feature = "native")))]
pub use smoldot_light::platform::default::DefaultPlatform;
}
/// Light client error.
/// Things that can go wrong when constructing the [`LightClient`].
#[derive(Debug, thiserror::Error)]
pub enum LightClientRpcError {
pub enum LightClientError {
/// Error encountered while adding the chain to the light-client.
#[error("Failed to add the chain to the light client: {0}.")]
AddChainError(String),
/// Error originated while trying to submit a RPC request.
#[error("RPC request cannot be sent: {0}.")]
Request(String),
}
/// Things that can go wrong calling methods of [`LightClientRpc`].
#[derive(Debug, thiserror::Error)]
pub enum LightClientRpcError {
    /// Error response from the JSON-RPC server.
    #[error("{0}")]
    JsonRpcError(JsonRpcError),
    /// Smoldot could not handle the RPC call.
    #[error("Smoldot could not handle the RPC call: {0}.")]
    SmoldotError(String),
    /// Background task dropped.
    #[error("The background task was dropped.")]
    BackgroundTaskDropped,
}
/// An error response from the JSON-RPC server (ie smoldot) in response to
/// a method call or as a subscription notification.
///
/// The inner value is the raw JSON error object as returned by the server.
#[derive(Debug, thiserror::Error)]
#[error("RPC Error: {0}.")]
pub struct JsonRpcError(Box<RawValue>);
/// This represents a single light client connection to the network. Instantiate
/// it with [`LightClient::relay_chain()`] to communicate with a relay chain, and
/// then call [`LightClient::parachain()`] to establish connections to parachains.
#[derive(Clone)]
pub struct LightClient {
    /// The shared smoldot client that all chains are added to.
    client: SharedClient<DefaultPlatform>,
    /// The chain ID of the relay chain; `parachain()` uses it as the
    /// potential relay chain for any parachain it adds.
    relay_chain_id: smoldot_light::ChainId,
}
impl LightClient {
    /// Given a chain spec, establish a connection to a relay chain. Any subsequent calls to
    /// [`LightClient::parachain()`] will set this as the relay chain.
    ///
    /// # Panics
    ///
    /// The panic behaviour depends on the feature flag being used:
    ///
    /// ## Native
    ///
    /// Panics when called outside of a `tokio` runtime context.
    ///
    /// ## Web
    ///
    /// If smoldot panics, then the promise created will be leaked. For more details, see
    /// https://docs.rs/wasm-bindgen-futures/latest/wasm_bindgen_futures/fn.future_to_promise.html.
    pub fn relay_chain<'a>(
        chain_config: impl Into<ChainConfig<'a>>,
    ) -> Result<(Self, LightClientRpc), LightClientError> {
        // A fresh smoldot client; the platform differs between the
        // "native" and "web" builds (see the `platform` module).
        let mut client = smoldot_light::Client::new(platform::build_platform());
        let chain_config = chain_config.into();
        let chain_spec = chain_config.as_chain_spec();
        let config = smoldot_light::AddChainConfig {
            specification: chain_spec,
            json_rpc: smoldot_light::AddChainConfigJsonRpc::Enabled {
                // Effectively unbounded: allow as many pending requests and
                // subscriptions as the smoldot types can represent.
                max_pending_requests: u32::MAX.try_into().unwrap(),
                max_subscriptions: u32::MAX,
            },
            database_content: "",
            // A relay chain has no relay chain of its own.
            potential_relay_chains: std::iter::empty(),
            user_data: (),
        };
        let added_chain = client
            .add_chain(config)
            .map_err(|err| LightClientError::AddChainError(err.to_string()))?;
        let relay_chain_id = added_chain.chain_id;
        let rpc_responses = added_chain
            .json_rpc_responses
            .expect("Light client RPC configured; qed");
        // Wrap the client so the RPC handle and this LightClient can share it.
        let shared_client: SharedClient<_> = client.into();
        let light_client_rpc =
            LightClientRpc::new_raw(shared_client.clone(), relay_chain_id, rpc_responses);
        let light_client = Self {
            client: shared_client,
            relay_chain_id,
        };
        Ok((light_client, light_client_rpc))
    }

    /// Given a chain spec, establish a connection to a parachain.
    ///
    /// # Panics
    ///
    /// The panic behaviour depends on the feature flag being used:
    ///
    /// ## Native
    ///
    /// Panics when called outside of a `tokio` runtime context.
    ///
    /// ## Web
    ///
    /// If smoldot panics, then the promise created will be leaked. For more details, see
    /// https://docs.rs/wasm-bindgen-futures/latest/wasm_bindgen_futures/fn.future_to_promise.html.
    pub fn parachain<'a>(
        &self,
        chain_config: impl Into<ChainConfig<'a>>,
    ) -> Result<LightClientRpc, LightClientError> {
        let chain_config = chain_config.into();
        let chain_spec = chain_config.as_chain_spec();
        let config = smoldot_light::AddChainConfig {
            specification: chain_spec,
            json_rpc: smoldot_light::AddChainConfigJsonRpc::Enabled {
                // Same effectively-unbounded limits as `relay_chain()`.
                max_pending_requests: u32::MAX.try_into().unwrap(),
                max_subscriptions: u32::MAX,
            },
            database_content: "",
            // Tie this chain to the relay chain established in `relay_chain()`.
            potential_relay_chains: std::iter::once(self.relay_chain_id),
            user_data: (),
        };
        let added_chain = self
            .client
            .add_chain(config)
            .map_err(|err| LightClientError::AddChainError(err.to_string()))?;
        let chain_id = added_chain.chain_id;
        let rpc_responses = added_chain
            .json_rpc_responses
            .expect("Light client RPC configured; qed");
        Ok(LightClientRpc::new_raw(
            self.client.clone(),
            chain_id,
            rpc_responses,
        ))
    }
}
/// This represents a single RPC connection to a specific chain, and is constructed by calling
/// one of the methods on [`LightClient`]. Using this, you can make RPC requests to the chain.
#[derive(Clone, Debug)]
pub struct LightClientRpc {
    /// Handle for sending requests to the background task driving this chain.
    handle: BackgroundTaskHandle,
}
impl LightClientRpc {
    // Dev note: this would provide a "low level" interface if one is needed.
    // Do we actually need to provide this, or can we entirely hide Smoldot?
    /// Construct a [`LightClientRpc`], spawning the background task that
    /// routes requests and responses between callers and smoldot for the
    /// given `chain_id`.
    pub(crate) fn new_raw<TPlat, TChain>(
        client: impl Into<SharedClient<TPlat, TChain>>,
        chain_id: smoldot_light::ChainId,
        rpc_responses: smoldot_light::JsonRpcResponses,
    ) -> Self
    where
        TPlat: smoldot_light::platform::PlatformRef + Send + 'static,
        TChain: Send + 'static,
    {
        let (background_task, background_handle) =
            BackgroundTask::new(client.into(), chain_id, rpc_responses);
        // For now we spawn the background task internally, but later we can expose
        // methods to give this back to the user so that they can exert backpressure.
        spawn(async move { background_task.run().await });
        LightClientRpc {
            handle: background_handle,
        }
    }

    /// Make an RPC request to a chain, getting back a result.
    pub async fn request(
        &self,
        method: String,
        params: Option<Box<RawValue>>,
    ) -> Result<Box<RawValue>, LightClientRpcError> {
        self.handle.request(method, params).await
    }

    /// Subscribe to some RPC method, getting back a stream of notifications.
    ///
    /// `unsub` is the RPC method used to unsubscribe when the returned
    /// subscription is no longer wanted.
    pub async fn subscribe(
        &self,
        method: String,
        params: Option<Box<RawValue>>,
        unsub: String,
    ) -> Result<LightClientRpcSubscription, LightClientRpcError> {
        let (id, notifications) = self.handle.subscribe(method, params, unsub).await?;
        Ok(LightClientRpcSubscription { id, notifications })
    }
}
/// A stream of notifications handed back when [`LightClientRpc::subscribe`] is called.
pub struct LightClientRpcSubscription {
    /// Notification payloads (or error notifications) forwarded by the background task.
    notifications: mpsc::UnboundedReceiver<Result<Box<RawValue>, JsonRpcError>>,
    /// The subscription ID returned by the server when subscribing.
    id: String,
}
impl LightClientRpcSubscription {
    /// Return the subscription ID that the server assigned to this subscription.
    pub fn id(&self) -> &str {
        &self.id
    }
}
impl Stream for LightClientRpcSubscription {
    type Item = Result<Box<RawValue>, JsonRpcError>;

    fn poll_next(
        mut self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Option<Self::Item>> {
        // Delegate straight to the underlying channel; the stream ends
        // when the sender half of the channel is dropped.
        self.notifications.poll_recv(cx)
    }
}
/// A quick helper to spawn a task that works for WASM.
///
/// On the "native" feature this uses `tokio::spawn` (and so, per the docs on
/// [`LightClient::relay_chain`], panics outside of a tokio runtime context);
/// on "web" it schedules the future on the wasm-bindgen local executor.
/// The future's output is discarded in both cases.
fn spawn<F: Future + Send + 'static>(future: F) {
    #[cfg(feature = "native")]
    tokio::spawn(async move {
        future.await;
    });
    #[cfg(feature = "web")]
    wasm_bindgen_futures::spawn_local(async move {
        future.await;
    });
}
+5 -5
View File
@@ -11,16 +11,16 @@ mod wasm_platform;
#[cfg(feature = "web")]
mod wasm_socket;
pub use helpers::build_platform;
pub use helpers::{build_platform, DefaultPlatform};
#[cfg(feature = "native")]
mod helpers {
use smoldot_light::platform::default::DefaultPlatform as Platform;
use std::sync::Arc;
pub type PlatformType = Arc<Platform>;
pub type DefaultPlatform = Arc<Platform>;
pub fn build_platform() -> PlatformType {
pub fn build_platform() -> DefaultPlatform {
Platform::new(
"subxt-light-client".into(),
env!("CARGO_PKG_VERSION").into(),
@@ -32,9 +32,9 @@ mod helpers {
mod helpers {
use super::wasm_platform::SubxtPlatform as Platform;
pub type PlatformType = Platform;
pub type DefaultPlatform = Platform;
pub fn build_platform() -> PlatformType {
pub fn build_platform() -> DefaultPlatform {
Platform::new()
}
}
+2 -2
View File
@@ -124,14 +124,14 @@ impl PlatformRef for SubxtPlatform {
port,
} => {
let addr = SocketAddr::from((ip, port));
format!("ws://{}", addr.to_string())
format!("ws://{}", addr)
}
Address::WebSocketIp {
ip: IpAddr::V6(ip),
port,
} => {
let addr = SocketAddr::from((ip, port));
format!("ws://{}", addr.to_string())
format!("ws://{}", addr)
}
// The API user of the `PlatformRef` trait is never supposed to open connections of
+1 -1
View File
@@ -111,7 +111,7 @@ impl WasmSocket {
let mut inner = inner.lock().expect("Mutex is poised; qed");
let bytes = js_sys::Uint8Array::new(&buffer).to_vec();
inner.data.extend(bytes.into_iter());
inner.data.extend(bytes);
if let Some(waker) = inner.waker.take() {
waker.wake();
+132
View File
@@ -0,0 +1,132 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use serde::Deserialize;
use serde_json::value::RawValue;
/// The RPC response from the light-client.
/// This can either be a response of a method, or a notification from a subscription.
#[derive(Debug, Clone)]
pub enum RpcResponse {
    Method {
        /// Response ID.
        id: String,
        /// The result of the method call.
        result: Box<RawValue>,
    },
    MethodError {
        /// Response ID.
        id: String,
        /// Error.
        error: Box<RawValue>,
    },
    Notification {
        /// RPC method that generated the notification.
        method: String,
        /// Subscription ID.
        subscription_id: String,
        /// Result.
        result: Box<RawValue>,
    },
    NotificationError {
        /// RPC method that generated the notification.
        method: String,
        /// Subscription ID.
        subscription_id: String,
        /// Error.
        error: Box<RawValue>,
    },
}
impl std::str::FromStr for RpcResponse {
    // No error detail: a payload that matches none of the four known
    // JSON-RPC shapes is simply not something we can route.
    type Err = ();

    fn from_str(response: &str) -> Result<Self, Self::Err> {
        // Wire shapes for the four payload kinds smoldot can emit. Field
        // names must match the JSON keys; `jsonrpc` is required to be
        // present even though it is otherwise unused.

        // A successful method response.
        #[derive(Deserialize, Debug)]
        struct MethodPayload {
            #[allow(unused)]
            jsonrpc: String,
            id: String,
            result: Box<RawValue>,
        }

        // An error method response.
        #[derive(Deserialize)]
        struct MethodErrorPayload {
            #[allow(unused)]
            jsonrpc: String,
            id: String,
            error: Box<RawValue>,
        }

        // A subscription notification.
        #[derive(Deserialize)]
        struct NotificationPayload {
            #[allow(unused)]
            jsonrpc: String,
            method: String,
            params: NotificationParams,
        }
        #[derive(Deserialize)]
        struct NotificationParams {
            subscription: String,
            result: Box<RawValue>,
        }

        // A subscription error notification.
        #[derive(Deserialize)]
        struct NotificationErrorPayload {
            #[allow(unused)]
            jsonrpc: String,
            method: String,
            params: NotificationErrorParams,
        }
        #[derive(Deserialize)]
        struct NotificationErrorParams {
            subscription: String,
            error: Box<RawValue>,
        }

        // Try each shape in turn (method response, notification, then the
        // two error shapes). We can do this more efficiently eg how
        // jsonrpsee_types does.
        if let Ok(payload) = serde_json::from_str::<MethodPayload>(response) {
            return Ok(RpcResponse::Method {
                id: payload.id,
                result: payload.result,
            });
        }
        if let Ok(payload) = serde_json::from_str::<NotificationPayload>(response) {
            return Ok(RpcResponse::Notification {
                subscription_id: payload.params.subscription,
                method: payload.method,
                result: payload.params.result,
            });
        }
        if let Ok(payload) = serde_json::from_str::<MethodErrorPayload>(response) {
            return Ok(RpcResponse::MethodError {
                id: payload.id,
                error: payload.error,
            });
        }
        if let Ok(payload) = serde_json::from_str::<NotificationErrorPayload>(response) {
            return Ok(RpcResponse::NotificationError {
                method: payload.method,
                subscription_id: payload.params.subscription,
                error: payload.params.error,
            });
        }

        // Nothing matched. There's no single best error to return here,
        // since the input differs from all of the expected shapes.
        Err(())
    }
}
+47
View File
@@ -0,0 +1,47 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use smoldot_light as sl;
use std::sync::{Arc, Mutex};
/// This wraps [`smoldot_light::Client`] so that it can be cloned and shared.
///
/// Clones are cheap: they all point at the same underlying client via an
/// [`Arc`], and access is serialized through a [`Mutex`].
#[derive(Clone)]
pub struct SharedClient<TPlat: sl::platform::PlatformRef, TChain = ()> {
    // The single shared smoldot client instance.
    client: Arc<Mutex<sl::Client<TPlat, TChain>>>,
}
// Allow a `SharedClient` to be created directly from a `smoldot_light::Client`,
// taking ownership of it and wrapping it for shared access.
impl<TPlat: sl::platform::PlatformRef, TChain> From<sl::Client<TPlat, TChain>>
    for SharedClient<TPlat, TChain>
{
    fn from(client: sl::Client<TPlat, TChain>) -> Self {
        SharedClient {
            client: Arc::new(Mutex::new(client)),
        }
    }
}
impl<TPlat: sl::platform::PlatformRef, TChain> SharedClient<TPlat, TChain> {
    /// Delegates to [`smoldot_light::Client::json_rpc_request()`].
    ///
    /// Locks the shared client for the duration of the call. Panics if the
    /// mutex was poisoned by a panic on another thread.
    pub(crate) fn json_rpc_request(
        &self,
        json_rpc_request: impl Into<String>,
        chain_id: sl::ChainId,
    ) -> Result<(), sl::HandleRpcError> {
        self.client
            .lock()
            .expect("mutex should not be poisoned")
            .json_rpc_request(json_rpc_request, chain_id)
    }

    /// Delegates to [`smoldot_light::Client::add_chain()`].
    ///
    /// Locks the shared client for the duration of the call. Panics if the
    /// mutex was poisoned by a panic on another thread.
    pub(crate) fn add_chain(
        &self,
        config: sl::AddChainConfig<'_, TChain, impl Iterator<Item = sl::ChainId>>,
    ) -> Result<sl::AddChainSuccess, sl::AddChainError> {
        self.client
            .lock()
            .expect("mutex should not be poisoned")
            .add_chain(config)
    }
}
+10 -5
View File
@@ -13,12 +13,17 @@ documentation.workspace = true
homepage.workspace = true
description = "Command line utilities for checking metadata compatibility between nodes."
[features]
default = ["std"]
std = ["scale-info/std", "frame-metadata/std"]
[dependencies]
codec = { package = "parity-scale-codec", workspace = true, features = ["derive"] }
frame-metadata = { workspace = true }
scale-info = { workspace = true }
sp-core-hashing = { workspace = true }
thiserror = { workspace = true }
scale-info = { workspace = true, default-features = false }
frame-metadata = { workspace = true, default-features = false, features = ["current", "decode"] }
codec = { package = "parity-scale-codec", workspace = true, default-features = false, features = ["derive"] }
sp-crypto-hashing = { workspace = true }
hashbrown = { workspace = true }
derive_more = { workspace = true }
[dev-dependencies]
bitvec = { workspace = true, features = ["alloc"] }
+12 -6
View File
@@ -2,31 +2,37 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use alloc::string::String;
use derive_more::Display;
mod v14;
mod v15;
/// An error emitted if something goes wrong converting [`frame_metadata`]
/// types into [`crate::Metadata`].
#[derive(Debug, thiserror::Error, PartialEq, Eq)]
#[derive(Debug, Display, PartialEq, Eq)]
#[non_exhaustive]
pub enum TryFromError {
/// Type missing from type registry
#[error("Type id {0} is expected but not found in the type registry")]
#[display(fmt = "Type id {_0} is expected but not found in the type registry")]
TypeNotFound(u32),
/// Type was not a variant/enum type
#[error("Type {0} was not a variant/enum type, but is expected to be one")]
#[display(fmt = "Type {_0} was not a variant/enum type, but is expected to be one")]
VariantExpected(u32),
/// An unsupported metadata version was provided.
#[error("Cannot convert v{0} metadata into Metadata type")]
#[display(fmt = "Cannot convert v{_0} metadata into Metadata type")]
UnsupportedMetadataVersion(u32),
/// Type name missing from type registry
#[error("Type name {0} is expected but not found in the type registry")]
#[display(fmt = "Type name {_0} is expected but not found in the type registry")]
TypeNameNotFound(String),
/// Invalid type path.
#[error("Type has an invalid path {0}")]
#[display(fmt = "Type has an invalid path {_0}")]
InvalidTypePath(String),
}
#[cfg(feature = "std")]
impl std::error::Error for TryFromError {}
impl From<crate::Metadata> for frame_metadata::RuntimeMetadataPrefixed {
fn from(value: crate::Metadata) -> Self {
let m: frame_metadata::v15::RuntimeMetadataV15 = value.into();
+21 -18
View File
@@ -2,11 +2,15 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use std::collections::HashMap;
use super::TryFromError;
use crate::Metadata;
use alloc::borrow::ToOwned;
use alloc::string::String;
use alloc::vec;
use alloc::vec::Vec;
use core::fmt::Write;
use frame_metadata::{v14, v15};
use hashbrown::HashMap;
use scale_info::TypeDef;
impl TryFrom<v14::RuntimeMetadataV14> for Metadata {
@@ -31,27 +35,27 @@ fn v15_to_v14(mut metadata: v15::RuntimeMetadataV15) -> v14::RuntimeMetadataV14
let extrinsic_type = scale_info::Type {
path: scale_info::Path {
segments: vec![
"primitives".to_string(),
"runtime".to_string(),
"generic".to_string(),
"UncheckedExtrinsic".to_string(),
"primitives".to_owned(),
"runtime".to_owned(),
"generic".to_owned(),
"UncheckedExtrinsic".to_owned(),
],
},
type_params: vec![
scale_info::TypeParameter::<scale_info::form::PortableForm> {
name: "Address".to_string(),
name: "Address".to_owned(),
ty: Some(metadata.extrinsic.address_ty),
},
scale_info::TypeParameter::<scale_info::form::PortableForm> {
name: "Call".to_string(),
name: "Call".to_owned(),
ty: Some(metadata.extrinsic.call_ty),
},
scale_info::TypeParameter::<scale_info::form::PortableForm> {
name: "Signature".to_string(),
name: "Signature".to_owned(),
ty: Some(metadata.extrinsic.signature_ty),
},
scale_info::TypeParameter::<scale_info::form::PortableForm> {
name: "Extra".to_string(),
name: "Extra".to_owned(),
ty: Some(metadata.extrinsic.extra_ty),
},
],
@@ -312,9 +316,7 @@ fn generate_outer_enums(
) -> Result<v15::OuterEnums<scale_info::form::PortableForm>, TryFromError> {
let find_type = |name: &str| {
metadata.types.types.iter().find_map(|ty| {
let Some(ident) = ty.ty.path.ident() else {
return None;
};
let ident = ty.ty.path.ident()?;
if ident != name {
return None;
@@ -342,7 +344,7 @@ fn generate_outer_enums(
let Some(last) = call_path.last_mut() else {
return Err(TryFromError::InvalidTypePath("RuntimeCall".into()));
};
*last = "RuntimeError".to_string();
*last = "RuntimeError".to_owned();
generate_outer_error_enum_type(metadata, call_path)
};
@@ -364,11 +366,12 @@ fn generate_outer_error_enum_type(
.pallets
.iter()
.filter_map(|pallet| {
let Some(error) = &pallet.error else {
return None;
};
let error = pallet.error.as_ref()?;
let path = format!("{}Error", pallet.name);
// Note: using the `alloc::format!` macro like in `let path = format!("{}Error", pallet.name);`
// leads to linker errors about extern function `_Unwind_Resume` not being defined.
let mut path = String::new();
write!(path, "{}Error", pallet.name).expect("Cannot panic, qed;");
let ty = error.ty.id.into();
Some(scale_info::Variant {
+3 -1
View File
@@ -3,6 +3,7 @@
// see LICENSE for license details.
use super::TryFromError;
use crate::utils::variant_index::VariantIndex;
use crate::{
utils::ordered_map::OrderedMap, ArcStr, ConstantMetadata, ExtrinsicMetadata, Metadata,
@@ -10,9 +11,10 @@ use crate::{
RuntimeApiMethodParamMetadata, SignedExtensionMetadata, StorageEntryMetadata,
StorageEntryModifier, StorageEntryType, StorageHasher, StorageMetadata,
};
use alloc::borrow::ToOwned;
use frame_metadata::v15;
use hashbrown::HashMap;
use scale_info::form::PortableForm;
use std::collections::HashMap;
// Converting from V15 metadata into our Subxt repr.
mod from_v15 {
+36 -41
View File
@@ -14,14 +14,19 @@
//! 2. Obtaining [`frame_metadata::RuntimeMetadataPrefixed`], and then
//! using `.try_into()` to convert it into [`Metadata`].
#![cfg_attr(not(feature = "std"), no_std)]
#![deny(missing_docs)]
extern crate alloc;
mod from_into;
mod utils;
use alloc::string::String;
use alloc::sync::Arc;
use alloc::vec::Vec;
use hashbrown::HashMap;
use scale_info::{form::PortableForm, PortableRegistry, Variant};
use std::collections::HashMap;
use std::sync::Arc;
use utils::variant_index::VariantIndex;
use utils::{ordered_map::OrderedMap, validation::outer_enum_hashes::OuterEnumHashes};
@@ -166,45 +171,6 @@ impl Metadata {
&OuterEnumHashes::empty(),
))
}
/// Ensure that every unique type we'll be generating or referring to also has a
/// unique path, so that types with matching paths don't end up overwriting each other
/// in the codegen. We ignore any types with generics; Subxt actually endeavours to
/// de-duplicate those into single types with a generic.
pub fn ensure_unique_type_paths(&mut self) {
    // How many times each exact path (list of segments) has been seen so far.
    let mut visited_path_counts = HashMap::<Vec<String>, usize>::new();
    for ty in self.types.types.iter_mut() {
        // Ignore types without a path (ie prelude types).
        if ty.ty.path.namespace().is_empty() {
            continue;
        }

        let has_valid_type_params = ty.ty.type_params.iter().any(|tp| tp.ty.is_some());

        // Ignore types which have generic params that the type generation will use.
        // Ordinarily we'd expect that any two types with identical paths must be parameterized
        // in order to share the path. However scale-info doesn't understand all forms of generics
        // properly I think (eg generics that have associated types that can differ), and so in
        // those cases we need to fix the paths for Subxt to generate correct code.
        if has_valid_type_params {
            continue;
        }

        // Count how many times we've seen the same path already.
        let visited_count = visited_path_counts
            .entry(ty.ty.path.segments.clone())
            .or_default();
        *visited_count += 1;

        // Alter the type so that if it's been seen more than once, we append a number to
        // its name to ensure that every unique type has a unique path, too
        // (eg `Foo`, `Foo2`, `Foo3`).
        if *visited_count > 1 {
            if let Some(name) = ty.ty.path.segments.last_mut() {
                *name = format!("{name}{visited_count}");
            }
        }
    }
}
}
/// Metadata for a specific pallet.
@@ -470,6 +436,35 @@ pub enum StorageHasher {
Identity,
}
impl StorageHasher {
    /// The hash produced by a [`StorageHasher`] can have these two components, in order:
    ///
    /// 1. A fixed size hash (not present for [`StorageHasher::Identity`]).
    /// 2. The SCALE encoded key that was used as an input to the hasher (only present for
    ///    [`StorageHasher::Twox64Concat`], [`StorageHasher::Blake2_128Concat`] or
    ///    [`StorageHasher::Identity`]).
    ///
    /// This function returns the number of bytes used to represent the first of these.
    pub fn len_excluding_key(&self) -> usize {
        // Variants grouped by the byte width of their fixed-size hash component.
        match self {
            StorageHasher::Identity => 0,
            StorageHasher::Twox64Concat => 8,
            StorageHasher::Blake2_128
            | StorageHasher::Blake2_128Concat
            | StorageHasher::Twox128 => 16,
            StorageHasher::Blake2_256 | StorageHasher::Twox256 => 32,
        }
    }

    /// Returns true if the key used to produce the hash is appended to the hash itself.
    pub fn ends_with_key(&self) -> bool {
        match self {
            StorageHasher::Blake2_128Concat
            | StorageHasher::Twox64Concat
            | StorageHasher::Identity => true,
            _ => false,
        }
    }
}
/// Is the storage entry optional, or does it have a default value.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum StorageEntryModifier {
+9 -7
View File
@@ -2,7 +2,9 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use std::collections::HashMap;
use alloc::vec::Vec;
use core::mem;
use hashbrown::HashMap;
/// A minimal ordered map to let one search for
/// things by key or get the values in insert order.
@@ -23,7 +25,7 @@ impl<K, V> Default for OrderedMap<K, V> {
impl<K, V> OrderedMap<K, V>
where
K: PartialEq + Eq + std::hash::Hash,
K: PartialEq + Eq + core::hash::Hash,
{
/// Create a new, empty [`OrderedMap`].
pub fn new() -> Self {
@@ -47,8 +49,8 @@ where
where
F: FnMut(&V) -> bool,
{
let values = std::mem::take(&mut self.values);
let map = std::mem::take(&mut self.map);
let values = mem::take(&mut self.values);
let map = mem::take(&mut self.map);
// Filter the values, storing a map from old to new positions:
let mut new_values = Vec::new();
@@ -78,8 +80,8 @@ where
/// Get an item by its key.
pub fn get_by_key<Q>(&self, key: &Q) -> Option<&V>
where
K: std::borrow::Borrow<Q>,
Q: std::hash::Hash + Eq + ?Sized,
K: alloc::borrow::Borrow<Q>,
Q: core::hash::Hash + Eq + ?Sized,
{
self.map.get(key).and_then(|&v| self.values.get(v))
}
@@ -107,7 +109,7 @@ where
impl<K, V> FromIterator<(K, V)> for OrderedMap<K, V>
where
K: PartialEq + Eq + std::hash::Hash,
K: PartialEq + Eq + core::hash::Hash,
{
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
let mut map = OrderedMap::new();
+2 -1
View File
@@ -8,8 +8,9 @@ use crate::{
ExtrinsicMetadata, Metadata, OuterEnumsMetadata, PalletMetadataInner, RuntimeApiMetadataInner,
StorageEntryType,
};
use alloc::collections::BTreeMap;
use hashbrown::HashSet;
use scale_info::TypeDef;
use std::collections::{BTreeMap, HashSet};
/// Collect all type IDs needed to represent the provided pallet.
fn collect_pallet_types(pallet: &PalletMetadataInner, type_ids: &mut HashSet<u32>) {
+3 -2
View File
@@ -8,9 +8,10 @@ use crate::{
CustomMetadata, CustomValueMetadata, ExtrinsicMetadata, Metadata, PalletMetadata,
RuntimeApiMetadata, RuntimeApiMethodMetadata, StorageEntryMetadata, StorageEntryType,
};
use alloc::vec::Vec;
use hashbrown::HashMap;
use outer_enum_hashes::OuterEnumHashes;
use scale_info::{form::PortableForm, Field, PortableRegistry, TypeDef, TypeDefVariant, Variant};
use std::collections::HashMap;
pub mod outer_enum_hashes;
@@ -34,7 +35,7 @@ enum TypeBeingHashed {
/// Hashing function utilized internally.
fn hash(data: &[u8]) -> Hash {
sp_core_hashing::twox_256(data)
sp_crypto_hashing::twox_256(data)
}
/// XOR two hashes together. Only use this when you don't care about the order
@@ -1,6 +1,6 @@
//! Hash representations of the `frame_metadata::v15::OuterEnums`.
use std::collections::HashMap;
use hashbrown::HashMap;
use scale_info::{PortableRegistry, TypeDef};
+5 -3
View File
@@ -2,8 +2,10 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use alloc::borrow::ToOwned;
use alloc::string::String;
use hashbrown::HashMap;
use scale_info::{form::PortableForm, PortableRegistry, TypeDef, Variant};
use std::collections::HashMap;
/// Given some type ID and type registry, build a couple of
/// indexes to look up variants by index or name. If the ID provided
@@ -62,8 +64,8 @@ impl VariantIndex {
types: &'a PortableRegistry,
) -> Option<&'a Variant<PortableForm>>
where
String: std::borrow::Borrow<K>,
K: std::hash::Hash + Eq + ?Sized,
String: alloc::borrow::Borrow<K>,
K: core::hash::Hash + Eq + ?Sized,
{
let pos = *self.by_name.get(name)?;
let variants = Self::get(variant_id, types)?;
+3 -3
View File
@@ -10,7 +10,7 @@ use substrate_runner::SubstrateNode;
/// Run with `cargo run --bin artifacts` from the root of the repository.
fn main() {
let mut node_builder = SubstrateNode::builder();
node_builder.binary_paths(["substrate-node", "substrate"]);
node_builder.polkadot();
// Spawn the node and retrieve a ws URL to it:
let proc = node_builder
@@ -37,13 +37,13 @@ fn main() {
// Generate a metadata file that only contains a few pallets that we need for our examples.
run_cmd(
r#"cargo run --bin subxt metadata --file artifacts/polkadot_metadata_full.scale --pallets "Balances,Staking,System,Multisig,Timestamp,ParaInherent""#,
"cargo run --bin subxt metadata --file artifacts/polkadot_metadata_full.scale --pallets Balances,Staking,System,Multisig,Timestamp,ParaInherent",
Some("artifacts/polkadot_metadata_small.scale"),
);
// Generate a metadata file that contains no pallets
run_cmd(
r#"cargo run --bin subxt metadata --file artifacts/polkadot_metadata_full.scale --pallets """#,
"cargo run --bin subxt metadata --file artifacts/polkadot_metadata_full.scale --pallets \"\"",
Some("artifacts/polkadot_metadata_tiny.scale"),
);
+13 -11
View File
@@ -15,7 +15,8 @@ description = "Sign extrinsics to be submitted by Subxt"
keywords = ["parity", "subxt", "extrinsic", "signer"]
[features]
default = ["sr25519", "ecdsa", "subxt", "native"]
default = ["sr25519", "ecdsa", "subxt", "std"]
std = ["regex/std", "sp-crypto-hashing/std", "pbkdf2/std", "sha2/std", "hmac/std", "bip39/std", "schnorrkel/std", "secp256k1/std", "sp-core/std"]
# Pick the signer implementation(s) you need by enabling the
# corresponding features. Note: I had more difficulties getting
@@ -26,35 +27,36 @@ ecdsa = ["secp256k1"]
# Make the keypair algorithms here compatible with Subxt's Signer trait,
# so that they can be used to sign transactions for compatible chains.
subxt = ["dep:subxt"]
subxt = ["dep:subxt-core"]
# The getrandom package is used via schnorrkel. We need to enable the JS
# feature on it if compiling for the web.
web = ["getrandom/js", "subxt?/web"]
native = ["subxt?/native"]
web = ["getrandom/js"]
[dependencies]
subxt = { workspace = true, optional = true, default-features = false }
regex = { workspace = true }
subxt-core = { workspace = true, optional = true, default-features = false }
secrecy = { workspace = true }
regex = { workspace = true, features = ["unicode"] }
hex = { workspace = true }
cfg-if = { workspace = true }
codec = { package = "parity-scale-codec", workspace = true, features = ["derive"] }
sp-core-hashing = { workspace = true }
thiserror = { workspace = true }
sp-crypto-hashing = { workspace = true }
derive_more = { workspace = true }
pbkdf2 = { workspace = true }
sha2 = { workspace = true }
hmac = { workspace = true }
zeroize = { workspace = true }
bip39 = { workspace = true }
schnorrkel = { workspace = true, optional = true }
secp256k1 = { workspace = true, features = ["recovery", "global-context"], optional = true }
secrecy = { workspace = true }
secp256k1 = { workspace = true, optional = true, features = ["alloc", "recovery"] }
# We only pull this in to enable the JS flag for schnorrkel to use.
getrandom = { workspace = true, optional = true }
[dev-dependencies]
sp-core = { workspace = true, features = ["std"] }
sp-keyring = { workspace = true }
sp-core = { workspace = true }
[package.metadata.cargo-machete]
ignored = ["getrandom"]
+1 -1
View File
@@ -40,7 +40,7 @@ impl DeriveJunction {
let mut cc: [u8; JUNCTION_ID_LEN] = Default::default();
index.using_encoded(|data| {
if data.len() > JUNCTION_ID_LEN {
cc.copy_from_slice(&sp_core_hashing::blake2_256(data));
cc.copy_from_slice(&sp_crypto_hashing::blake2_256(data));
} else {
cc[0..data.len()].copy_from_slice(data);
}
+5
View File
@@ -6,8 +6,13 @@
mod derive_junction;
mod secret_uri;
// No need for the cfg other than to avoid an unused_imports lint warning.
#[cfg(any(feature = "sr25519", feature = "ecdsa"))]
mod seed_from_entropy;
pub use derive_junction::DeriveJunction;
pub use secret_uri::{SecretUri, SecretUriError, DEV_PHRASE};
#[cfg(any(feature = "sr25519", feature = "ecdsa"))]
pub use seed_from_entropy::seed_from_entropy;

Some files were not shown because too many files have changed in this diff Show More