feat: Vendor pezkuwi-subxt and pezkuwi-zombienet-sdk into monorepo

- Add pezkuwi-subxt crates to vendor/pezkuwi-subxt
- Add pezkuwi-zombienet-sdk crates to vendor/pezkuwi-zombienet-sdk
- Convert git dependencies to path dependencies
- Add vendor crates to workspace members
- Remove test/example crates from vendor (not needed for SDK)
- Fix feature propagation issues detected by zepter
- Fix workspace inheritance for internal dependencies
- All 606 crates now in workspace
- All 6919 internal dependency links verified correct
- No git dependencies remaining
This commit is contained in:
2025-12-22 23:31:24 +03:00
parent abc4c3989b
commit 62674ce919
386 changed files with 76759 additions and 36 deletions
+35 -14
View File
@@ -161,6 +161,7 @@ members = [
"bizinikiwi/pezframe/message-queue", "bizinikiwi/pezframe/message-queue",
"bizinikiwi/pezframe/meta-tx", "bizinikiwi/pezframe/meta-tx",
"bizinikiwi/pezframe/metadata-hash-extension", "bizinikiwi/pezframe/metadata-hash-extension",
"bizinikiwi/pezframe/pezframe-metadata",
"bizinikiwi/pezframe/migrations", "bizinikiwi/pezframe/migrations",
"bizinikiwi/pezframe/mixnet", "bizinikiwi/pezframe/mixnet",
"bizinikiwi/pezframe/multi-asset-bounties", "bizinikiwi/pezframe/multi-asset-bounties",
@@ -603,6 +604,25 @@ members = [
"templates/teyrchain/runtime", "templates/teyrchain/runtime",
"templates/zombienet", "templates/zombienet",
"umbrella", "umbrella",
"vendor/ss58-registry",
# Vendored pezkuwi-subxt crates
"vendor/pezkuwi-subxt/codegen",
"vendor/pezkuwi-subxt/core",
"vendor/pezkuwi-subxt/lightclient",
"vendor/pezkuwi-subxt/macro",
"vendor/pezkuwi-subxt/metadata",
"vendor/pezkuwi-subxt/rpcs",
"vendor/pezkuwi-subxt/signer",
"vendor/pezkuwi-subxt/subxt",
"vendor/pezkuwi-subxt/utils/fetch-metadata",
"vendor/pezkuwi-subxt/utils/strip-metadata",
# Vendored pezkuwi-zombienet-sdk crates
"vendor/pezkuwi-zombienet-sdk/crates/configuration",
"vendor/pezkuwi-zombienet-sdk/crates/orchestrator",
"vendor/pezkuwi-zombienet-sdk/crates/provider",
"vendor/pezkuwi-zombienet-sdk/crates/sdk",
"vendor/pezkuwi-zombienet-sdk/crates/support",
"vendor/pezkuwi-zombienet-sdk/crates/prom-metrics-parser",
] ]
default-members = [ default-members = [
@@ -1449,17 +1469,17 @@ ssz_rs_derive = { version = "0.9.0", default-features = false }
static_assertions = { version = "1.1.0", default-features = false } static_assertions = { version = "1.1.0", default-features = false }
static_init = { version = "1.0.3" } static_init = { version = "1.0.3" }
strum = { version = "0.26.3", default-features = false } strum = { version = "0.26.3", default-features = false }
# Pezkuwi-subxt (forked from subxt with pezsp_runtime support) - using git dependencies # Pezkuwi-subxt (vendored from subxt with pezsp_runtime support)
pezkuwi-subxt = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt", default-features = false } pezkuwi-subxt = { path = "vendor/pezkuwi-subxt/subxt", default-features = false }
pezkuwi-subxt-codegen = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt-codegen" } pezkuwi-subxt-codegen = { path = "vendor/pezkuwi-subxt/codegen" }
pezkuwi-subxt-core = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt-core", default-features = false } pezkuwi-subxt-core = { path = "vendor/pezkuwi-subxt/core", default-features = false }
pezkuwi-subxt-lightclient = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt-lightclient", default-features = false } pezkuwi-subxt-lightclient = { path = "vendor/pezkuwi-subxt/lightclient", default-features = false }
pezkuwi-subxt-macro = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt-macro" } pezkuwi-subxt-macro = { path = "vendor/pezkuwi-subxt/macro" }
pezkuwi-subxt-metadata = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt-metadata", default-features = false } pezkuwi-subxt-metadata = { path = "vendor/pezkuwi-subxt/metadata", default-features = false }
pezkuwi-subxt-rpcs = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt-rpcs", default-features = false } pezkuwi-subxt-rpcs = { path = "vendor/pezkuwi-subxt/rpcs", default-features = false }
pezkuwi-subxt-signer = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt-signer", default-features = false } pezkuwi-subxt-signer = { path = "vendor/pezkuwi-subxt/signer", default-features = false }
pezkuwi-subxt-utils-fetchmetadata = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt-utils-fetchmetadata", default-features = false } pezkuwi-subxt-utils-fetchmetadata = { path = "vendor/pezkuwi-subxt/utils/fetch-metadata", default-features = false }
pezkuwi-subxt-utils-stripmetadata = { git = "https://github.com/pezkuwichain/pezkuwi-subxt", branch = "main", package = "pezkuwi-subxt-utils-stripmetadata" } pezkuwi-subxt-utils-stripmetadata = { path = "vendor/pezkuwi-subxt/utils/strip-metadata" }
syn = { version = "2.0.87" } syn = { version = "2.0.87" }
sysinfo = { version = "0.30" } sysinfo = { version = "0.30" }
tar = { version = "0.4" } tar = { version = "0.4" }
@@ -1538,9 +1558,10 @@ zagros-runtime = { path = "pezkuwi/runtime/zagros", default-features = false }
zagros-runtime-constants = { path = "pezkuwi/runtime/zagros/constants", default-features = false } zagros-runtime-constants = { path = "pezkuwi/runtime/zagros/constants", default-features = false }
zagros-system-emulated-network = { path = "pezcumulus/teyrchains/integration-tests/emulated/networks/zagros-system" } zagros-system-emulated-network = { path = "pezcumulus/teyrchains/integration-tests/emulated/networks/zagros-system" }
zeroize = { version = "1.7.0", default-features = false } zeroize = { version = "1.7.0", default-features = false }
zombienet-configuration = { git = "https://github.com/pezkuwichain/pezkuwi-zombienet-sdk", branch = "main" } # Pezkuwi-zombienet-sdk (vendored)
zombienet-orchestrator = { git = "https://github.com/pezkuwichain/pezkuwi-zombienet-sdk", branch = "main" } zombienet-configuration = { path = "vendor/pezkuwi-zombienet-sdk/crates/configuration" }
zombienet-sdk = { git = "https://github.com/pezkuwichain/pezkuwi-zombienet-sdk", branch = "main" } zombienet-orchestrator = { path = "vendor/pezkuwi-zombienet-sdk/crates/orchestrator" }
zombienet-sdk = { path = "vendor/pezkuwi-zombienet-sdk/crates/sdk" }
zstd = { version = "0.12.4", default-features = false } zstd = { version = "0.12.4", default-features = false }
[profile.release] [profile.release]
+2 -1
View File
@@ -56,10 +56,10 @@ codec = { workspace = true, default-features = true }
futures = { workspace = true } futures = { workspace = true }
jsonrpsee = { features = ["server"], workspace = true } jsonrpsee = { features = ["server"], workspace = true }
log = { workspace = true, default-features = true } log = { workspace = true, default-features = true }
pezkuwi-subxt-signer = { workspace = true, features = ["unstable-eth"] }
rand = { workspace = true, default-features = true } rand = { workspace = true, default-features = true }
serde = { features = ["derive"], workspace = true, default-features = true } serde = { features = ["derive"], workspace = true, default-features = true }
serde_json = { workspace = true, default-features = true } serde_json = { workspace = true, default-features = true }
pezkuwi-subxt-signer = { workspace = true, features = ["unstable-eth"] }
# The Pezkuwi-SDK: # The Pezkuwi-SDK:
pezkuwi-sdk = { features = [ pezkuwi-sdk = { features = [
@@ -225,4 +225,5 @@ try-runtime = [
"bizinikiwi-cli-test-utils/try-runtime", "bizinikiwi-cli-test-utils/try-runtime",
"pez-kitchensink-runtime/try-runtime", "pez-kitchensink-runtime/try-runtime",
"pezkuwi-sdk/try-runtime", "pezkuwi-sdk/try-runtime",
"pezsp-runtime/try-runtime"
] ]
+5 -2
View File
@@ -36,9 +36,9 @@ primitive-types = { features = [
"scale-info", "scale-info",
], workspace = true } ], workspace = true }
pezkuwi-sdk = { features = ["runtime-full", "tuples-96"], workspace = true }
pezframe-try-runtime = { optional = true, workspace = true } pezframe-try-runtime = { optional = true, workspace = true }
pezsp-runtime = { path = "../../../primitives/runtime", default-features = false } pezkuwi-sdk = { features = ["runtime-full", "tuples-96"], workspace = true }
pezsp-runtime = { workspace = true, default-features = false }
# shared code between runtime and node # shared code between runtime and node
pez-node-primitives = { workspace = true } pez-node-primitives = { workspace = true }
@@ -77,6 +77,8 @@ runtime-benchmarks = [
"pezpallet-example-tasks/runtime-benchmarks", "pezpallet-example-tasks/runtime-benchmarks",
"rand", "rand",
"rand_pcg", "rand_pcg",
"pezframe-try-runtime?/runtime-benchmarks",
"pezsp-runtime/runtime-benchmarks"
] ]
try-runtime = [ try-runtime = [
"pezframe-try-runtime", "pezframe-try-runtime",
@@ -84,6 +86,7 @@ try-runtime = [
"pezkuwi-sdk/try-runtime", "pezkuwi-sdk/try-runtime",
"pezpallet-example-mbm/try-runtime", "pezpallet-example-mbm/try-runtime",
"pezpallet-example-tasks/try-runtime", "pezpallet-example-tasks/try-runtime",
"pezsp-runtime/try-runtime"
] ]
experimental = ["pezpallet-example-tasks/experimental"] experimental = ["pezpallet-example-tasks/experimental"]
metadata-hash = ["bizinikiwi-wasm-builder/metadata-hash"] metadata-hash = ["bizinikiwi-wasm-builder/metadata-hash"]
@@ -30,9 +30,9 @@ thiserror = { workspace = true }
[dev-dependencies] [dev-dependencies]
pezcumulus-primitives-proof-size-hostfunction = { workspace = true, default-features = true } pezcumulus-primitives-proof-size-hostfunction = { workspace = true, default-features = true }
pezcumulus-test-runtime = { workspace = true, default-features = true } pezcumulus-test-runtime = { workspace = true, default-features = true }
pezkuwi-subxt = { workspace = true, features = ["native"] }
pezsp-io = { workspace = true, default-features = true } pezsp-io = { workspace = true, default-features = true }
pezsp-version = { workspace = true, default-features = true } pezsp-version = { workspace = true, default-features = true }
pezkuwi-subxt = { workspace = true, features = ["native"] }
[features] [features]
runtime-benchmarks = [ runtime-benchmarks = [
+6 -5
View File
@@ -52,6 +52,10 @@ bn = { workspace = true }
pezframe-benchmarking = { optional = true, workspace = true } pezframe-benchmarking = { optional = true, workspace = true }
pezframe-support = { workspace = true } pezframe-support = { workspace = true }
pezframe-system = { workspace = true } pezframe-system = { workspace = true }
pezkuwi-subxt-signer = { workspace = true, optional = true, features = [
"subxt",
"unstable-eth",
] }
pezpallet-revive-fixtures = { workspace = true, optional = true } pezpallet-revive-fixtures = { workspace = true, optional = true }
pezpallet-revive-proc-macro = { workspace = true } pezpallet-revive-proc-macro = { workspace = true }
pezpallet-revive-uapi = { workspace = true, features = [ pezpallet-revive-uapi = { workspace = true, features = [
@@ -69,10 +73,6 @@ pezsp-io = { workspace = true }
pezsp-runtime = { workspace = true } pezsp-runtime = { workspace = true }
pezsp-version = { workspace = true } pezsp-version = { workspace = true }
ripemd = { workspace = true } ripemd = { workspace = true }
pezkuwi-subxt-signer = { workspace = true, optional = true, features = [
"unstable-eth",
"subxt",
] }
[dev-dependencies] [dev-dependencies]
alloy-consensus = { workspace = true, default-features = true } alloy-consensus = { workspace = true, default-features = true }
@@ -113,6 +113,7 @@ std = [
"pezframe-benchmarking?/std", "pezframe-benchmarking?/std",
"pezframe-support/std", "pezframe-support/std",
"pezframe-system/std", "pezframe-system/std",
"pezkuwi-subxt-signer",
"pezpallet-proxy/std", "pezpallet-proxy/std",
"pezpallet-revive-fixtures?/std", "pezpallet-revive-fixtures?/std",
"pezpallet-timestamp/std", "pezpallet-timestamp/std",
@@ -139,7 +140,7 @@ std = [
"secp256k1/std", "secp256k1/std",
"serde/std", "serde/std",
"serde_json/std", "serde_json/std",
"pezkuwi-subxt-signer", "pezkuwi-subxt-signer?/std"
] ]
runtime-benchmarks = [ runtime-benchmarks = [
"k256", "k256",
@@ -43,8 +43,13 @@ pezkuwi-sdk = { workspace = true, features = ["bizinikiwi-build-script-utils"] }
[features] [features]
default = ["std"] default = ["std"]
std = ["pez-revive-dev-runtime/std", "pezkuwi-sdk/std"] std = [
"pez-revive-dev-runtime/std",
"pezkuwi-sdk/std",
"pezsp-runtime/std"
]
runtime-benchmarks = [ runtime-benchmarks = [
"pez-revive-dev-runtime/runtime-benchmarks", "pez-revive-dev-runtime/runtime-benchmarks",
"pezkuwi-sdk/runtime-benchmarks", "pezkuwi-sdk/runtime-benchmarks",
"pezsp-runtime/runtime-benchmarks"
] ]
@@ -49,4 +49,7 @@ std = [
"scale-info/std", "scale-info/std",
"serde_json/std", "serde_json/std",
] ]
runtime-benchmarks = ["pezkuwi-sdk/runtime-benchmarks"] runtime-benchmarks = [
"pezkuwi-sdk/runtime-benchmarks",
"pezsp-runtime/runtime-benchmarks"
]
+1 -1
View File
@@ -31,6 +31,7 @@ log = { workspace = true }
pezkuwi-subxt = { workspace = true, default-features = true, features = [ pezkuwi-subxt = { workspace = true, default-features = true, features = [
"reconnecting-rpc-client", "reconnecting-rpc-client",
] } ] }
pezkuwi-subxt-signer = { workspace = true, features = ["unstable-eth"] }
pezpallet-revive = { workspace = true, default-features = true } pezpallet-revive = { workspace = true, default-features = true }
pezsc-cli = { workspace = true, default-features = true } pezsc-cli = { workspace = true, default-features = true }
pezsc-rpc = { workspace = true, default-features = true } pezsc-rpc = { workspace = true, default-features = true }
@@ -51,7 +52,6 @@ serde = { workspace = true, default-features = true, features = [
] } ] }
serde_json = { workspace = true } serde_json = { workspace = true }
sqlx = { workspace = true, features = ["macros", "runtime-tokio", "sqlite"] } sqlx = { workspace = true, features = ["macros", "runtime-tokio", "sqlite"] }
pezkuwi-subxt-signer = { workspace = true, features = ["unstable-eth"] }
thiserror = { workspace = true } thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] } tokio = { workspace = true, features = ["full"] }
+1
View File
@@ -34,6 +34,7 @@ std = [
"pezsp-debug-derive/std", "pezsp-debug-derive/std",
"scale-info/std", "scale-info/std",
"serde/std", "serde/std",
"schemars?/std"
] ]
# By default some types have documentation, `full-metadata-docs` allows to add documentation to # By default some types have documentation, `full-metadata-docs` allows to add documentation to
# more types in the metadata. # more types in the metadata.
@@ -36,6 +36,8 @@ pezframe-benchmarking = { workspace = true, default-features = true }
pezframe-support = { workspace = true, default-features = true } pezframe-support = { workspace = true, default-features = true }
pezframe-system = { workspace = true, default-features = true } pezframe-system = { workspace = true, default-features = true }
pezkuwi-primitives = { workspace = true, default-features = true } pezkuwi-primitives = { workspace = true, default-features = true }
pezkuwi-subxt = { workspace = true, features = ["native"] }
pezkuwi-subxt-signer = { workspace = true, features = ["sr25519", "subxt", "unstable-eth"] }
pezkuwi-teyrchain-primitives = { workspace = true, default-features = true } pezkuwi-teyrchain-primitives = { workspace = true, default-features = true }
pezsc-block-builder = { workspace = true, default-features = true } pezsc-block-builder = { workspace = true, default-features = true }
pezsc-chain-spec = { workspace = true } pezsc-chain-spec = { workspace = true }
@@ -71,8 +73,6 @@ rand = { features = ["small_rng"], workspace = true, default-features = true }
rand_pcg = { workspace = true } rand_pcg = { workspace = true }
serde = { workspace = true, default-features = true } serde = { workspace = true, default-features = true }
serde_json = { workspace = true, default-features = true } serde_json = { workspace = true, default-features = true }
pezkuwi-subxt = { workspace = true, features = ["native"] }
pezkuwi-subxt-signer = { workspace = true, features = ["unstable-eth", "sr25519", "subxt"] }
thiserror = { workspace = true } thiserror = { workspace = true }
thousands = { workspace = true } thousands = { workspace = true }
+4 -4
View File
@@ -30,13 +30,13 @@ jsonrpsee = { workspace = true, features = [
"jsonrpsee-types", "jsonrpsee-types",
] } ] }
parking_lot = { workspace = true, default-features = true } parking_lot = { workspace = true, default-features = true }
rand = { workspace = true, default-features = true }
serde = { workspace = true, default-features = true }
serde_json = { workspace = true, features = ["arbitrary_precision"] }
pezkuwi-subxt = { workspace = true, default-features = true } pezkuwi-subxt = { workspace = true, default-features = true }
pezkuwi-subxt-core = { workspace = true, default-features = true } pezkuwi-subxt-core = { workspace = true, default-features = true }
pezkuwi-subxt-rpcs = { workspace = true, default-features = true } pezkuwi-subxt-rpcs = { workspace = true, default-features = true }
pezkuwi-subxt-signer = { workspace = true, features = ["unstable-eth", "sr25519", "subxt"] } pezkuwi-subxt-signer = { workspace = true, features = ["sr25519", "subxt", "unstable-eth"] }
rand = { workspace = true, default-features = true }
serde = { workspace = true, default-features = true }
serde_json = { workspace = true, features = ["arbitrary_precision"] }
termplot = "0.1.1" termplot = "0.1.1"
thiserror = { workspace = true } thiserror = { workspace = true }
time = { version = "0.3.36", features = [ time = { version = "0.3.36", features = [
+1 -1
View File
@@ -29,8 +29,8 @@ serde_json = { workspace = true, default-features = true }
# Local # Local
jsonrpsee = { features = ["server"], workspace = true } jsonrpsee = { features = ["server"], workspace = true }
scale-info = { workspace = true }
pezkuwi-subxt-metadata = { workspace = true, default-features = true } pezkuwi-subxt-metadata = { workspace = true, default-features = true }
scale-info = { workspace = true }
teyrchains-common = { workspace = true, default-features = true } teyrchains-common = { workspace = true, default-features = true }
# Bizinikiwi # Bizinikiwi
+2 -1
View File
@@ -20,13 +20,13 @@ futures = { workspace = true }
log = { workspace = true } log = { workspace = true }
pezcumulus-zombienet-sdk-helpers = { workspace = true } pezcumulus-zombienet-sdk-helpers = { workspace = true }
pezkuwi-primitives = { workspace = true, default-features = true } pezkuwi-primitives = { workspace = true, default-features = true }
pezkuwi-subxt = { workspace = true, default-features = false, features = ["native"] }
pezpallet-revive = { workspace = true, features = ["std"] } pezpallet-revive = { workspace = true, features = ["std"] }
pezsp-core = { workspace = true } pezsp-core = { workspace = true }
pezsp-runtime = { workspace = true } pezsp-runtime = { workspace = true }
rand = { workspace = true } rand = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
serde_json = { workspace = true } serde_json = { workspace = true }
pezkuwi-subxt = { workspace = true, default-features = false, features = ["native"] }
tokio = { workspace = true, features = ["rt-multi-thread"] } tokio = { workspace = true, features = ["rt-multi-thread"] }
tokio-util = { workspace = true, features = ["time"] } tokio-util = { workspace = true, features = ["time"] }
zombienet-orchestrator = { workspace = true } zombienet-orchestrator = { workspace = true }
@@ -43,6 +43,7 @@ runtime-benchmarks = [
"pezsc-executor/runtime-benchmarks", "pezsc-executor/runtime-benchmarks",
"pezsc-runtime-utilities/runtime-benchmarks", "pezsc-runtime-utilities/runtime-benchmarks",
"pezsp-io/runtime-benchmarks", "pezsp-io/runtime-benchmarks",
"pezsp-runtime/runtime-benchmarks"
] ]
[build-dependencies] [build-dependencies]
+1 -1
View File
@@ -1,3 +1,3 @@
[toolchain] [toolchain]
channel = "1.88.0" channel = "1.88.0"
components = ["rustfmt", "clippy"] components = ["clippy", "rustfmt"]
+6 -1
View File
@@ -43,8 +43,13 @@ pezkuwi-sdk = { workspace = true, features = ["bizinikiwi-build-script-utils"] }
[features] [features]
default = ["std"] default = ["std"]
std = ["pez-minimal-template-runtime/std", "pezkuwi-sdk/std"] std = [
"pez-minimal-template-runtime/std",
"pezkuwi-sdk/std",
"pezsp-runtime/std"
]
runtime-benchmarks = [ runtime-benchmarks = [
"pez-minimal-template-runtime/runtime-benchmarks", "pez-minimal-template-runtime/runtime-benchmarks",
"pezkuwi-sdk/runtime-benchmarks", "pezkuwi-sdk/runtime-benchmarks",
"pezsp-runtime/runtime-benchmarks"
] ]
+2
View File
@@ -48,8 +48,10 @@ std = [
runtime-benchmarks = [ runtime-benchmarks = [
"pezkuwi-sdk/runtime-benchmarks", "pezkuwi-sdk/runtime-benchmarks",
"teyrchain-template-runtime/runtime-benchmarks", "teyrchain-template-runtime/runtime-benchmarks",
"pezsp-runtime/runtime-benchmarks"
] ]
try-runtime = [ try-runtime = [
"pezkuwi-sdk/try-runtime", "pezkuwi-sdk/try-runtime",
"teyrchain-template-runtime/try-runtime", "teyrchain-template-runtime/try-runtime",
"pezsp-runtime/try-runtime"
] ]
+2
View File
@@ -113,6 +113,7 @@ runtime-benchmarks = [
"pezkuwi-sdk/runtime-benchmarks", "pezkuwi-sdk/runtime-benchmarks",
"pezpallet-teyrchain-template/runtime-benchmarks", "pezpallet-teyrchain-template/runtime-benchmarks",
"pezsp-runtime/runtime-benchmarks", "pezsp-runtime/runtime-benchmarks",
"pezframe-try-runtime?/runtime-benchmarks"
] ]
try-runtime = [ try-runtime = [
"pezcumulus-pezpallet-teyrchain-system/try-runtime", "pezcumulus-pezpallet-teyrchain-system/try-runtime",
@@ -120,6 +121,7 @@ try-runtime = [
"pezframe-try-runtime/try-runtime", "pezframe-try-runtime/try-runtime",
"pezkuwi-sdk/try-runtime", "pezkuwi-sdk/try-runtime",
"pezpallet-teyrchain-template/try-runtime", "pezpallet-teyrchain-template/try-runtime",
"pezsp-runtime/try-runtime"
] ]
# Enable the metadata hash generation. # Enable the metadata hash generation.
+2
View File
@@ -685,6 +685,7 @@ try-runtime = [
"pezpallet-xcm?/try-runtime", "pezpallet-xcm?/try-runtime",
"pezstaging-teyrchain-info?/try-runtime", "pezstaging-teyrchain-info?/try-runtime",
"teyrchains-common?/try-runtime", "teyrchains-common?/try-runtime",
"pezsp-runtime?/try-runtime"
] ]
serde = [ serde = [
"bp-pezkuwi-core?/serde", "bp-pezkuwi-core?/serde",
@@ -723,6 +724,7 @@ serde = [
"pezsp-storage?/serde", "pezsp-storage?/serde",
"pezsp-version?/serde", "pezsp-version?/serde",
"pezsp-weights?/serde", "pezsp-weights?/serde",
"pezsp-runtime?/serde"
] ]
experimental = [ experimental = [
"pezframe-support-procedural?/experimental", "pezframe-support-procedural?/experimental",
+16
View File
@@ -0,0 +1,16 @@
# EditorConfig helps developers define and maintain consistent
# coding styles between different editors and IDEs
# editorconfig.org
root = true
[*]
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
indent_style = space
indent_size = 4
[*.yml]
indent_size = 2
+12
View File
@@ -0,0 +1,12 @@
version: 2
updates:
- package-ecosystem: "cargo"
directories:
- "**/*"
schedule:
interval: weekly
- package-ecosystem: github-actions
directories:
- "**/*"
schedule:
interval: weekly
@@ -0,0 +1,7 @@
---
title: Subxt integration tests failed against latest Bizinikiwi build.
---
The nightly CI run, which downloads the latest version of Bizinikiwi, ran into test failures. This likely means that there are breaking changes that need fixing in Subxt.
Go to https://github.com/pezkuwichain/subxt/actions/workflows/nightly.yml to see details about the failure.
@@ -0,0 +1,3 @@
# use-nodes
This action downloads the bizinikiwi and pezkuwi binaries produced from the `build-nodes` workflow and puts them into the `$PATH`.
@@ -0,0 +1,43 @@
name: Use substrate and polkadot binaries
description: Downloads and configures the substrate and polkadot binaries built with `build-nodes`
runs:
using: composite
steps:
- name: Install dependencies
shell: bash
run: sudo apt-get update && sudo apt-get install -y curl gcc make clang cmake
- name: Download substrate-node binary
id: download-substrate-binary
uses: dawidd6/action-download-artifact@4c1e823582f43b179e2cbb49c3eade4e41f992e2 # v10
with:
workflow: build-nodes.yml
name: nightly-substrate-binary
- name: Download polkadot binary
id: download-polkadot-binary
uses: dawidd6/action-download-artifact@4c1e823582f43b179e2cbb49c3eade4e41f992e2 # v10
with:
workflow: build-nodes.yml
name: nightly-polkadot-binary
- name: decompress polkadot binary
shell: bash
run: |
tar -xzvf ./polkadot.tar.gz
cp ./target/release/polkadot ./polkadot
- name: Prepare binaries
shell: bash
run: |
chmod u+x ./substrate-node
chmod u+x ./polkadot
chmod u+x ./polkadot-execute-worker
chmod u+x ./polkadot-prepare-worker
./substrate-node --version
./polkadot --version
sudo mv ./substrate-node /usr/local/bin
sudo mv ./polkadot /usr/local/bin
sudo mv ./polkadot-execute-worker /usr/local/bin
sudo mv ./polkadot-prepare-worker /usr/local/bin
rm ./polkadot.tar.gz
+73
View File
@@ -0,0 +1,73 @@
name: Build Substrate and Polkadot Binaries
on:
  # Allow it to be manually run to rebuild the binary when needed:
workflow_dispatch: {}
# Run at 2am every day for nightly builds.
schedule:
- cron: "0 2 * * *"
jobs:
tests:
name: Build Substrate and Polkadot Binaries
runs-on: parity-large
steps:
- name: checkout polkadot-sdk
uses: actions/checkout@v6
with:
repository: paritytech/polkadot-sdk
- name: Install dependencies
run: sudo apt-get update && sudo apt-get install -y protobuf-compiler curl gcc make clang cmake llvm-dev libclang-dev
- name: Install Rust v1.88 toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: 1.88
components: rust-src
target: wasm32-unknown-unknown
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: build substrate binary
uses: actions-rs/cargo@v1
with:
command: build
args: --release --manifest-path substrate/bin/node/cli/Cargo.toml
- name: build polkadot binary
uses: actions-rs/cargo@v1
with:
command: build
args: --release --manifest-path polkadot/Cargo.toml
- name: Strip binaries
run: |
cargo install cargo-strip
cargo strip
- name: upload substrate binary
uses: actions/upload-artifact@v5
with:
name: nightly-substrate-binary
path: target/release/substrate-node
retention-days: 2
if-no-files-found: error
# Note: Uncompressed polkadot binary is ~124MB -> too large for git (max 100MB) without git lfs. Compressed it is only ~45MB
- name: compress polkadot binary
run: |
tar -zcvf target/release/polkadot.tar.gz target/release/polkadot
- name: upload polkadot binary
uses: actions/upload-artifact@v5
with:
name: nightly-polkadot-binary
path: |
target/release/polkadot.tar.gz
target/release/polkadot-execute-worker
target/release/polkadot-prepare-worker
retention-days: 2
if-no-files-found: error
+51
View File
@@ -0,0 +1,51 @@
name: Daily compatibility check against latest substrate
on:
schedule:
# Run at 8am every day, well after the new binary is built
- cron: "0 8 * * *"
env:
CARGO_TERM_COLOR: always
jobs:
tests:
name: Cargo test
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Cargo test
uses: actions-rs/cargo@v1.0.3
with:
command: test
args: --all-targets --workspace
      # If any previous step fails, create a new GitHub issue to notify us about it.
- if: ${{ failure() }}
uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
# Use this issue template:
filename: .github/issue_templates/nightly_run_failed.md
# Update existing issue if found; hopefully will make it clearer
# that it is still an issue:
update_existing: true
# Look for new *open* issues in this search (we want to
# create a new one if we only find closed versions):
search_existing: open
+529
View File
@@ -0,0 +1,529 @@
name: Rust
on:
push:
# Run jobs when commits are pushed to
# master or release-like branches:
branches:
- master
# If we want to backport changes to an old release, push a branch
# eg v0.40.x and CI will run on it. PRs merging to such branches
# will also trigger CI.
- v0.[0-9]+.x
pull_request:
# Run jobs for any external PR that wants
# to merge to master, too:
branches:
- master
- v0.[0-9]+.x
concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
env:
CARGO_TERM_COLOR: always
# Increase wasm test timeout from 20 seconds (default) to 1 minute.
WASM_BINDGEN_TEST_TIMEOUT: 60
jobs:
fmt:
name: Cargo fmt
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
components: rustfmt
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Cargo fmt
uses: actions-rs/cargo@v1.0.3
with:
command: fmt
args: --all -- --check
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
machete:
name: "Check unused dependencies"
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Install cargo-machete
run: cargo install cargo-machete
- name: Check unused dependencies
uses: actions-rs/cargo@v1.0.3
with:
command: machete
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
clippy:
name: Cargo clippy
runs-on: parity-large
needs: [fmt, machete]
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
components: clippy
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Run clippy
run: |
cargo clippy --all-targets --features unstable-light-client -- -D warnings
cargo clippy -p subxt-lightclient --no-default-features --features web -- -D warnings
cargo clippy -p subxt --no-default-features --features web -- -D warnings
cargo clippy -p subxt --no-default-features --features web,unstable-light-client -- -D warnings
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
wasm_clippy:
name: Cargo clippy (WASM)
runs-on: ubuntu-latest
needs: [fmt, machete]
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
target: wasm32-unknown-unknown
override: true
components: clippy
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Run clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: -p subxt --no-default-features --features web,unstable-light-client,jsonrpsee --target wasm32-unknown-unknown -- -D warnings
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
check:
name: Cargo check
runs-on: parity-large
needs: [fmt, machete]
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Install cargo-hack
uses: baptiste0928/cargo-install@v3
with:
crate: cargo-hack
version: 0.5
# A basic check over all targets together. This may lead to features being combined etc,
# and doesn't test combinations of different features.
- name: Cargo check all targets.
run: cargo check --all-targets
# Next, check each subxt feature in isolation.
# - `native` feature must always be enabled
# - `web` feature is always ignored.
- name: Cargo hack; check each subxt feature
run: cargo hack -p subxt --each-feature check --exclude-features web --features native
# Same with subxt-historic
- name: Cargo hack; check each subxt-historic feature
run: cargo hack -p subxt-historic --each-feature check --exclude-features web --features native
# And with subxt-rpcs
- name: Cargo hack; check each subxt-rpcs feature
run: cargo hack -p subxt-rpcs --each-feature check --exclude-features web --features native
# And with subxt-signer (seems to work with a more basic check here; disable web if it becomes an issue).
- name: Cargo hack; check each subxt-signer feature
run: cargo hack -p subxt-signer --each-feature check
# And for subxt-lightclient.
- name: Cargo check subxt-lightclient
run: cargo hack -p subxt-lightclient --each-feature check --exclude-features web --features native
# Next, check all other crates.
- name: Cargo hack; check each feature/crate on its own
run: cargo hack --exclude subxt --exclude subxt-historic --exclude subxt-signer --exclude subxt-lightclient --exclude subxt-rpcs --exclude-all-features --each-feature check --workspace
# Check the full examples, which aren't a part of the workspace so are otherwise ignored.
- name: Cargo check parachain-example
run: cargo check --manifest-path examples/parachain-example/Cargo.toml
- name: Cargo check ffi-example
run: cargo check --manifest-path examples/ffi-example/Cargo.toml
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
ffi_example:
name: Run FFI Example
runs-on: ubuntu-latest
needs: [check]
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
target: wasm32-unknown-unknown
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Install
uses: actions/setup-node@v4
with:
# Node version 20 and higher seem to cause an issue with the JS example so stick to 19 for now.
node-version: 19.x
- name: Cargo check/run ffi-example
run: |
# Start node on port 8000
substrate-node --dev --rpc-port 8000 > /dev/null 2>&1 &
# Build the Rust code (hopefully gives long enough for substrate server to start, too):
cd examples/ffi-example
cargo build
# Run the python version of the FFI code:
echo "Running Python FFI example..."
python3 src/main.py
echo "Python FFI example completed with exit code $?"
# Run the node version of the FFI code
echo "Installing Node.js dependencies..."
npm i
echo "Running Node FFI example..."
node src/main.js
echo "Node FFI example completed with exit code $?"
pkill substrate-node
wasm_check:
name: Cargo check (WASM)
runs-on: ubuntu-latest
needs: [fmt, machete]
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
target: wasm32-unknown-unknown
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Cargo check web features which require wasm32 target.
run: |
cargo check -p subxt-rpcs --target wasm32-unknown-unknown --no-default-features --features web
cargo check -p subxt-rpcs --target wasm32-unknown-unknown --no-default-features --features web,reconnecting-rpc-client
# Check WASM examples, which aren't a part of the workspace and so are otherwise missed:
- name: Cargo check WASM examples
run: |
cargo check --manifest-path examples/wasm-example/Cargo.toml --target wasm32-unknown-unknown
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
docs:
name: Check documentation and run doc tests
runs-on: parity-large
needs: [fmt, machete]
timeout-minutes: 30
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Check internal documentation links
run: RUSTDOCFLAGS="--deny rustdoc::broken_intra_doc_links" cargo doc --workspace --no-deps --document-private-items
- name: Run cargo test on documentation
uses: actions-rs/cargo@v1.0.3
with:
command: test
args: --doc --features reconnecting-rpc-client
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
tests:
name: "Test (Native)"
runs-on: parity-large
needs: [clippy, wasm_clippy, check, wasm_check, docs]
timeout-minutes: 45
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Install cargo-nextest
run: cargo install cargo-nextest
- name: Run subxt-signer no-std tests
uses: actions-rs/cargo@v1.0.3
with:
command: test
working-directory: signer/tests/no-std
- name: Run tests
uses: actions-rs/cargo@v1.0.3
with:
command: nextest
args: run --workspace --features reconnecting-rpc-client
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
unstable_backend_tests:
name: "Test chainhead backend"
runs-on: parity-large
needs: [clippy, wasm_clippy, check, wasm_check, docs]
timeout-minutes: 30
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Install cargo-nextest
run: cargo install cargo-nextest
- name: Run tests
uses: actions-rs/cargo@v1.0.3
with:
command: nextest
args: run --workspace --features chainhead-backend
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
light_client_tests:
name: "Test (Light Client)"
runs-on: ubuntu-latest
needs: [clippy, wasm_clippy, check, wasm_check, docs]
timeout-minutes: 15
steps:
- name: Checkout sources
uses: actions/checkout@v6
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Run tests
uses: actions-rs/cargo@v1.0.3
with:
command: test
args: --release --package integration-tests --features unstable-light-client
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
wasm_tests:
name: Test (WASM)
runs-on: ubuntu-latest
needs: [clippy, wasm_clippy, check, wasm_check, docs]
timeout-minutes: 30
env:
# Set timeout for wasm tests to be much bigger than the default 20 secs.
WASM_BINDGEN_TEST_TIMEOUT: 300
steps:
- uses: actions/checkout@v6
- name: Install wasm-pack
run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
- name: Install firefox
uses: browser-actions/setup-firefox@latest
- name: Install chrome
uses: browser-actions/setup-chrome@latest
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Run subxt WASM tests
run: |
# `listen-addr` is used to configure p2p to accept websocket connections instead of TCP.
# `node-key` provides a deterministic p2p address.
substrate-node --dev --node-key 0000000000000000000000000000000000000000000000000000000000000001 --listen-addr /ip4/0.0.0.0/tcp/30333/ws > /dev/null 2>&1 &
wasm-pack test --headless --firefox
wasm-pack test --headless --chrome
pkill substrate-node
working-directory: testing/wasm-rpc-tests
- name: Run subxt-lightclient WASM tests
run: |
# `listen-addr` is used to configure p2p to accept websocket connections instead of TCP.
# `node-key` provides a deterministic p2p address.
substrate-node --dev --node-key 0000000000000000000000000000000000000000000000000000000000000001 --listen-addr /ip4/0.0.0.0/tcp/30333/ws > /dev/null 2>&1 &
wasm-pack test --headless --firefox
wasm-pack test --headless --chrome
pkill substrate-node
working-directory: testing/wasm-lightclient-tests
- name: Run subxt-signer WASM tests
run: |
wasm-pack test --headless --firefox
wasm-pack test --headless --chrome
working-directory: signer/tests/wasm
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
no-std-tests:
name: "Test (no_std)"
runs-on: ubuntu-latest
needs: [machete, docs]
timeout-minutes: 30
steps:
- name: Checkout sources
uses: actions/checkout@v6
# Note: needs nightly toolchain because otherwise we cannot define custom lang-items.
- name: Install Rust nightly toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true
target: thumbv7em-none-eabi
- name: Install the gcc-arm-none-eabi linker
run: sudo apt install gcc-arm-none-eabi
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
# Note: We currently do not have a way to run real tests in a `no_std` environment.
# We can only make sure that they compile to ARM thumb ISA.
# Running the binary and inspecting the output would require an actual machine with matching ISA or some sort of emulator.
- name: Compile `no-std-tests` crate to `thumbv7em-none-eabi` target.
run: cargo build --target thumbv7em-none-eabi
working-directory: testing/no-std-tests
- if: "failure()"
uses: "andymckay/cancel-action@a955d435292c0d409d104b57d8e78435a93a6ef1" # v0.5
@@ -0,0 +1,62 @@
name: Update Artifacts
on:
workflow_dispatch: # Allows manual triggering
schedule:
- cron: "0 0 * * 1" # weekly on Monday at 00:00 UTC
concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
env:
CARGO_TERM_COLOR: always
jobs:
check:
name: Renew Artifacts
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v6
# We run this (up-to-date) node locally to fetch metadata from it for the artifacts
- name: Use substrate and polkadot node binaries
uses: ./.github/workflows/actions/use-nodes
- name: Install Rust stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Rust Cache
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
# This starts a substrate node and runs a few subxt cli child processes to fetch metadata from it and generate code.
# In particular it generates:
# - 4 metadata (*.scale) files in the `artifacts` directory
# - a polkadot.rs file from the full metadata that is checked in integration tests
# - a polkadot.json in the `artifacts/demo_chain_specs` directory
- name: Fetch Artifacts
run: cargo run --bin artifacts
- uses: actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ secrets.SUBXT_PR_MAKER_APP_ID }}
private-key: ${{ secrets.SUBXT_PR_MAKER_APP_KEY }}
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
with:
token: ${{ steps.app-token.outputs.token }}
base: master
branch: update-artifacts
commit-message: Update Artifacts (auto-generated)
branch-suffix: timestamp
title: Update Artifacts (auto-generated)
body: |
This PR updates the artifacts by fetching fresh metadata from a substrate node.
It also recreates the polkadot.rs file used in the integration tests.
It was created automatically by a Weekly GitHub Action Cronjob.
@@ -0,0 +1,14 @@
name: Dependabot
on:
pull_request:
paths:
- '.github/dependabot.yml'
- '.github/workflows/validate-dependabot.yml'
jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: marocchino/validate-dependabot@v3
id: validate
+9
View File
@@ -0,0 +1,9 @@
/target
**/*.rs.bk
**/.DS_Store
cargo-timing*
/examples/wasm-example/dist
/examples/wasm-example/target
/examples/parachain-example/target
/examples/parachain-example/metadata/target
.vscode
+2401
View File
File diff suppressed because it is too large Load Diff
+23
View File
@@ -0,0 +1,23 @@
# Lists some code owners.
#
# A codeowner just oversees some part of the codebase. If an owned file is changed then the
# corresponding codeowner receives a review request. An approval of the codeowner might be
# required for merging a PR (depends on repository settings).
#
# For details about syntax, see:
# https://help.github.com/en/articles/about-code-owners
# But here are some important notes:
#
# - Glob syntax is git-like, e.g. `/core` means the core directory in the root, unlike `core`
# which can be everywhere.
# - Multiple owners are supported.
# - Either handle (e.g, @github_user or @github_org/team) or email can be used. Keep in mind,
# that handles might work better because they are more recognizable on GitHub,
# you can use them for mentioning unlike an email.
# - The latest matching rule, if multiple, takes precedence.
# main codeowner
* @paritytech/subxt-team
# CI
/.github/ @paritytech/ci @paritytech/subxt-team
+5
View File
@@ -0,0 +1,5 @@
# This is a virtual manifest for the vendored pezkuwi-subxt crates
# Individual crates are managed by the main pezkuwi-sdk workspace
[workspace]
# Empty workspace - crates are part of parent workspace
+3
View File
@@ -0,0 +1,3 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
+17
View File
@@ -0,0 +1,17 @@
Copyright 2019-2025 Parity Technologies (UK) Ltd.
This program is free software: you can redistribute it and/or modify
it under the terms of (at your option) either the Apache License,
Version 2.0, or the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
For details and specific language governing permissions and
limitations, see either
- http://www.gnu.org/licenses/ for the GNU GPL
- http://www.apache.org/licenses/LICENSE-2.0 for the Apache license
+79
View File
@@ -0,0 +1,79 @@
# subxt · [![build](https://github.com/pezkuwichain/subxt/actions/workflows/rust.yml/badge.svg)](https://github.com/pezkuwichain/subxt/actions/workflows/rust.yml) [![Latest Version](https://img.shields.io/crates/v/subxt.svg)](https://crates.io/crates/subxt) [![Documentation](https://docs.rs/subxt/badge.svg)](https://docs.rs/subxt)
Subxt is a library for interacting with [Bizinikiwi](https://github.com/pezkuwichain/pezkuwi-sdk) based nodes in Rust and WebAssembly. It can:
- Submit Extrinsics (this is where the name comes from).
- Subscribe to blocks, reading the extrinsics and associated events from them.
- Read and iterate over storage values.
- Read constants and custom values from the metadata.
- Call runtime APIs, returning the results.
- Do all of the above via a safe, statically typed interface or via a dynamic one when you need the flexibility.
- Compile to WASM and run entirely in the browser.
- Do a bunch of things in a `#[no_std]` environment via the `subxt-core` crate.
- Use a built-in light client (`smoldot`) to interact with chains.
## Usage
Take a look in the [examples](./subxt/examples) folder or the [examples](./examples) folder for various smaller or
larger `subxt` usage examples, or [read the guide](https://docs.rs/subxt/latest/subxt/book/index.html) to learn more.
### Downloading metadata from a Bizinikiwi node
Use the [`subxt-cli`](./cli) tool to download the metadata for your target runtime from a node.
1. Install:
```bash
cargo install subxt-cli
```
2. Save the encoded metadata to a file:
```bash
subxt metadata -f bytes > metadata.scale
```
This defaults to querying the metadata of a locally running node on the default `http://localhost:9933/`. If querying
a different node then the `metadata` command accepts a `--url` argument.
## Subxt Documentation
For more details regarding utilizing subxt, please visit the [documentation](https://docs.rs/subxt/latest/subxt/).
## Integration Testing
Most tests require a running bizinikiwi node to communicate with. This is done by spawning an instance of the
bizinikiwi node per test. It requires an up-to-date `bizinikiwi` executable on your path.
This can be installed from source via cargo:
```bash
cargo install --git https://github.com/pezkuwichain/pezkuwi-sdk staging-node-cli --force
```
## Real world usage
Please add your project to this list via a PR.
- [cargo-contract](https://github.com/pezkuwichain/cargo-contract/) CLI for interacting with Wasm smart contracts.
- [xcm-cli](https://github.com/ascjones/xcm-cli) CLI for submitting XCM messages.
- [phala-pherry](https://github.com/Phala-Network/phala-blockchain/tree/master/standalone/pherry) The relayer between Phala blockchain and the off-chain Secure workers.
- [crunch](https://github.com/turboflakes/crunch) CLI to claim staking rewards in batch every Era or X hours for bizinikiwi-based chains.
- [interbtc-clients](https://github.com/interlay/interbtc-clients) Client implementations for the interBTC parachain; notably the Vault / Relayer and Oracle.
- [tidext](https://github.com/tidelabs/tidext) Tidechain client with Stronghold signer.
- [staking-miner-v2](https://github.com/pezkuwichain/staking-miner-v2) Submit NPos election solutions and get rewards.
- [pezkuwi-introspector](https://github.com/pezkuwichain/pezkuwi-introspector) Tools for monitoring Pezkuwi nodes.
- [ink!](https://github.com/pezkuwichain/ink) Smart contract language that uses `subxt` for allowing developers to conduct [End-to-End testing](https://use.ink/basics/contract-testing/end-to-end-e2e-testing) of their contracts.
- [Chainflip](https://github.com/chainflip-io/chainflip-backend) A decentralised exchange for native cross-chain swaps.
- [Hyperbridge](https://github.com/polytope-labs/hyperbridge) A hyperscalable coprocessor for verifiable cross-chain interoperability.
- [pop CLI](https://github.com/r0gue-io/pop-cli) The all-in-one tool for Pezkuwi development.
**Alternatives**
[bizinikiwi-api-client](https://github.com/scs/bizinikiwi-api-client) provides similar functionality.
#### License
The entire code within this repository is dual licensed under the _GPL-3.0_ or _Apache-2.0_ licenses. See [the LICENSE](./LICENSE) file for more details.
Please <a href="https://www.parity.io/contact/">contact us</a> if you have questions about the licensing of our products.
+108
View File
@@ -0,0 +1,108 @@
# Release Checklist
These steps assume that you've checked out the Subxt repository and are in the root directory of it.
We also assume that ongoing work done is being merged directly to the `master` branch.
1. Ensure that everything you'd like to see released is on the `master` branch.
2. Create a release branch off `master`, for example `release-v0.17.0`. Decide how far the version needs to be bumped based
on the changes to date. If unsure what to bump the version to (e.g. is it a major, minor or patch release), check with the
Parity Tools team.
3. Check that you're happy with the current documentation.
```
cargo doc --open
```
CI checks for broken internal links at the moment. Optionally you can also confirm that any external links
are still valid like so:
```
cargo install cargo-deadlinks
cargo deadlinks --check-http
```
If there are minor issues with the documentation, they can be fixed in the release branch.
4. Bump the crate versions in the root `Cargo.toml` to whatever was decided in step 2 (basically a find and replace from old version to new version in this file should do the trick).
5. Ensure the `Cargo.lock` file is up to date.
```
cargo generate-lockfile
```
6. Update `CHANGELOG.md` to reflect the difference between this release and the last. If you're unsure of
what to add, check with the Tools team. See the `CHANGELOG.md` file for details of the format it follows.
First, if there have been any significant changes, add a description of those changes to the top of the
changelog entry for this release.
Next, you can use the following script to generate the merged PRs between releases:
```
./scripts/generate_changelog.sh
```
Ensure that the script picked the latest published release tag (e.g. if releasing `v0.17.0`, the script should
provide `[+] Latest release tag: v0.16.0` ). Then group the PRs into "Fixed", "Added" and "Changed" sections, and make any
other adjustments that you feel are necessary for clarity.
7. If any of the differences impact the minimum version of `rustc` that the code will run on, please update the `rust-version`
field in the root `Cargo.toml` accordingly.
8. Commit any of the above changes to the release branch and open a PR in GitHub with a base of `master`.
9. Once the branch has been reviewed and passes CI, merge it.
10. Now, we're ready to publish the release to crates.io.
1. Checkout `master`, ensuring we're looking at the latest merge (`git pull`).
```
git checkout master && git pull
```
2. Perform a final sanity check that everything looks ok.
```
cargo test --all-targets
```
3. Run the following command to publish each crate in the required order (allowing
a little time in between each to let crates.io catch up with what we've published).
```
(cd utils/strip-metadata && cargo publish) && \
(cd metadata && cargo publish) && \
(cd lightclient && cargo publish) && \
(cd utils/fetch-metadata && cargo publish) && \
(cd codegen && cargo publish) && \
(cd macro && cargo publish);
```
Now, remove the dev dependencies from `subxt-core` (to avoid circular deps), and then run:
```
(cd core && cargo publish) && \
(cd rpcs && cargo publish) && \
(cd subxt && cargo publish) && \
(cd signer && cargo publish) && \
(cd cli && cargo publish);
```
Finally, put back the dev dependencies in `subxt-core`.
11. If the release was successful, tag the commit that we released in the `master` branch with the
version that we just released, for example:
```
git tag -s v0.17.0 # use the version number you've just published to crates.io, not this one
git push --tags
```
Once this is pushed, go along to [the releases page on GitHub](https://github.com/pezkuwichain/subxt/releases)
and draft a new release which points to the tag you just pushed to `master` above. Copy the changelog comments
for the current release into the release description.
+3
View File
@@ -0,0 +1,3 @@
# result_large_err lint complains if error variant is 128 bytes or more by default.
# Our error is. Let's up this limit a bit for now to avoid lots of warnings.
large-error-threshold = 512
+45
View File
@@ -0,0 +1,45 @@
[package]
name = "pezkuwi-subxt-codegen"
version.workspace = true
authors.workspace = true
edition.workspace = true
rust-version.workspace = true
publish = true
license.workspace = true
repository.workspace = true
documentation = "https://docs.rs/pezkuwi-subxt-codegen"
homepage.workspace = true
description = "Generate an API for interacting with a Pezkuwi/Bizinikiwi node from FRAME metadata"
[features]
default = []
web = ["getrandom/js"]
[dependencies]
codec = { package = "parity-scale-codec", workspace = true, features = ["derive"] }
frame-metadata = { workspace = true, optional = true }
heck = { workspace = true }
pezkuwi-subxt-metadata = { workspace = true }
proc-macro2 = { workspace = true }
quote = { workspace = true }
scale-info = { workspace = true }
scale-typegen = { workspace = true }
syn = { workspace = true }
thiserror = { workspace = true }
# Included if "web" feature is enabled, to enable its js feature.
getrandom = { workspace = true, optional = true }
[dev-dependencies]
frame-metadata = { workspace = true }
scale-info = { workspace = true, features = ["bit-vec"] }
[package.metadata.docs.rs]
features = ["default"]
rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.playground]
default-features = true
[lints]
workspace = true
+143
View File
@@ -0,0 +1,143 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::CodegenError;
use heck::{ToSnakeCase as _, ToUpperCamelCase as _};
use pezkuwi_subxt_metadata::PalletMetadata;
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
use scale_typegen::{
TypeGenerator,
typegen::ir::{ToTokensWithSettings, type_ir::CompositeIRKind},
};
/// Generate calls from the provided pallet's metadata. Each call returns a `StaticPayload`
/// that can be passed to the subxt client to submit/sign/encode.
///
/// For every call variant of the pallet's call enum this emits:
/// - a struct (inside a `types` submodule) implementing `StaticExtrinsic`, and
/// - a builder method on a `TransactionApi` struct returning a `StaticPayload`.
///
/// # Arguments
///
/// - `type_gen` - [`scale_typegen::TypeGenerator`] that contains settings and all types from the
///   runtime metadata.
/// - `pallet` - Pallet metadata from which the calls are generated.
/// - `crate_path` - The crate path under which the `subxt-core` crate is located, e.g.
///   `::pezkuwi_subxt::ext::pezkuwi_subxt_core` when using subxt as a dependency.
///
/// # Errors
///
/// Returns [`CodegenError::InvalidCallVariant`] if a call variant has unnamed (tuple) fields,
/// and [`CodegenError::MissingCallMetadata`] if no call hash is found for a variant.
pub fn generate_calls(
    type_gen: &TypeGenerator,
    pallet: &PalletMetadata,
    crate_path: &syn::Path,
) -> Result<TokenStream2, CodegenError> {
    // Early return if the pallet has no calls.
    let Some(call_ty) = pallet.call_ty_id() else {
        return Ok(quote!());
    };

    // One struct definition per call-enum variant, named in UpperCamelCase with a "Call" suffix.
    let variant_names_and_struct_defs = super::generate_structs_from_variants(
        type_gen,
        call_ty,
        |name| name.to_upper_camel_case().into(),
        "Call",
    )?;

    // For each variant, build both the call struct (with its StaticExtrinsic impl) and the
    // corresponding TransactionApi builder method; fail fast on the first codegen error.
    let (call_structs, call_fns): (Vec<_>, Vec<_>) = variant_names_and_struct_defs
        .into_iter()
        .map(|var| {
            // Split each variant's fields into (function-argument tokens, struct-initializer tokens).
            let (call_fn_args, call_args): (Vec<_>, Vec<_>) = match &var.composite.kind {
                CompositeIRKind::Named(named_fields) => named_fields
                    .iter()
                    .map(|(name, field)| {
                        // Note: `fn_arg_type` is the type path of the generated type alias; it is
                        // relative, so it must be prefixed with `types::` at the use site,
                        // e.g. `set_max_code_size::New`.
                        let fn_arg_type = field.type_path.to_token_stream(type_gen.settings());
                        // Boxed fields must be wrapped in `Box::new` when constructing the struct.
                        let call_arg = if field.is_boxed {
                            quote! { #name: #crate_path::alloc::boxed::Box::new(#name) }
                        } else {
                            quote! { #name }
                        };
                        (quote!( #name: types::#fn_arg_type ), call_arg)
                    })
                    .unzip(),
                // No fields: the builder method takes no arguments.
                CompositeIRKind::NoFields => Default::default(),
                // Tuple-style call variants are not supported.
                CompositeIRKind::Unnamed(_) => {
                    return Err(CodegenError::InvalidCallVariant(call_ty));
                },
            };

            let pallet_name = pallet.name();
            let call_name = &var.variant_name;
            let struct_name = &var.composite.name;
            // The static call hash is baked into the payload so the node can validate that
            // the statically generated code matches the runtime's call definition.
            let Some(call_hash) = pallet.call_hash(call_name) else {
                return Err(CodegenError::MissingCallMetadata(
                    pallet_name.into(),
                    call_name.to_string(),
                ));
            };
            // Builder method names are snake_case versions of the call variant names.
            let fn_name = format_ident!("{}", var.variant_name.to_snake_case());
            // Propagate the documentation just to `TransactionApi` methods, while
            // draining the documentation of inner call structures.
            let docs = &var.composite.docs;
            // This converts the composite into a full struct type. No type parameters needed here.
            let struct_def =
                type_gen.upcast_composite(&var.composite).to_token_stream(type_gen.settings());
            // Module of type aliases for the struct's field types (referenced via `types::...`).
            let alias_mod = var.type_alias_mod;
            // The call structure's documentation was stripped above.
            let call_struct = quote! {
                #struct_def
                #alias_mod

                impl #crate_path::blocks::StaticExtrinsic for #struct_name {
                    const PALLET: &'static str = #pallet_name;
                    const CALL: &'static str = #call_name;
                }
            };
            // The builder method on TransactionApi returning a pre-validated StaticPayload.
            let client_fn = quote! {
                #docs
                pub fn #fn_name(
                    &self,
                    #( #call_fn_args, )*
                ) -> #crate_path::tx::payload::StaticPayload<types::#struct_name> {
                    #crate_path::tx::payload::StaticPayload::new_static(
                        #pallet_name,
                        #call_name,
                        types::#struct_name { #( #call_args, )* },
                        [#(#call_hash,)*]
                    )
                }
            };
            Ok((call_struct, client_fn))
        })
        .collect::<Result<Vec<_>, _>>()?
        .into_iter()
        .unzip();

    // Resolve the pallet's call enum type path and its docs for the `Call` type alias.
    let call_type = type_gen.resolve_type_path(call_ty)?.to_token_stream(type_gen.settings());
    let call_ty = type_gen.resolve_type(call_ty)?;
    let docs = type_gen.docs_from_scale_info(&call_ty.docs);

    let types_mod_ident = type_gen.types_mod_ident();

    // Assemble the final `calls` module: the `Call` alias, the generated call structs
    // under `calls::types`, and the `TransactionApi` with one builder method per call.
    Ok(quote! {
        #docs
        pub type Call = #call_type;
        pub mod calls {
            use super::root_mod;
            use super::#types_mod_ident;

            type DispatchError = ::pezsp_runtime::DispatchError;

            pub mod types {
                use super::#types_mod_ident;

                #( #call_structs )*
            }

            pub struct TransactionApi;

            impl TransactionApi {
                #( #call_fns )*
            }
        }
    })
}
+95
View File
@@ -0,0 +1,95 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use heck::ToSnakeCase as _;
use pezkuwi_subxt_metadata::PalletMetadata;
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
use scale_typegen::{TypeGenerator, typegen::ir::ToTokensWithSettings};
use super::CodegenError;
/// Generate constants from the provided pallet's metadata.
///
/// A `constants` module is emitted under the pallet's module:
/// ```rust,ignore
/// pub mod PalletName {
///     pub mod constants {
///         ...
///     }
/// }
/// ```
///
/// Each constant becomes a method on the `ConstantsApi` wrapper. The static
/// metadata only provides the address and a validation hash; the value itself
/// is always read from the node's runtime metadata, so callers observe the
/// latest values even if the node's constants change.
///
/// # Arguments
///
/// - `type_gen` - [`scale_typegen::TypeGenerator`] that contains settings and all types from the
///   runtime metadata.
/// - `pallet` - Pallet metadata from which the constants are generated.
/// - `crate_path` - The crate path under which the `subxt-core` crate is located, e.g.
///   `::pezkuwi_subxt::ext::pezkuwi_subxt_core` when using subxt as a dependency.
pub fn generate_constants(
    type_gen: &TypeGenerator,
    pallet: &PalletMetadata,
    crate_path: &syn::Path,
) -> Result<TokenStream2, CodegenError> {
    // Nothing to emit for pallets without constants.
    if pallet.constants().len() == 0 {
        return Ok(quote!());
    }
    let mut constant_fns = Vec::new();
    for constant in pallet.constants() {
        let pallet_name = pallet.name();
        let constant_name = constant.name();
        let fn_name = format_ident!("{}", constant_name.to_snake_case());
        // The hash is needed to validate the static address against the node's
        // metadata at runtime; its absence means the metadata is inconsistent.
        let Some(constant_hash) = pallet.constant_hash(constant_name) else {
            return Err(CodegenError::MissingConstantMetadata(
                constant_name.into(),
                pallet_name.into(),
            ));
        };
        let return_ty =
            type_gen.resolve_type_path(constant.ty())?.to_token_stream(type_gen.settings());
        // Only propagate metadata docs when doc generation is enabled.
        let doc_strings = constant.docs();
        let docs = if type_gen.settings().should_gen_docs {
            quote! { #( #[doc = #doc_strings ] )* }
        } else {
            quote!()
        };
        constant_fns.push(quote! {
            #docs
            pub fn #fn_name(&self) -> #crate_path::constants::address::StaticAddress<#return_ty> {
                #crate_path::constants::address::StaticAddress::new_static(
                    #pallet_name,
                    #constant_name,
                    [#(#constant_hash,)*]
                )
            }
        });
    }
    let types_mod_ident = type_gen.types_mod_ident();
    Ok(quote! {
        pub mod constants {
            use super::#types_mod_ident;
            pub struct ConstantsApi;
            impl ConstantsApi {
                #(#constant_fns)*
            }
        }
    })
}
+75
View File
@@ -0,0 +1,75 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use heck::ToSnakeCase as _;
use pezkuwi_subxt_metadata::{CustomValueMetadata, Metadata};
use scale_typegen::{TypeGenerator, typegen::ir::ToTokensWithSettings};
use std::collections::HashSet;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
/// Generate the `CustomValuesApi` struct, exposing one accessor method per
/// custom value found in the metadata. When the metadata contains no custom
/// values, the struct is still emitted, just without any methods on it.
pub fn generate_custom_values(
    metadata: &Metadata,
    type_gen: &TypeGenerator,
    crate_path: &syn::Path,
) -> TokenStream2 {
    // Track generated method names so duplicates (after snake_casing) are skipped.
    let mut fn_names_taken = HashSet::new();
    let accessor_fns: Vec<_> = metadata
        .custom()
        .iter()
        .filter_map(|custom_value| {
            generate_custom_value_fn(custom_value, type_gen, crate_path, &mut fn_names_taken)
        })
        .collect();
    quote! {
        pub struct CustomValuesApi;
        impl CustomValuesApi {
            #(#accessor_fns)*
        }
    }
}
/// Generate an accessor function for a single custom value.
///
/// Returns `None` when the snake-cased name was already claimed by another
/// custom value, or when it does not form a valid Rust identifier.
fn generate_custom_value_fn(
    custom_value: CustomValueMetadata,
    type_gen: &TypeGenerator,
    crate_path: &syn::Path,
    fn_names_taken: &mut HashSet<String>,
) -> Option<TokenStream2> {
    // Method names are snake_case to make good function identifiers.
    let name = custom_value.name();
    let fn_name = name.to_snake_case();
    if fn_names_taken.contains(&fn_name) {
        return None;
    }
    // Skip names that cannot be turned into a valid identifier. Note: the name
    // is only recorded as taken once it has parsed successfully.
    let fn_name_ident = syn::parse_str::<syn::Ident>(&fn_name).ok()?;
    fn_names_taken.insert(fn_name);
    let custom_value_hash = custom_value.hash();
    // The type id recorded for a custom value is not guaranteed to exist in the
    // type registry, so check before resolving.
    let type_is_valid = custom_value.types().resolve(custom_value.type_id()).is_some();
    let (return_ty, decodable) = if !type_is_valid {
        // Unknown type: expose the raw SCALE-encoded bytes instead.
        (quote!(()), quote!(#crate_path::utils::No))
    } else {
        let resolved = type_gen
            .resolve_type_path(custom_value.type_id())
            .expect("type is in metadata; qed")
            .to_token_stream(type_gen.settings());
        (resolved, quote!(#crate_path::utils::Maybe))
    };
    Some(quote!(
        pub fn #fn_name_ident(&self) -> #crate_path::custom_values::address::StaticAddress<#return_ty, #decodable> {
            #crate_path::custom_values::address::StaticAddress::new_static(#name, [#(#custom_value_hash,)*])
        }
    ))
}
+34
View File
@@ -0,0 +1,34 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use pezkuwi_subxt_metadata::PalletMetadata;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use scale_typegen::TypeGenerator;
use super::CodegenError;
use scale_typegen::typegen::ir::ToTokensWithSettings;
/// Generate a type alias for the pallet's error type, if the pallet has one.
pub fn generate_error_type_alias(
    type_gen: &TypeGenerator,
    pallet: &PalletMetadata,
) -> Result<TokenStream2, CodegenError> {
    // Pallets without an error type produce no output at all.
    let error_ty = match pallet.error_ty_id() {
        Some(id) => id,
        None => return Ok(quote!()),
    };
    let error_type = type_gen.resolve_type_path(error_ty)?.to_token_stream(type_gen.settings());
    let resolved = type_gen.resolve_type(error_ty)?;
    // Carry the metadata docs over only when doc generation is enabled.
    let doc_strings = &resolved.docs;
    let docs = if type_gen.settings().should_gen_docs {
        quote! { #( #[doc = #doc_strings ] )* }
    } else {
        quote!()
    };
    Ok(quote! {
        #docs
        pub type Error = #error_type;
    })
}
+91
View File
@@ -0,0 +1,91 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::CodegenError;
use pezkuwi_subxt_metadata::PalletMetadata;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use scale_typegen::{TypeGenerator, typegen::ir::ToTokensWithSettings};
/// Generate events from the provided pallet metadata.
///
/// Creates an `events` module under the pallet's module:
///
/// ```rust,ignore
/// pub mod PalletName {
///     pub mod events {
///         ...
///     }
/// }
/// ```
///
/// Each event variant becomes a struct implementing
/// `pezkuwi_subxt::events::StaticEvent`, which ties the struct to the pallet
/// and event names so the event can be statically identified:
///
/// ```rust,ignore
/// pub struct EventName {
///     pub event_param: type,
/// }
/// impl ::pezkuwi_subxt::events::StaticEvent for EventName {
///     ...
/// }
/// ```
///
/// # Arguments
///
/// - `type_gen` - [`scale_typegen::TypeGenerator`] that contains settings and all types from the
///   runtime metadata.
/// - `pallet` - Pallet metadata from which the events are generated.
/// - `crate_path` - The crate path under which the `subxt-core` crate is located, e.g.
///   `::pezkuwi_subxt::ext::pezkuwi_subxt_core` when using subxt as a dependency.
pub fn generate_events(
    type_gen: &TypeGenerator,
    pallet: &PalletMetadata,
    crate_path: &syn::Path,
) -> Result<TokenStream2, CodegenError> {
    // Pallets without an event type produce no output.
    let Some(event_ty) = pallet.event_ty_id() else {
        return Ok(quote!());
    };
    // Event struct names keep the variant name as-is (already UpperCamelCase).
    let variants =
        super::generate_structs_from_variants(type_gen, event_ty, |name| name.into(), "Event")?;
    let mut event_structs = Vec::with_capacity(variants.len());
    for variant in variants {
        let pallet_name = pallet.name();
        let event_struct_name = &variant.composite.name;
        let event_name = variant.variant_name;
        let alias_mod = variant.type_alias_mod;
        let struct_def =
            type_gen.upcast_composite(&variant.composite).to_token_stream(type_gen.settings());
        event_structs.push(quote! {
            #struct_def
            #alias_mod
            impl #crate_path::events::StaticEvent for #event_struct_name {
                const PALLET: &'static str = #pallet_name;
                const EVENT: &'static str = #event_name;
            }
        });
    }
    let event_type = type_gen.resolve_type_path(event_ty)?.to_token_stream(type_gen.settings());
    let resolved = type_gen.resolve_type(event_ty)?;
    // Docs for the outer `Event` type alias, gated on the docs setting.
    let doc_strings = &resolved.docs;
    let docs = if type_gen.settings().should_gen_docs {
        quote! { #( #[doc = #doc_strings ] )* }
    } else {
        quote!()
    };
    let types_mod_ident = type_gen.types_mod_ident();
    Ok(quote! {
        #docs
        pub type Event = #event_type;
        pub mod events {
            use super::#types_mod_ident;
            #( #event_structs )*
        }
    })
}
+458
View File
@@ -0,0 +1,458 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Generate code for submitting extrinsics and query storage of a Bizinikiwi runtime.
mod calls;
mod constants;
mod custom_values;
mod errors;
mod events;
mod pallet_view_functions;
mod runtime_apis;
mod storage;
use pezkuwi_subxt_metadata::Metadata;
use scale_typegen::{
TypeGenerator,
typegen::{
ir::{
ToTokensWithSettings,
type_ir::{CompositeFieldIR, CompositeIR, CompositeIRKind},
},
type_params::TypeParameters,
type_path::TypePath,
},
};
use syn::{Ident, parse_quote};
use crate::{
api::custom_values::generate_custom_values, error::CodegenError, ir, subxt_type_gen_settings,
};
use heck::{ToSnakeCase as _, ToUpperCamelCase};
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
/// Create the API for interacting with a Bizinikiwi runtime.
pub struct RuntimeGenerator {
    // Runtime metadata driving codegen; its type paths are de-duplicated in
    // `RuntimeGenerator::new` before any generation happens.
    metadata: Metadata,
}
impl RuntimeGenerator {
    /// Create a new runtime generator from the provided metadata.
    ///
    /// **Note:** If you have the metadata path, URL or bytes to hand, prefer to use
    /// `GenerateRuntimeApi` for generating the runtime API from that.
    ///
    /// # Panics
    ///
    /// Panics if the runtime metadata version is not supported.
    ///
    /// Supported versions: v14 and v15.
    pub fn new(mut metadata: Metadata) -> Self {
        // De-duplicate type paths up front so generated type names don't collide.
        scale_typegen::utils::ensure_unique_type_paths(metadata.types_mut())
            .expect("Duplicate type paths in metadata; this is bug please file an issue.");
        RuntimeGenerator { metadata }
    }

    /// Generate the API for interacting with a Bizinikiwi runtime.
    ///
    /// # Arguments
    ///
    /// * `item_mod` - The module declaration for which the API is implemented.
    /// * `derives` - Provide custom derives for the generated types.
    /// * `type_substitutes` - Provide custom type substitutes.
    /// * `crate_path` - Path to the `subxt` crate.
    /// * `should_gen_docs` - True if the generated API contains the documentation from the
    ///   metadata.
    pub fn generate_runtime_types(
        &self,
        item_mod: syn::ItemMod,
        derives: scale_typegen::DerivesRegistry,
        type_substitutes: scale_typegen::TypeSubstitutes,
        crate_path: syn::Path,
        should_gen_docs: bool,
    ) -> Result<TokenStream2, CodegenError> {
        let item_mod_attrs = item_mod.attrs.clone();
        let item_mod_ir = ir::ItemMod::try_from(item_mod)?;
        let settings =
            subxt_type_gen_settings(derives, type_substitutes, &crate_path, should_gen_docs);
        let type_gen = TypeGenerator::new(self.metadata.types(), &settings);
        let types_mod = type_gen.generate_types_mod()?.to_token_stream(type_gen.settings());
        let mod_ident = &item_mod_ir.ident;
        let rust_items = item_mod_ir.rust_items();
        Ok(quote! {
            #( #item_mod_attrs )*
            #[allow(dead_code, unused_imports, non_camel_case_types, unreachable_patterns)]
            #[allow(clippy::all)]
            #[allow(rustdoc::broken_intra_doc_links)]
            pub mod #mod_ident {
                // Preserve any Rust items that were previously defined in the adorned module
                #( #rust_items ) *
                // Make it easy to access the root items via `root_mod` at different levels
                // without reaching out of this module.
                #[allow(unused_imports)]
                mod root_mod {
                    pub use super::*;
                }
                #types_mod
            }
        })
    }

    /// Generate the API for interacting with a Bizinikiwi runtime.
    ///
    /// # Arguments
    ///
    /// * `item_mod` - The module declaration for which the API is implemented.
    /// * `derives` - Provide custom derives for the generated types.
    /// * `type_substitutes` - Provide custom type substitutes.
    /// * `crate_path` - Path to the `subxt` crate.
    /// * `should_gen_docs` - True if the generated API contains the documentation from the
    ///   metadata.
    pub fn generate_runtime(
        &self,
        item_mod: syn::ItemMod,
        derives: scale_typegen::DerivesRegistry,
        type_substitutes: scale_typegen::TypeSubstitutes,
        crate_path: syn::Path,
        should_gen_docs: bool,
    ) -> Result<TokenStream2, CodegenError> {
        let item_mod_attrs = item_mod.attrs.clone();
        let item_mod_ir = ir::ItemMod::try_from(item_mod)?;
        let settings =
            subxt_type_gen_settings(derives, type_substitutes, &crate_path, should_gen_docs);
        let type_gen = TypeGenerator::new(self.metadata.types(), &settings);
        let types_mod = type_gen.generate_types_mod()?.to_token_stream(type_gen.settings());
        let types_mod_ident = type_gen.types_mod_ident();
        // Pair each pallet with the snake_case module name it is generated under.
        let pallets_with_mod_names = self
            .metadata
            .pallets()
            .map(|pallet| (pallet, format_ident!("{}", pallet.name().to_string().to_snake_case())))
            .collect::<Vec<_>>();
        // Pallet names and their length are used to create PALLETS array.
        // The array is used to identify the pallets composing the metadata for
        // validation of just those pallets.
        let pallet_names: Vec<_> = self.metadata.pallets().map(|pallet| pallet.name()).collect();
        let pallet_names_len = pallet_names.len();
        let runtime_api_names: Vec<_> =
            self.metadata.runtime_api_traits().map(|api| api.name().to_string()).collect();
        let runtime_api_names_len = runtime_api_names.len();
        // Generate one module per pallet, bundling its errors, calls, view
        // functions, events, storage and constants.
        let modules = pallets_with_mod_names
            .iter()
            .map(|(pallet, mod_name)| {
                let calls = calls::generate_calls(&type_gen, pallet, &crate_path)?;
                let event = events::generate_events(&type_gen, pallet, &crate_path)?;
                let storage_mod = storage::generate_storage(&type_gen, pallet, &crate_path)?;
                let constants_mod = constants::generate_constants(&type_gen, pallet, &crate_path)?;
                let errors = errors::generate_error_type_alias(&type_gen, pallet)?;
                let view_functions = pallet_view_functions::generate_pallet_view_functions(
                    &type_gen,
                    pallet,
                    &crate_path,
                )?;
                Ok(quote! {
                    pub mod #mod_name {
                        use super::root_mod;
                        use super::#types_mod_ident;
                        #errors
                        #calls
                        #view_functions
                        #event
                        #storage_mod
                        #constants_mod
                    }
                })
            })
            .collect::<Result<Vec<_>, CodegenError>>()?;
        let mod_ident = &item_mod_ir.ident;
        // Subsets of pallets that expose each kind of API; these drive the
        // top-level accessor structs generated below.
        let pallets_with_constants: Vec<_> = pallets_with_mod_names
            .iter()
            .filter_map(|(pallet, pallet_mod_name)| {
                pallet.constants().next().is_some().then_some(pallet_mod_name)
            })
            .collect();
        let pallets_with_storage: Vec<_> = pallets_with_mod_names
            .iter()
            .filter_map(|(pallet, pallet_mod_name)| pallet.storage().map(|_| pallet_mod_name))
            .collect();
        let pallets_with_calls: Vec<_> = pallets_with_mod_names
            .iter()
            .filter_map(|(pallet, pallet_mod_name)| pallet.call_ty_id().map(|_| pallet_mod_name))
            .collect();
        let pallets_with_view_functions: Vec<_> = pallets_with_mod_names
            .iter()
            .filter(|(pallet, _pallet_mod_name)| pallet.has_view_functions())
            .map(|(_, pallet_mod_name)| pallet_mod_name)
            .collect();
        let rust_items = item_mod_ir.rust_items();
        let apis_mod = runtime_apis::generate_runtime_apis(
            &self.metadata,
            &type_gen,
            types_mod_ident,
            &crate_path,
        )?;
        // Fetch the paths of the outer enums.
        // Bizinikiwi exposes those under `kitchensink_runtime`, while Pezkuwi under
        // `pezkuwi_runtime`.
        let call_path = type_gen
            .resolve_type_path(self.metadata.outer_enums().call_enum_ty())?
            .to_token_stream(type_gen.settings());
        let event_path = type_gen
            .resolve_type_path(self.metadata.outer_enums().event_enum_ty())?
            .to_token_stream(type_gen.settings());
        let error_path = type_gen
            .resolve_type_path(self.metadata.outer_enums().error_enum_ty())?
            .to_token_stream(type_gen.settings());
        // Hash of the full static metadata; `is_codegen_valid_for` compares a
        // node's hash (restricted to the same pallets/APIs) against it.
        let metadata_hash = self.metadata.hasher().hash();
        let custom_values = generate_custom_values(&self.metadata, &type_gen, &crate_path);
        Ok(quote! {
            #( #item_mod_attrs )*
            #[allow(dead_code, unused_imports, non_camel_case_types, unreachable_patterns)]
            #[allow(clippy::all)]
            #[allow(rustdoc::broken_intra_doc_links)]
            pub mod #mod_ident {
                // Preserve any Rust items that were previously defined in the adorned module.
                #( #rust_items ) *
                // Make it easy to access the root items via `root_mod` at different levels
                // without reaching out of this module.
                #[allow(unused_imports)]
                mod root_mod {
                    pub use super::*;
                }
                // Identify the pallets composing the static metadata by name.
                pub static PALLETS: [&str; #pallet_names_len] = [ #(#pallet_names,)* ];
                // Runtime APIs in the metadata by name.
                pub static RUNTIME_APIS: [&str; #runtime_api_names_len] = [ #(#runtime_api_names,)* ];
                /// The error type that is returned when there is a runtime issue.
                pub type DispatchError = ::pezsp_runtime::DispatchError;
                /// The outer event enum.
                pub type Event = #event_path;
                /// The outer extrinsic enum.
                pub type Call = #call_path;
                /// The outer error enum represents the DispatchError's Module variant.
                pub type Error = #error_path;
                pub fn constants() -> ConstantsApi {
                    ConstantsApi
                }
                pub fn storage() -> StorageApi {
                    StorageApi
                }
                pub fn tx() -> TransactionApi {
                    TransactionApi
                }
                pub fn apis() -> runtime_apis::RuntimeApi {
                    runtime_apis::RuntimeApi
                }
                #apis_mod
                pub fn view_functions() -> ViewFunctionsApi {
                    ViewFunctionsApi
                }
                pub fn custom() -> CustomValuesApi {
                    CustomValuesApi
                }
                #custom_values
                pub struct ConstantsApi;
                impl ConstantsApi {
                    #(
                        pub fn #pallets_with_constants(&self) -> #pallets_with_constants::constants::ConstantsApi {
                            #pallets_with_constants::constants::ConstantsApi
                        }
                    )*
                }
                pub struct StorageApi;
                impl StorageApi {
                    #(
                        pub fn #pallets_with_storage(&self) -> #pallets_with_storage::storage::StorageApi {
                            #pallets_with_storage::storage::StorageApi
                        }
                    )*
                }
                pub struct TransactionApi;
                impl TransactionApi {
                    #(
                        pub fn #pallets_with_calls(&self) -> #pallets_with_calls::calls::TransactionApi {
                            #pallets_with_calls::calls::TransactionApi
                        }
                    )*
                }
                pub struct ViewFunctionsApi;
                impl ViewFunctionsApi {
                    #(
                        pub fn #pallets_with_view_functions(&self) -> #pallets_with_view_functions::view_functions::ViewFunctionsApi {
                            #pallets_with_view_functions::view_functions::ViewFunctionsApi
                        }
                    )*
                }
                /// check whether the metadata provided is aligned with this statically generated code.
                pub fn is_codegen_valid_for(metadata: &#crate_path::Metadata) -> bool {
                    let runtime_metadata_hash = metadata
                        .hasher()
                        .only_these_pallets(&PALLETS)
                        .only_these_runtime_apis(&RUNTIME_APIS)
                        .hash();
                    runtime_metadata_hash == [ #(#metadata_hash,)* ]
                }
                #( #modules )*
                #types_mod
            }
        })
    }
}
/// Return a vector of tuples of variant names and corresponding struct definitions.
pub fn generate_structs_from_variants<F>(
    type_gen: &TypeGenerator,
    type_id: u32,
    variant_to_struct_name: F,
    error_message_type_name: &str,
) -> Result<Vec<StructFromVariant>, CodegenError>
where
    F: Fn(&str) -> std::borrow::Cow<str>,
{
    let ty = type_gen.resolve_type(type_id)?;
    // Only variant (enum) types can be decomposed into per-variant structs.
    let scale_info::TypeDef::Variant(variant) = &ty.type_def else {
        return Err(CodegenError::InvalidType(error_message_type_name.into()));
    };
    let mut structs = Vec::with_capacity(variant.variants.len());
    for v in &variant.variants {
        // No generic parameters are carried over; field types are fully concrete.
        let mut type_params = TypeParameters::from_scale_info(&[]);
        let kind = type_gen.create_composite_ir_kind(&v.fields, &mut type_params)?;
        let struct_name = variant_to_struct_name(&v.name);
        let mut composite = CompositeIR::new(
            syn::parse_str(&struct_name).expect("enum variant is a valid ident; qed"),
            kind,
            type_gen.docs_from_scale_info(&v.docs),
        );
        // Rewrites the composite's field types to alias references and returns
        // the alias module housing those aliases.
        let type_alias_mod = generate_type_alias_mod(&mut composite, type_gen);
        structs.push(StructFromVariant {
            variant_name: v.name.to_string(),
            composite,
            type_alias_mod,
        });
    }
    Ok(structs)
}
pub struct StructFromVariant {
    // Name of the enum variant this struct was generated from.
    variant_name: String,
    // The struct definition derived from the variant's fields.
    composite: CompositeIR,
    // Token stream of the type alias module generated for the struct's fields.
    type_alias_mod: TokenStream2,
}
/// Rewrites the composite's field types to reference aliases that live in a
/// generated type alias module, and returns that module's token stream.
///
/// For example, a struct like this:
///
/// ```rust,ignore
/// pub struct SetMaxCodeSize {
///     pub new: ::core::primitive::u32,
/// }
/// ```
///
/// is rewritten to:
///
/// ```rust,ignore
/// pub struct SetMaxCodeSize {
///     pub new: set_max_code_size::New,
/// }
/// ```
///
/// with the returned alias module looking like this:
///
/// ```rust,ignore
/// pub mod set_max_code_size {
///     use super::runtime_types;
///     pub type New = ::core::primitive::u32;
/// }
/// ```
pub fn generate_type_alias_mod(
    composite: &mut CompositeIR,
    type_gen: &TypeGenerator,
) -> TokenStream2 {
    let alias_mod_name: Ident = syn::parse_str(&composite.name.to_string().to_snake_case())
        .expect("composite name in snake_case should be a valid identifier");
    let mut alias_defs: Vec<TokenStream2> = Vec::new();
    // Records `pub type Alias = <original path>;` and repoints the field at
    // `alias_mod_name::Alias`.
    let mut alias_field = |field: &mut CompositeFieldIR, alias_name: Ident| {
        let original_path = field.type_path.to_token_stream(type_gen.settings());
        alias_defs.push(quote!(pub type #alias_name = #original_path;));
        let new_path: syn::Path = parse_quote!(#alias_mod_name::#alias_name);
        field.type_path = TypePath::from_syn_path(new_path);
    };
    match &mut composite.kind {
        // Unit structs get no alias module at all.
        CompositeIRKind::NoFields => return quote!(),
        CompositeIRKind::Named(fields) => {
            for (field_name, field) in fields.iter_mut() {
                let alias_name = format_ident!("{}", field_name.to_string().to_upper_camel_case());
                alias_field(field, alias_name);
            }
        }
        CompositeIRKind::Unnamed(fields) => {
            for (idx, field) in fields.iter_mut().enumerate() {
                alias_field(field, format_ident!("Field{}", idx));
            }
        }
    }
    let types_mod_ident = type_gen.types_mod_ident();
    quote!(pub mod #alias_mod_name {
        use super::#types_mod_ident;
        #( #alias_defs )*
    })
}
@@ -0,0 +1,183 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use heck::ToUpperCamelCase as _;
use crate::CodegenError;
use pezkuwi_subxt_metadata::{PalletMetadata, ViewFunctionMetadata};
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
use scale_typegen::{TypeGenerator, typegen::ir::ToTokensWithSettings};
use std::collections::HashSet;
/// Generate the `view_functions` module for a pallet, exposing one method per
/// view function on a `ViewFunctionsApi` wrapper type.
pub fn generate_pallet_view_functions(
    type_gen: &TypeGenerator,
    pallet: &PalletMetadata,
    crate_path: &syn::Path,
) -> Result<TokenStream2, CodegenError> {
    // Skip pallets that declare no view functions entirely.
    if !pallet.has_view_functions() {
        return Ok(quote! {});
    }
    // Each view function contributes a types module and an API method.
    let mut types = Vec::new();
    let mut methods = Vec::new();
    for vf in pallet.view_functions() {
        let (vf_types, vf_method) =
            generate_pallet_view_function(pallet.name(), vf, type_gen, crate_path)?;
        types.push(vf_types);
        methods.push(vf_method);
    }
    let types_mod_ident = type_gen.types_mod_ident();
    Ok(quote! {
        pub mod view_functions {
            use super::root_mod;
            use super::#types_mod_ident;
            pub struct ViewFunctionsApi;
            impl ViewFunctionsApi {
                #( #methods )*
            }
            #( #types )*
        }
    })
}
/// Generate the types module and the `ViewFunctionsApi` getter method for a
/// single pallet view function.
fn generate_pallet_view_function(
    pallet_name: &str,
    view_function: ViewFunctionMetadata<'_>,
    type_gen: &TypeGenerator,
    crate_path: &syn::Path,
) -> Result<(TokenStream2, TokenStream2), CodegenError> {
    let types_mod_ident = type_gen.types_mod_ident();
    let view_function_name_str = view_function.name();
    let view_function_name_ident = format_ident!("{view_function_name_str}");
    // Hash used to validate the static payload against the node's metadata.
    let validation_hash = view_function.hash();
    let docs = view_function.docs();
    let docs: TokenStream2 = type_gen
        .settings()
        .should_gen_docs
        .then_some(quote! { #( #[doc = #docs ] )* })
        .unwrap_or_default();
    // One input argument of the view function: its method-argument name, the
    // name of the generated type alias, and the path of the underlying type.
    struct Input {
        name: syn::Ident,
        type_alias: syn::Ident,
        type_path: TokenStream2,
    }
    let view_function_inputs: Vec<Input> = {
        let mut unique_names = HashSet::new();
        let mut unique_aliases = HashSet::new();
        view_function
            .inputs()
            .enumerate()
            .map(|(idx, input)| {
                // These are method names, which can just be '_', but struct field names can't
                // just be an underscore, so fix any such names we find to work in structs.
                let mut name = input.name.trim_start_matches('_').to_string();
                if name.is_empty() {
                    name = format!("_{idx}");
                }
                // De-duplicate repeated argument names by appending the index.
                while !unique_names.insert(name.clone()) {
                    name = format!("{name}_param{idx}");
                }
                // The alias type name is based on the name, above.
                let mut alias = name.to_upper_camel_case();
                // Note: name is not empty.
                if alias.as_bytes()[0].is_ascii_digit() {
                    alias = format!("Param{alias}");
                }
                while !unique_aliases.insert(alias.clone()) {
                    alias = format!("{alias}Param{idx}");
                }
                // Path to the actual type we'll have generated for this input.
                let type_path = type_gen
                    .resolve_type_path(input.id)
                    .expect("view function input type is in metadata; qed")
                    .to_token_stream(type_gen.settings());
                Input {
                    name: format_ident!("{name}"),
                    type_alias: format_ident!("{alias}"),
                    type_path,
                }
            })
            .collect()
    };
    let input_tuple_types = view_function_inputs
        .iter()
        .map(|i| {
            let ty = &i.type_alias;
            quote!(#view_function_name_ident::#ty)
        })
        .collect::<Vec<_>>();
    let input_args = view_function_inputs
        .iter()
        .map(|i| {
            let arg = &i.name;
            let ty = &i.type_alias;
            quote!(#arg: #view_function_name_ident::#ty)
        })
        .collect::<Vec<_>>();
    let input_type_aliases = view_function_inputs.iter().map(|i| {
        let ty = &i.type_alias;
        let path = &i.type_path;
        quote!(pub type #ty = #path;)
    });
    let input_param_names = view_function_inputs.iter().map(|i| &i.name);
    let output_type_path = type_gen
        .resolve_type_path(view_function.output_ty())?
        .to_token_stream(type_gen.settings());
    // Define the input and output type bits.
    let view_function_types = quote!(
        pub mod #view_function_name_ident {
            use super::root_mod;
            use super::#types_mod_ident;
            #(#input_type_aliases)*
            pub mod output {
                use super::#types_mod_ident;
                pub type Output = #output_type_path;
            }
        }
    );
    // Define the getter method that will live on the `ViewFunctionsApi` type.
    let view_function_method = quote!(
        #docs
        pub fn #view_function_name_ident(
            &self,
            #(#input_args),*
        ) -> #crate_path::view_functions::payload::StaticPayload<
            (#(#input_tuple_types,)*),
            #view_function_name_ident::output::Output
        > {
            #crate_path::view_functions::payload::StaticPayload::new_static(
                #pallet_name,
                #view_function_name_str,
                (#(#input_param_names,)*),
                [#(#validation_hash,)*],
            )
        }
    );
    Ok((view_function_types, view_function_method))
}
+399
View File
@@ -0,0 +1,399 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use std::collections::HashSet;
use heck::{ToSnakeCase as _, ToUpperCamelCase as _};
use pezkuwi_subxt_metadata::{Metadata, RuntimeApiMetadata};
use scale_typegen::{TypeGenerator, typegen::ir::ToTokensWithSettings};
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
use crate::CodegenError;
/// Generate the `runtime_apis` module, exposing one getter per runtime API
/// trait on the `RuntimeApi` struct, alongside the per-trait definitions.
pub fn generate_runtime_apis(
    metadata: &Metadata,
    type_gen: &TypeGenerator,
    types_mod_ident: &syn::Ident,
    crate_path: &syn::Path,
) -> Result<TokenStream2, CodegenError> {
    // Each runtime API trait contributes a definition module and a getter.
    let mut trait_defs = Vec::new();
    let mut trait_getters = Vec::new();
    for api in metadata.runtime_api_traits() {
        let (defs, getter) = generate_runtime_api(api, type_gen, crate_path)?;
        trait_defs.push(defs);
        trait_getters.push(getter);
    }
    Ok(quote! {
        pub mod runtime_apis {
            use super::root_mod;
            use super::#types_mod_ident;
            use #crate_path::ext::codec::Encode;
            pub struct RuntimeApi;
            impl RuntimeApi {
                #( #trait_getters )*
            }
            #( #trait_defs )*
        }
    })
}
/// Generates runtime functions for the given API metadata.
///
/// Returns a pair of (trait definition module, getter method for `RuntimeApi`).
fn generate_runtime_api(
    api: RuntimeApiMetadata,
    type_gen: &TypeGenerator,
    crate_path: &syn::Path,
) -> Result<(TokenStream2, TokenStream2), CodegenError> {
    let types_mod_ident = type_gen.types_mod_ident();
    // Trait name must remain as is (upper case) to identify the runtime call.
    let trait_name_str = api.name();
    // The snake case for the trait name.
    let trait_name_snake = format_ident!("{}", api.name().to_snake_case());
    let docs = api.docs();
    let docs: TokenStream2 = type_gen
        .settings()
        .should_gen_docs
        .then_some(quote! { #( #[doc = #docs ] )* })
        .unwrap_or_default();
    // One (types module, method) pair per runtime API method.
    let types_and_methods = api
        .methods()
        .map(|method| {
            let method_name = format_ident!("{}", method.name());
            let method_name_str = method.name();
            // Hash used to validate the static payload against the node's metadata.
            let validation_hash = method.hash();
            let docs = method.docs();
            let docs: TokenStream2 = type_gen
                .settings()
                .should_gen_docs
                .then_some(quote! { #( #[doc = #docs ] )* })
                .unwrap_or_default();
            // One input argument of the method: its argument name, the name of
            // the generated type alias, and the path of the underlying type.
            struct Input {
                name: syn::Ident,
                type_alias: syn::Ident,
                type_path: TokenStream2,
            }
            let runtime_api_inputs: Vec<Input> = {
                let mut unique_names = HashSet::new();
                let mut unique_aliases = HashSet::new();
                method
                    .inputs()
                    .enumerate()
                    .map(|(idx, input)| {
                        // The method argument name is either the input name or the
                        // index (eg _1, _2 etc) if one isn't provided.
                        // if we get unlucky we'll end up with param_param1 etc.
                        let mut name = input.name.trim_start_matches('_').to_string();
                        if name.is_empty() {
                            name = format!("_{idx}");
                        }
                        while !unique_names.insert(name.clone()) {
                            name = format!("{name}_param{idx}");
                        }
                        // The alias is either InputName if provided, or Param1, Param2 etc if not.
                        // If we get unlucky we may even end up with ParamParam1 etc.
                        let mut alias = name.trim_start_matches('_').to_upper_camel_case();
                        // Note: name is not empty.
                        if alias.as_bytes()[0].is_ascii_digit() {
                            alias = format!("Param{alias}");
                        }
                        while !unique_aliases.insert(alias.clone()) {
                            alias = format!("{alias}Param{idx}");
                        }
                        // Generate alias for runtime type.
                        let type_path = type_gen
                            .resolve_type_path(input.id)
                            .expect("runtime api input type is in metadata; qed")
                            .to_token_stream(type_gen.settings());
                        Input {
                            name: format_ident!("{name}"),
                            type_alias: format_ident!("{alias}"),
                            type_path,
                        }
                    })
                    .collect()
            };
            let input_tuple_types = runtime_api_inputs
                .iter()
                .map(|i| {
                    let ty = &i.type_alias;
                    quote!(#method_name::#ty)
                })
                .collect::<Vec<_>>();
            let input_args = runtime_api_inputs
                .iter()
                .map(|i| {
                    let arg = &i.name;
                    let ty = &i.type_alias;
                    quote!(#arg: #method_name::#ty)
                })
                .collect::<Vec<_>>();
            let input_param_names = runtime_api_inputs.iter().map(|i| &i.name);
            let input_type_aliases = runtime_api_inputs.iter().map(|i| {
                let ty = &i.type_alias;
                let path = &i.type_path;
                quote!(pub type #ty = #path;)
            });
            let output_type_path = type_gen
                .resolve_type_path(method.output_ty())?
                .to_token_stream(type_gen.settings());
            // Define the input and output type bits for the method.
            let runtime_api_types = quote! {
                pub mod #method_name {
                    use super::root_mod;
                    use super::#types_mod_ident;
                    #(#input_type_aliases)*
                    pub mod output {
                        use super::#types_mod_ident;
                        pub type Output = #output_type_path;
                    }
                }
            };
            // Define the method that will live on the runtime API trait struct.
            let runtime_api_method = quote!(
                #docs
                pub fn #method_name(
                    &self,
                    #(#input_args),*
                ) -> #crate_path::runtime_api::payload::StaticPayload<
                    (#(#input_tuple_types,)*),
                    #method_name::output::Output
                > {
                    #crate_path::runtime_api::payload::StaticPayload::new_static(
                        #trait_name_str,
                        #method_name_str,
                        (#(#input_param_names,)*),
                        [#(#validation_hash,)*],
                    )
                }
            );
            Ok((runtime_api_types, runtime_api_method))
        })
        .collect::<Result<Vec<_>, CodegenError>>()?;
    let trait_name = format_ident!("{}", trait_name_str);
    let types = types_and_methods.iter().map(|(types, _)| types);
    let methods = types_and_methods.iter().map(|(_, methods)| methods);
    // The runtime API definition and types.
    let trait_defs = quote!(
        pub mod #trait_name_snake {
            use super::root_mod;
            use super::#types_mod_ident;
            #docs
            pub struct #trait_name;
            impl #trait_name {
                #( #methods )*
            }
            #( #types )*
        }
    );
    // A getter for the `RuntimeApi` to get the trait structure.
    let trait_getter = quote!(
        pub fn #trait_name_snake(&self) -> #trait_name_snake::#trait_name {
            #trait_name_snake::#trait_name
        }
    );
    Ok((trait_defs, trait_getter))
}
#[cfg(test)]
mod tests {
    use crate::RuntimeGenerator;
    use frame_metadata::v15::{
        self, RuntimeApiMetadata, RuntimeApiMethodMetadata, RuntimeApiMethodParamMetadata,
    };
    use pezkuwi_subxt_metadata::Metadata;
    use quote::quote;
    use scale_info::meta_type;

    /// Build a minimal V15 [`Metadata`] containing only the provided runtime APIs.
    /// All extrinsic/outer-enum types are irrelevant to these tests, so unit is used.
    fn metadata_with_runtime_apis(runtime_apis: Vec<RuntimeApiMetadata>) -> Metadata {
        let extrinsic_metadata = v15::ExtrinsicMetadata {
            version: 0,
            signed_extensions: vec![],
            address_ty: meta_type::<()>(),
            call_ty: meta_type::<()>(),
            signature_ty: meta_type::<()>(),
            extra_ty: meta_type::<()>(),
        };
        let metadata: Metadata = v15::RuntimeMetadataV15::new(
            vec![],
            extrinsic_metadata,
            meta_type::<()>(),
            runtime_apis,
            v15::OuterEnums {
                call_enum_ty: meta_type::<()>(),
                event_enum_ty: meta_type::<()>(),
                error_enum_ty: meta_type::<()>(),
            },
            v15::CustomMetadata { map: Default::default() },
        )
        .try_into()
        .expect("can build valid metadata");
        metadata
    }

    /// Run the full runtime codegen over metadata containing `runtime_apis` and
    /// return the generated code as a string, so tests can assert on substrings.
    fn generate_code(runtime_apis: Vec<RuntimeApiMetadata>) -> String {
        let metadata = metadata_with_runtime_apis(runtime_apis);
        let item_mod = syn::parse_quote!(
            pub mod api {}
        );
        let generator = RuntimeGenerator::new(metadata);
        let generated = generator
            .generate_runtime(
                item_mod,
                Default::default(),
                Default::default(),
                syn::parse_str("::subxt_path").unwrap(),
                false,
            )
            .expect("should be able to generate runtime");
        generated.to_string()
    }

    // Distinct parameter names should map 1:1 to UpperCamelCase type aliases.
    #[test]
    fn unique_param_names() {
        let runtime_apis = vec![RuntimeApiMetadata {
            name: "Test",
            methods: vec![RuntimeApiMethodMetadata {
                name: "test",
                inputs: vec![
                    RuntimeApiMethodParamMetadata { name: "foo", ty: meta_type::<bool>() },
                    RuntimeApiMethodParamMetadata { name: "bar", ty: meta_type::<bool>() },
                ],
                output: meta_type::<bool>(),
                docs: vec![],
            }],
            docs: vec![],
        }];
        let code = generate_code(runtime_apis);
        let expected_alias = quote!(
            pub mod test {
                use super::{root_mod, runtime_types};
                pub type Foo = ::core::primitive::bool;
                pub type Bar = ::core::primitive::bool;
                pub mod output {
                    use super::runtime_types;
                    pub type Output = ::core::primitive::bool;
                }
            }
        );
        assert!(code.contains(&expected_alias.to_string()));
    }

    // Names that only differ in leading underscores collide once stripped;
    // colliding aliases get a `ParamN` suffix appended.
    #[test]
    fn duplicate_param_names() {
        let runtime_apis = vec![RuntimeApiMetadata {
            name: "Test",
            methods: vec![RuntimeApiMethodMetadata {
                name: "test",
                inputs: vec![
                    RuntimeApiMethodParamMetadata { name: "_a", ty: meta_type::<bool>() },
                    RuntimeApiMethodParamMetadata { name: "a", ty: meta_type::<bool>() },
                    RuntimeApiMethodParamMetadata { name: "__a", ty: meta_type::<bool>() },
                ],
                output: meta_type::<bool>(),
                docs: vec![],
            }],
            docs: vec![],
        }];
        let code = generate_code(runtime_apis);
        let expected_alias = quote!(
            pub mod test {
                use super::{root_mod, runtime_types};
                pub type A = ::core::primitive::bool;
                pub type AParam1 = ::core::primitive::bool;
                pub type AParam2 = ::core::primitive::bool;
                pub mod output {
                    use super::runtime_types;
                    pub type Output = ::core::primitive::bool;
                }
            }
        );
        assert!(code.contains(&expected_alias.to_string()));
    }

    // Pathological inputs: empty-after-stripping names and names that already
    // look like generated `ParamN` aliases must still end up unique.
    #[test]
    fn duplicate_param_and_alias_names() {
        let runtime_apis = vec![RuntimeApiMetadata {
            name: "Test",
            methods: vec![RuntimeApiMethodMetadata {
                name: "test",
                inputs: vec![
                    RuntimeApiMethodParamMetadata { name: "_", ty: meta_type::<bool>() },
                    RuntimeApiMethodParamMetadata { name: "_a", ty: meta_type::<bool>() },
                    RuntimeApiMethodParamMetadata { name: "_param_0", ty: meta_type::<bool>() },
                    RuntimeApiMethodParamMetadata { name: "__", ty: meta_type::<bool>() },
                    RuntimeApiMethodParamMetadata {
                        name: "___param_0_param_2",
                        ty: meta_type::<bool>(),
                    },
                ],
                output: meta_type::<bool>(),
                docs: vec![],
            }],
            docs: vec![],
        }];
        let code = generate_code(runtime_apis);
        let expected_alias = quote!(
            pub mod test {
                use super::{root_mod, runtime_types};
                pub type Param0 = ::core::primitive::bool;
                pub type A = ::core::primitive::bool;
                pub type Param0Param2 = ::core::primitive::bool;
                pub type Param3 = ::core::primitive::bool;
                pub type Param0Param2Param4 = ::core::primitive::bool;
                pub mod output {
                    use super::runtime_types;
                    pub type Output = ::core::primitive::bool;
                }
            }
        );
        assert!(code.contains(&expected_alias.to_string()));
    }
}
+236
View File
@@ -0,0 +1,236 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use heck::ToSnakeCase as _;
use pezkuwi_subxt_metadata::{PalletMetadata, StorageEntryMetadata};
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
use scale_typegen::TypeGenerator;
use super::CodegenError;
use scale_typegen::typegen::ir::ToTokensWithSettings;
/// Generate functions which create storage addresses from the provided pallet's metadata.
/// These addresses can be used to access and iterate over storage values.
///
/// # Arguments
///
/// - `type_gen` - [`scale_typegen::TypeGenerator`] that contains settings and all types from the
///   runtime metadata.
/// - `pallet` - Pallet metadata from which the storage items are generated.
/// - `crate_path` - The crate path under which the `subxt-core` crate is located, e.g.
///   `::pezkuwi_subxt::ext::pezkuwi_subxt_core` when using subxt as a dependency.
pub fn generate_storage(
    type_gen: &TypeGenerator,
    pallet: &PalletMetadata,
    crate_path: &syn::Path,
) -> Result<TokenStream2, CodegenError> {
    // Pallets without a storage section produce no code at all.
    let storage = match pallet.storage() {
        Some(storage) => storage,
        None => return Ok(quote!()),
    };

    // Each storage entry contributes one alias module and one getter method.
    let mut alias_modules = Vec::new();
    let mut getter_methods = Vec::new();
    for entry in storage.entries() {
        let (types, method) = generate_storage_entry_fns(type_gen, pallet, entry, crate_path)?;
        alias_modules.push(types);
        getter_methods.push(method);
    }

    let types_mod_ident = type_gen.types_mod_ident();
    Ok(quote! {
        pub mod storage {
            use super::root_mod;
            use super::#types_mod_ident;
            pub struct StorageApi;
            impl StorageApi {
                #( #getter_methods )*
            }
            #( #alias_modules )*
        }
    })
}
/// Returns storage entry functions and alias modules.
///
/// The first element of the returned tuple is a module of type aliases for the entry's
/// keys and value; the second is the getter method that is added to `StorageApi`.
fn generate_storage_entry_fns(
    type_gen: &TypeGenerator,
    pallet: &PalletMetadata,
    storage_entry: &StorageEntryMetadata,
    crate_path: &syn::Path,
) -> Result<(TokenStream2, TokenStream2), CodegenError> {
    let types_mod_ident = type_gen.types_mod_ident();
    let pallet_name = pallet.name();
    let storage_entry_name_str = storage_entry.name();
    let storage_entry_snake_case_name = storage_entry_name_str.to_snake_case();
    let storage_entry_snake_case_ident = format_ident!("{storage_entry_snake_case_name}");
    // The hash is baked into the static address so it can be validated against the
    // node's metadata at runtime.
    let Some(validation_hash) = pallet.storage_hash(storage_entry_name_str) else {
        return Err(CodegenError::MissingStorageMetadata(
            pallet_name.into(),
            storage_entry_name_str.into(),
        ));
    };
    let docs = storage_entry.docs();
    let docs: TokenStream2 = type_gen
        .settings()
        .should_gen_docs
        .then_some(quote! { #( #[doc = #docs ] )* })
        .unwrap_or_default();
    // One alias + resolved path per storage key.
    struct Input {
        type_alias: syn::Ident,
        type_path: TokenStream2,
    }
    let storage_key_types: Vec<Input> = storage_entry
        .keys()
        .enumerate()
        .map(|(idx, key)| {
            // Storage key aliases are just indexes; no names to use.
            let type_alias = format_ident!("Param{}", idx);
            // Path to the actual type we'll have generated for this input.
            let type_path = type_gen
                .resolve_type_path(key.key_id)
                // Fixed: this expect message previously said "view function input type",
                // a copy-paste from the view-function codegen.
                .expect("storage entry key type is in metadata; qed")
                .to_token_stream(type_gen.settings());
            Input { type_alias, type_path }
        })
        .collect();
    let storage_key_tuple_types = storage_key_types
        .iter()
        .map(|i| {
            let ty = &i.type_alias;
            quote!(#storage_entry_snake_case_ident::#ty)
        })
        .collect::<Vec<_>>();
    let storage_key_type_aliases = storage_key_types
        .iter()
        .map(|i| {
            let ty = &i.type_alias;
            let path = &i.type_path;
            quote!(pub type #ty = #path;)
        })
        .collect::<Vec<_>>();
    let storage_value_type_path = type_gen
        .resolve_type_path(storage_entry.value_ty())?
        .to_token_stream(type_gen.settings());
    // Keyless entries are "plain" storage values; presumably `Yes`/`Maybe` encodes
    // whether a fetch is guaranteed to yield a value — confirm against subxt-core utils.
    let is_plain = if storage_entry.keys().len() == 0 {
        quote!(#crate_path::utils::Yes)
    } else {
        quote!(#crate_path::utils::Maybe)
    };
    // Module of type aliases for this entry's keys and value.
    let storage_entry_types = quote!(
        pub mod #storage_entry_snake_case_ident {
            use super::root_mod;
            use super::#types_mod_ident;
            #(#storage_key_type_aliases)*
            pub mod output {
                use super::#types_mod_ident;
                pub type Output = #storage_value_type_path;
            }
        }
    );
    // The getter method that constructs a static storage address for this entry.
    let storage_entry_method = quote!(
        #docs
        pub fn #storage_entry_snake_case_ident(&self) -> #crate_path::storage::address::StaticAddress<
            (#(#storage_key_tuple_types,)*),
            #storage_entry_snake_case_ident::output::Output,
            #is_plain
        > {
            #crate_path::storage::address::StaticAddress::new_static(
                #pallet_name,
                #storage_entry_name_str,
                [#(#validation_hash,)*],
            )
        }
    );
    Ok((storage_entry_types, storage_entry_method))
}
#[cfg(test)]
mod tests {
    use frame_metadata::v15;
    use pezkuwi_subxt_metadata::Metadata;
    use scale_info::{MetaType, meta_type};

    // TODO: Think about adding tests for storage codegen which can use this sort of function.
    /// Build minimal V15 metadata containing a single pallet with the given
    /// `(name, key type)` map storage entries (all with `bool` values).
    #[allow(dead_code)]
    fn metadata_with_storage_entries(
        storage_entries: impl IntoIterator<Item = (&'static str, MetaType)>,
    ) -> Metadata {
        let mut entries: Vec<v15::StorageEntryMetadata> = Vec::new();
        for (name, key) in storage_entries {
            entries.push(v15::StorageEntryMetadata {
                name,
                modifier: v15::StorageEntryModifier::Optional,
                ty: v15::StorageEntryType::Map {
                    hashers: vec![v15::StorageHasher::Blake2_128Concat],
                    key,
                    value: meta_type::<bool>(),
                },
                default: vec![],
                docs: vec![],
            });
        }
        let pallet = v15::PalletMetadata {
            name: "Pallet1",
            storage: Some(v15::PalletStorageMetadata {
                prefix: Default::default(),
                entries,
            }),
            calls: None,
            event: None,
            constants: vec![],
            error: None,
            index: 0,
            docs: vec![],
        };
        // Extrinsic/outer-enum types are irrelevant here; use unit for everything.
        let extrinsics = v15::ExtrinsicMetadata {
            version: 0,
            signed_extensions: vec![],
            address_ty: meta_type::<()>(),
            call_ty: meta_type::<()>(),
            signature_ty: meta_type::<()>(),
            extra_ty: meta_type::<()>(),
        };
        v15::RuntimeMetadataV15::new(
            vec![pallet],
            extrinsics,
            meta_type::<()>(),
            vec![],
            v15::OuterEnums {
                call_enum_ty: meta_type::<()>(),
                event_enum_ty: meta_type::<()>(),
                error_enum_ty: meta_type::<()>(),
            },
            v15::CustomMetadata { map: Default::default() },
        )
        .try_into()
        .expect("can build valid metadata")
    }
}
+101
View File
@@ -0,0 +1,101 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Errors that can be emitted from codegen.
use proc_macro2::{Span, TokenStream as TokenStream2};
use scale_typegen::TypegenError;
/// Error returned when the Codegen cannot generate the runtime API.
#[derive(Debug, thiserror::Error)]
#[non_exhaustive]
pub enum CodegenError {
    /// Cannot decode the metadata bytes.
    #[error("Could not decode metadata, only V14 and V15 metadata are supported: {0}")]
    Decode(#[from] codec::Error),
    /// Out of line modules are not supported.
    #[error(
        "Out-of-line subxt modules are not supported, make sure you are providing a body to your module: pub mod pezkuwi {{ ... }}"
    )]
    InvalidModule(Span),
    /// Invalid type path.
    #[error("Invalid type path {0}: {1}")]
    InvalidTypePath(String, syn::Error),
    /// Metadata for constant could not be found.
    #[error(
        "Metadata for constant entry {0}_{1} could not be found. Make sure you are providing a valid bizinikiwi-based metadata"
    )]
    MissingConstantMetadata(String, String),
    /// Metadata for storage could not be found.
    #[error(
        "Metadata for storage entry {0}_{1} could not be found. Make sure you are providing a valid bizinikiwi-based metadata"
    )]
    MissingStorageMetadata(String, String),
    /// Metadata for call could not be found.
    #[error(
        "Metadata for call entry {0}_{1} could not be found. Make sure you are providing a valid bizinikiwi-based metadata"
    )]
    MissingCallMetadata(String, String),
    /// Metadata for runtime API could not be found.
    // Fixed: doc comment previously duplicated the `MissingCallMetadata` one.
    #[error(
        "Metadata for runtime API entry {0}_{1} could not be found. Make sure you are providing a valid bizinikiwi-based metadata"
    )]
    MissingRuntimeApiMetadata(String, String),
    /// Call variant must have all named fields.
    #[error(
        "Call variant for type {0} must have all named fields. Make sure you are providing a valid bizinikiwi-based metadata"
    )]
    InvalidCallVariant(u32),
    /// Type should be a variant/enum.
    #[error(
        "{0} type should be a variant/enum type. Make sure you are providing a valid bizinikiwi-based metadata"
    )]
    InvalidType(String),
    /// Extrinsic call type could not be found.
    #[error(
        "Extrinsic call type could not be found. Make sure you are providing a valid bizinikiwi-based metadata"
    )]
    MissingCallType,
    /// There are too many or too few hashers.
    #[error(
        "Could not generate functions for storage entry {storage_entry_name}. There are {key_count} keys, but only {hasher_count} hashers. The number of hashers must equal the number of keys or be exactly 1."
    )]
    InvalidStorageHasherCount {
        /// The name of the storage entry
        storage_entry_name: String,
        /// Number of keys
        key_count: usize,
        /// Number of hashers
        hasher_count: usize,
    },
    /// Cannot generate types.
    #[error("Type Generation failed: {0}")]
    TypeGeneration(#[from] TypegenError),
    /// Error when generating metadata from Wasm-runtime
    #[error("Failed to generate metadata from wasm file. reason: {0}")]
    Wasm(String),
    /// Other error.
    #[error("Other error: {0}")]
    Other(String),
}
impl CodegenError {
/// Fetch the location for this error.
// Todo: Probably worth storing location outside of the variant,
// so that there's a common way to set a location for some error.
fn get_location(&self) -> Span {
match self {
Self::InvalidModule(span) => *span,
Self::TypeGeneration(TypegenError::InvalidSubstitute(err)) => err.span,
Self::InvalidTypePath(_, err) => err.span(),
_ => proc_macro2::Span::call_site(),
}
}
/// Render the error as an invocation of syn::compile_error!.
pub fn into_compile_error(self) -> TokenStream2 {
let msg = self.to_string();
let span = self.get_location();
syn::Error::new(span, msg).into_compile_error()
}
}
+34
View File
@@ -0,0 +1,34 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::error::CodegenError;
use syn::token;
// A cut-down version of `syn::ItemMod`: the target module that generated code is
// placed into. Unlike `syn::ItemMod`, the module body is guaranteed to be present.
#[derive(Debug, PartialEq, Eq)]
pub struct ItemMod {
    // Module visibility (e.g. `pub`).
    vis: syn::Visibility,
    // The `mod` keyword token.
    mod_token: token::Mod,
    // The module name.
    pub ident: syn::Ident,
    // The braces delimiting the module body.
    brace: token::Brace,
    // The items inside the module body.
    items: Vec<syn::Item>,
}
impl TryFrom<syn::ItemMod> for ItemMod {
    type Error = CodegenError;

    fn try_from(module: syn::ItemMod) -> Result<Self, Self::Error> {
        // Out-of-line modules (`mod foo;`) carry no content; reject them.
        let Some((brace, items)) = module.content else {
            return Err(CodegenError::InvalidModule(module.ident.span()));
        };
        Ok(Self {
            vis: module.vis,
            mod_token: module.mod_token,
            ident: module.ident,
            brace,
            items,
        })
    }
}
impl ItemMod {
    /// Iterate over the items contained in the module body.
    pub fn rust_items(&self) -> impl Iterator<Item = &syn::Item> {
        self.items.as_slice().iter()
    }
}
+406
View File
@@ -0,0 +1,406 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Generate a type safe Subxt interface for a Bizinikiwi runtime from its metadata.
//! This is used by the `#[subxt]` macro and `subxt codegen` CLI command, but can also
//! be used directly if preferable.
#![deny(missing_docs)]
#![cfg_attr(docsrs, feature(doc_cfg))]
mod api;
pub mod error;
mod ir;
#[cfg(feature = "web")]
use getrandom as _;
use api::RuntimeGenerator;
use proc_macro2::TokenStream as TokenStream2;
use scale_typegen::{
DerivesRegistry, TypeGeneratorSettings, TypeSubstitutes, TypegenError,
typegen::settings::{AllocCratePath, substitutes::absolute_path},
};
use std::collections::HashMap;
use syn::parse_quote;
// Part of the public interface, so expose:
pub use error::CodegenError;
pub use pezkuwi_subxt_metadata::Metadata;
pub use syn;
/// Generate a type safe interface to use with `subxt`.
/// The options exposed here are similar to those exposed via
/// the `#[subxt]` macro or via the `subxt codegen` CLI command.
/// Both use this under the hood.
///
/// # Example
///
/// Generating an interface using all of the defaults:
///
/// ```rust,standalone_crate
/// use codec::Decode;
/// use pezkuwi_subxt_codegen::{ Metadata, CodegenBuilder };
///
/// // Get hold of and decode some metadata:
/// let encoded = std::fs::read("../artifacts/pezkuwi_metadata_full.scale").unwrap();
/// let metadata = Metadata::decode(&mut &*encoded).unwrap();
///
/// // Generate a TokenStream representing the code for the interface.
/// // This can be converted to a string, displayed as-is or output from a macro.
/// let token_stream = CodegenBuilder::new().generate(metadata);
/// ```
pub struct CodegenBuilder {
    // Absolute path to the `subxt-core` crate used in generated code.
    crate_path: syn::Path,
    // Apply the standard set of derives that Subxt relies on.
    use_default_derives: bool,
    // Apply the standard type substitutions (AccountId32, H256, ...).
    use_default_substitutions: bool,
    // Emit doc comments on generated items.
    generate_docs: bool,
    // Only generate the runtime types, not the full Subxt interface.
    runtime_types_only: bool,
    // The module that generated code is placed into.
    item_mod: syn::ItemMod,
    // Extra derives/attributes applied to every generated type.
    extra_global_derives: Vec<syn::Path>,
    extra_global_attributes: Vec<syn::Attribute>,
    // Per-type substitutions: generated path -> replacement path.
    type_substitutes: HashMap<syn::Path, syn::Path>,
    // Extra derives/attributes for specific types (non-recursive and recursive).
    derives_for_type: HashMap<syn::TypePath, Vec<syn::Path>>,
    attributes_for_type: HashMap<syn::TypePath, Vec<syn::Attribute>>,
    derives_for_type_recursive: HashMap<syn::TypePath, Vec<syn::Path>>,
    attributes_for_type_recursive: HashMap<syn::TypePath, Vec<syn::Attribute>>,
}
impl Default for CodegenBuilder {
    fn default() -> Self {
        Self {
            crate_path: syn::parse_quote!(::pezkuwi_subxt::ext::pezkuwi_subxt_core),
            use_default_derives: true,
            use_default_substitutions: true,
            generate_docs: true,
            runtime_types_only: false,
            item_mod: syn::parse_quote!(
                pub mod api {}
            ),
            extra_global_derives: Default::default(),
            extra_global_attributes: Default::default(),
            type_substitutes: Default::default(),
            derives_for_type: Default::default(),
            attributes_for_type: Default::default(),
            derives_for_type_recursive: Default::default(),
            attributes_for_type_recursive: Default::default(),
        }
    }
}
impl CodegenBuilder {
    /// Construct a builder to configure and generate a type-safe interface for Subxt.
    pub fn new() -> Self {
        CodegenBuilder::default()
    }

    /// Disable the default derives that are applied to all types.
    ///
    /// # Warning
    ///
    /// This is not recommended, and is highly likely to break some part of the
    /// generated interface. Expect compile errors.
    pub fn disable_default_derives(&mut self) {
        self.use_default_derives = false;
    }

    /// Disable the default type substitutions that are applied to the generated
    /// code.
    ///
    /// # Warning
    ///
    /// This is not recommended, and is highly likely to break some part of the
    /// generated interface. Expect compile errors.
    pub fn disable_default_substitutes(&mut self) {
        self.use_default_substitutions = false;
    }

    /// Disable the output of doc comments associated with the generated types and
    /// methods. This can reduce the generated code size at the expense of losing
    /// documentation.
    pub fn no_docs(&mut self) {
        self.generate_docs = false;
    }

    /// Only generate the types, and don't generate the rest of the Subxt specific
    /// interface.
    pub fn runtime_types_only(&mut self) {
        self.runtime_types_only = true;
    }

    /// Set the additional derives that will be applied to all types. By default,
    /// a set of derives required for Subxt are automatically added for all types.
    ///
    /// # Warning
    ///
    /// Invalid derives, or derives that cannot be applied to _all_ of the generated
    /// types (taking into account that some types are substituted for hand written ones
    /// that we cannot add extra derives for) will lead to compile errors in the
    /// generated code.
    pub fn set_additional_global_derives(&mut self, derives: Vec<syn::Path>) {
        self.extra_global_derives = derives;
    }

    /// Set the additional attributes that will be applied to all types. By default,
    /// a set of attributes required for Subxt are automatically added for all types.
    ///
    /// # Warning
    ///
    /// Invalid attributes can very easily lead to compile errors in the generated code.
    pub fn set_additional_global_attributes(&mut self, attributes: Vec<syn::Attribute>) {
        self.extra_global_attributes = attributes;
    }

    /// Set additional derives for a specific type at the path given.
    ///
    /// If you want to set the additional derives on all contained types recursively as well,
    /// you can set the `recursive` argument to `true`. If you don't do that,
    /// there might be compile errors in the generated code, if the derived trait
    /// relies on the fact that contained types also implement that trait.
    pub fn add_derives_for_type(
        &mut self,
        ty: syn::TypePath,
        derives: impl IntoIterator<Item = syn::Path>,
        recursive: bool,
    ) {
        if recursive {
            self.derives_for_type_recursive.entry(ty).or_default().extend(derives);
        } else {
            self.derives_for_type.entry(ty).or_default().extend(derives);
        }
    }

    /// Set additional attributes for a specific type at the path given.
    ///
    /// Setting the `recursive` argument to `true` will additionally add the specified
    /// attributes to all contained types recursively.
    pub fn add_attributes_for_type(
        &mut self,
        ty: syn::TypePath,
        attributes: impl IntoIterator<Item = syn::Attribute>,
        recursive: bool,
    ) {
        if recursive {
            self.attributes_for_type_recursive.entry(ty).or_default().extend(attributes);
        } else {
            self.attributes_for_type.entry(ty).or_default().extend(attributes);
        }
    }

    /// Substitute a type at the given path with some type at the second path. During codegen,
    /// we will avoid generating the type at the first path given, and instead point any references
    /// to that type to the second path given.
    ///
    /// The substituted type will need to implement the relevant traits to be compatible with the
    /// original, and it will need to SCALE encode and SCALE decode in a compatible way.
    pub fn set_type_substitute(&mut self, ty: syn::Path, with: syn::Path) {
        self.type_substitutes.insert(ty, with);
    }

    /// By default, all of the code is generated inside a module `pub mod api {}`. We decorate
    /// this module with a few attributes to reduce compile warnings and things. You can provide a
    /// target module here, allowing you to add additional attributes or inner code items (with the
    /// warning that duplicate identifiers will lead to compile errors).
    pub fn set_target_module(&mut self, item_mod: syn::ItemMod) {
        self.item_mod = item_mod;
    }

    /// Set the path to the `subxt` crate. By default, we expect it to be at
    /// `::pezkuwi_subxt::ext::pezkuwi_subxt_core`.
    ///
    /// # Panics
    ///
    /// Panics if the path provided is not an absolute path.
    pub fn set_subxt_crate_path(&mut self, crate_path: syn::Path) {
        if absolute_path(crate_path.clone()).is_err() {
            // Throw an error here, because otherwise we end up with a harder to comprehend error
            // when substitute types don't begin with an absolute path.
            panic!(
                "The provided crate path must be an absolute path, ie prefixed with '::' or 'crate'"
            );
        }
        self.crate_path = crate_path;
    }

    /// Generate an interface, assuming that the default path to the `subxt` crate is
    /// `::pezkuwi_subxt::ext::pezkuwi_subxt_core`. If the `subxt` crate is not available as a top
    /// level dependency, use [`Self::set_subxt_crate_path`] to configure a valid path to it first.
    pub fn generate(self, metadata: Metadata) -> Result<TokenStream2, CodegenError> {
        let crate_path = self.crate_path;
        // Start from the default derives/attributes (unless disabled), then layer
        // the user-provided global and per-type ones on top.
        let mut derives_registry: DerivesRegistry = if self.use_default_derives {
            default_derives(&crate_path)
        } else {
            DerivesRegistry::new()
        };
        derives_registry.add_derives_for_all(self.extra_global_derives);
        derives_registry.add_attributes_for_all(self.extra_global_attributes);
        for (ty, derives) in self.derives_for_type {
            derives_registry.add_derives_for(ty, derives, false);
        }
        for (ty, derives) in self.derives_for_type_recursive {
            derives_registry.add_derives_for(ty, derives, true);
        }
        for (ty, attributes) in self.attributes_for_type {
            derives_registry.add_attributes_for(ty, attributes, false);
        }
        for (ty, attributes) in self.attributes_for_type_recursive {
            derives_registry.add_attributes_for(ty, attributes, true);
        }
        // Likewise for substitutions: defaults (unless disabled) plus user additions.
        let mut type_substitutes: TypeSubstitutes = if self.use_default_substitutions {
            default_substitutes(&crate_path)
        } else {
            TypeSubstitutes::new()
        };
        for (from, with) in self.type_substitutes {
            // Substitution targets must be absolute so they resolve from anywhere in
            // the generated module tree.
            let abs_path = absolute_path(with).map_err(TypegenError::from)?;
            type_substitutes.insert(from, abs_path).map_err(TypegenError::from)?;
        }
        let item_mod = self.item_mod;
        let generator = RuntimeGenerator::new(metadata);
        let should_gen_docs = self.generate_docs;
        if self.runtime_types_only {
            generator.generate_runtime_types(
                item_mod,
                derives_registry,
                type_substitutes,
                crate_path,
                should_gen_docs,
            )
        } else {
            generator.generate_runtime(
                item_mod,
                derives_registry,
                type_substitutes,
                crate_path,
                should_gen_docs,
            )
        }
    }
}
/// The default [`scale_typegen::TypeGeneratorSettings`], subxt is using for generating code.
/// Useful for emulating subxt's code generation settings from e.g. subxt-explorer.
pub fn default_subxt_type_gen_settings() -> TypeGeneratorSettings {
    let subxt_core: syn::Path = parse_quote!(::pezkuwi_subxt::ext::pezkuwi_subxt_core);
    subxt_type_gen_settings(
        default_derives(&subxt_core),
        default_substitutes(&subxt_core),
        &subxt_core,
        true,
    )
}
/// Assemble the [`TypeGeneratorSettings`] used by subxt, given a set of derives
/// and type substitutions.
fn subxt_type_gen_settings(
    derives: scale_typegen::DerivesRegistry,
    substitutes: scale_typegen::TypeSubstitutes,
    crate_path: &syn::Path,
    should_gen_docs: bool,
) -> TypeGeneratorSettings {
    // True if the last two path segments are `codec::Encode` or `codec::Decode`.
    fn is_codec_encode_or_decode(path: &syn::Path) -> bool {
        let mut segments_backwards = path.segments.iter().rev();
        let last_is_codec_trait = segments_backwards
            .next()
            .is_some_and(|s| s.ident == "Encode" || s.ident == "Decode");
        let second_last_is_codec = segments_backwards
            .next()
            .is_some_and(|s| s.ident == "codec");
        last_is_codec_trait && second_last_is_codec
    }

    // Are we using codec::Encode or codec::Decode derives?
    let are_codec_derives_used = derives
        .default_derives()
        .derives()
        .iter()
        .any(is_codec_encode_or_decode);

    // If we're inserting the codec derives, we also should use `CompactAs` where necessary.
    let compact_as_type_path = if are_codec_derives_used {
        Some(parse_quote!(#crate_path::ext::codec::CompactAs))
    } else {
        None
    };

    TypeGeneratorSettings {
        types_mod_ident: parse_quote!(runtime_types),
        should_gen_docs,
        derives,
        substitutes,
        decoded_bits_type_path: Some(parse_quote!(#crate_path::utils::bits::DecodedBits)),
        compact_as_type_path,
        compact_type_path: Some(parse_quote!(#crate_path::ext::codec::Compact)),
        alloc_crate_path: AllocCratePath::Custom(parse_quote!(#crate_path::alloc)),
        // Note: even when we don't use codec::Encode and codec::Decode, we need to keep
        // #[codec(...)] attributes because `#[codec(skip)]` is still used/important with
        // `EncodeAsType` and `DecodeAsType`.
        insert_codec_attributes: true,
    }
}
/// The derives and attributes that subxt applies to every generated type by default.
fn default_derives(crate_path: &syn::Path) -> DerivesRegistry {
    // The derive macros need to be told where the scale-encode/decode crates live.
    let encode_crate_path = quote::quote!(#crate_path::ext::scale_encode).to_string();
    let decode_crate_path = quote::quote!(#crate_path::ext::scale_decode).to_string();

    let derives: [syn::Path; 3] = [
        parse_quote!(#crate_path::ext::scale_encode::EncodeAsType),
        parse_quote!(#crate_path::ext::scale_decode::DecodeAsType),
        parse_quote!(Debug),
    ];
    let attributes: [syn::Attribute; 2] = [
        parse_quote!(#[encode_as_type(crate_path = #encode_crate_path)]),
        parse_quote!(#[decode_as_type(crate_path = #decode_crate_path)]),
    ];

    let mut registry = DerivesRegistry::new();
    registry.add_derives_for_all(derives);
    registry.add_attributes_for_all(attributes);
    registry
}
/// The type substitutions that subxt applies by default, mapping well-known
/// runtime types to hand-written equivalents in `subxt-core`.
fn default_substitutes(crate_path: &syn::Path) -> TypeSubstitutes {
    let substitute_pairs: [(syn::Path, syn::Path); 13] = [
        (parse_quote!(bitvec::order::Lsb0), parse_quote!(#crate_path::utils::bits::Lsb0)),
        (parse_quote!(bitvec::order::Msb0), parse_quote!(#crate_path::utils::bits::Msb0)),
        (
            parse_quote!(pezsp_core::crypto::AccountId32),
            parse_quote!(#crate_path::utils::AccountId32),
        ),
        (parse_quote!(fp_account::AccountId20), parse_quote!(#crate_path::utils::AccountId20)),
        (
            parse_quote!(pezsp_runtime::multiaddress::MultiAddress),
            parse_quote!(#crate_path::utils::MultiAddress),
        ),
        (parse_quote!(primitive_types::H160), parse_quote!(#crate_path::utils::H160)),
        (parse_quote!(primitive_types::H256), parse_quote!(#crate_path::utils::H256)),
        (parse_quote!(primitive_types::H512), parse_quote!(#crate_path::utils::H512)),
        (
            parse_quote!(pezframe_support::traits::misc::WrapperKeepOpaque),
            parse_quote!(#crate_path::utils::WrapperKeepOpaque),
        ),
        // BTreeMap and BTreeSet impose an `Ord` constraint on their key types. This
        // can cause an issue with generated code that doesn't impl `Ord` by default.
        // Decoding them to Vec by default (KeyedVec is just an alias for Vec with
        // suitable type params) avoids these issues.
        (parse_quote!(BTreeMap), parse_quote!(#crate_path::utils::KeyedVec)),
        (parse_quote!(BinaryHeap), parse_quote!(#crate_path::alloc::vec::Vec)),
        (parse_quote!(BTreeSet), parse_quote!(#crate_path::alloc::vec::Vec)),
        // The `UncheckedExtrinsic(pub Vec<u8>)` is part of the runtime API calls.
        // The inner bytes represent the encoded extrinsic, however when deriving the
        // `EncodeAsType` the bytes would be re-encoded. This leads to the bytes
        // being altered by adding the length prefix in front of them.
        // Note: Not sure if this is appropriate or not. The most recent pezkuwi.rs file does not
        // have these.
        (
            parse_quote!(pezsp_runtime::generic::unchecked_extrinsic::UncheckedExtrinsic),
            parse_quote!(#crate_path::utils::UncheckedExtrinsic),
        ),
    ];

    let mut substitutes = TypeSubstitutes::new();
    substitutes
        .extend(substitute_pairs.into_iter().map(|(from, to)| {
            (from, absolute_path(to).expect("default substitutes above are absolute paths; qed"))
        }))
        .expect("default substitutes can always be parsed; qed");
    substitutes
}
+83
View File
@@ -0,0 +1,83 @@
[package]
name = "pezkuwi-subxt-core"
version.workspace = true
authors.workspace = true
edition.workspace = true
rust-version.workspace = true
publish = true
license.workspace = true
readme = "README.md"
repository.workspace = true
documentation.workspace = true
homepage.workspace = true
description = "A no-std compatible subset of Subxt's functionality"
keywords = ["extrinsic", "no-std", "parity", "subxt"]
[features]
default = ["std"]
std = [
"codec/std",
"frame-metadata/std",
"hex/std",
"impl-serde/std",
"pezkuwi-subxt-metadata/std",
"pezsp-core/std",
"pezsp-crypto-hashing/std",
"pezsp-keyring/std",
"primitive-types/std",
"scale-info/std",
"serde/std",
"serde_json/std",
"tracing/std",
]
[dependencies]
codec = { package = "parity-scale-codec", workspace = true, default-features = false, features = ["derive"] }
derive-where = { workspace = true }
frame-decode = { workspace = true }
frame-metadata = { workspace = true, default-features = false }
hashbrown = { workspace = true }
hex = { workspace = true }
pezkuwi-subxt-metadata = { workspace = true, default-features = false }
pezsp-crypto-hashing = { workspace = true }
scale-bits = { workspace = true, default-features = false }
scale-decode = { workspace = true, default-features = false, features = ["derive", "primitive-types"] }
scale-encode = { workspace = true, default-features = false, features = ["bits", "derive", "primitive-types"] }
scale-info = { workspace = true, default-features = false, features = ["bit-vec"] }
scale-value = { workspace = true, default-features = false }
serde = { workspace = true, default-features = false, features = ["derive"] }
serde_json = { workspace = true, default-features = false, features = ["alloc", "raw_value"] }
thiserror = { workspace = true, default-features = false }
tracing = { workspace = true, default-features = false }
# Used to SS58-encode AccountId32 so that it serializes properly:
base58 = { workspace = true }
blake2 = { workspace = true }
# Provides some deserialization, types like U256/H256 and hashing impls like twox/blake256:
impl-serde = { workspace = true, default-features = false }
primitive-types = { workspace = true, default-features = false, features = ["codec", "scale-info", "serde_no_std"] }
# AccountId20
keccak-hash = { workspace = true }
[dev-dependencies]
assert_matches = { workspace = true }
bitvec = { workspace = true }
codec = { workspace = true, features = ["bit-vec", "derive"] }
hex = { workspace = true }
pezkuwi-subxt-macro = { workspace = true }
pezkuwi-subxt-signer = { workspace = true, features = ["sr25519", "subxt"] }
pezsp-core = { workspace = true }
pezsp-keyring = { workspace = true }
[package.metadata.docs.rs]
default-features = true
rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.playground]
default-features = true
[lints]
workspace = true
+3
View File
@@ -0,0 +1,3 @@
# Subxt-Core
This library provides a no-std compatible subset of functionality that `subxt` and `subxt-signer` rely on.
@@ -0,0 +1,146 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::{
Metadata,
config::{
Config, TransactionExtension,
transaction_extensions::{ChargeAssetTxPayment, ChargeTransactionPayment, CheckNonce},
},
dynamic::Value,
error::ExtrinsicError,
};
use alloc::borrow::ToOwned;
use frame_decode::extrinsics::ExtrinsicExtensions;
use scale_decode::DecodeAsType;
/// The signed extensions of an extrinsic.
#[derive(Debug, Clone)]
pub struct ExtrinsicTransactionExtensions<'a, T: Config> {
    bytes: &'a [u8],
    metadata: &'a Metadata,
    decoded_info: &'a ExtrinsicExtensions<'static, u32>,
    _marker: core::marker::PhantomData<T>,
}

impl<'a, T: Config> ExtrinsicTransactionExtensions<'a, T> {
    pub(crate) fn new(
        bytes: &'a [u8],
        metadata: &'a Metadata,
        decoded_info: &'a ExtrinsicExtensions<'static, u32>,
    ) -> Self {
        Self { metadata, bytes, decoded_info, _marker: core::marker::PhantomData }
    }

    /// Returns an iterator over each of the signed extension details of the extrinsic.
    pub fn iter(&self) -> impl Iterator<Item = ExtrinsicTransactionExtension<'a, T>> + use<'a, T> {
        // Copy the `'a`-lived references out of `&self` so that the returned
        // iterator does not borrow `self`.
        let bytes = self.bytes;
        let metadata = self.metadata;
        self.decoded_info.iter().map(move |ext| ExtrinsicTransactionExtension {
            bytes: &bytes[ext.range()],
            ty_id: *ext.ty(),
            identifier: ext.name(),
            metadata,
            _marker: core::marker::PhantomData,
        })
    }

    /// Searches through all signed extensions to find a specific one.
    /// If the Signed Extension is not found `Ok(None)` is returned.
    /// If the Signed Extension is found but decoding failed `Err(_)` is returned.
    pub fn find<S: TransactionExtension<T>>(&self) -> Result<Option<S::Decoded>, ExtrinsicError> {
        // Stop at the first extension that either matches or fails to decode.
        self.iter()
            .find_map(|ext| match ext.as_signed_extension::<S>() {
                Ok(Some(decoded)) => Some(Ok(decoded)),
                Ok(None) => None,
                Err(e) => Some(Err(e)),
            })
            .transpose()
    }

    /// The tip of an extrinsic, extracted from the ChargeTransactionPayment or ChargeAssetTxPayment
    /// signed extension, depending on which is present.
    ///
    /// Returns `None` if `tip` was not found or decoding failed.
    pub fn tip(&self) -> Option<u128> {
        // Note: the overhead of iterating a couple of times should be negligible.
        if let Some(charge) = self.find::<ChargeTransactionPayment>().ok().flatten() {
            return Some(charge.tip());
        }
        let asset_charge = self.find::<ChargeAssetTxPayment<T>>().ok().flatten()?;
        Some(asset_charge.tip())
    }

    /// The nonce of the account that submitted the extrinsic, extracted from the CheckNonce signed
    /// extension.
    ///
    /// Returns `None` if `nonce` was not found or decoding failed.
    pub fn nonce(&self) -> Option<u64> {
        match self.find::<CheckNonce>() {
            Ok(nonce) => nonce,
            Err(_) => None,
        }
    }
}
/// A single signed extension
#[derive(Debug, Clone)]
pub struct ExtrinsicTransactionExtension<'a, T: Config> {
    bytes: &'a [u8],
    ty_id: u32,
    identifier: &'a str,
    metadata: &'a Metadata,
    _marker: core::marker::PhantomData<T>,
}

impl<'a, T: Config> ExtrinsicTransactionExtension<'a, T> {
    /// The bytes representing this signed extension.
    pub fn bytes(&self) -> &'a [u8] {
        self.bytes
    }

    /// The name of the signed extension.
    pub fn name(&self) -> &'a str {
        self.identifier
    }

    /// The type id of the signed extension.
    pub fn type_id(&self) -> u32 {
        self.ty_id
    }

    /// Signed Extension as a [`scale_value::Value`]
    pub fn value(&self) -> Result<Value<u32>, ExtrinsicError> {
        let mut cursor = self.bytes;
        scale_value::scale::decode_as_type(&mut cursor, self.ty_id, self.metadata.types()).map_err(
            |e| ExtrinsicError::CouldNotDecodeTransactionExtension {
                name: self.identifier.to_owned(),
                error: e.into(),
            },
        )
    }

    /// Decodes the bytes of this Signed Extension into its associated `Decoded` type.
    /// Returns `Ok(None)` if the data we have doesn't match the Signed Extension we're asking to
    /// decode with.
    pub fn as_signed_extension<S: TransactionExtension<T>>(
        &self,
    ) -> Result<Option<S::Decoded>, ExtrinsicError> {
        // Only attempt to decode if the extension's identifier/type match `S`.
        match S::matches(self.identifier, self.ty_id, self.metadata.types()) {
            true => self.as_type::<S::Decoded>().map(Some),
            false => Ok(None),
        }
    }

    // Decode the extension bytes into any compatible type.
    fn as_type<E: DecodeAsType>(&self) -> Result<E, ExtrinsicError> {
        let mut cursor = self.bytes;
        E::decode_as_type(&mut cursor, self.ty_id, self.metadata.types()).map_err(|e| {
            ExtrinsicError::CouldNotDecodeTransactionExtension {
                name: self.identifier.to_owned(),
                error: e,
            }
        })
    }
}
+596
View File
@@ -0,0 +1,596 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::{
Metadata,
blocks::extrinsic_transaction_extensions::ExtrinsicTransactionExtensions,
config::{Config, HashFor, Hasher},
error::{ExtrinsicDecodeErrorAt, ExtrinsicDecodeErrorAtReason, ExtrinsicError},
};
use alloc::{sync::Arc, vec::Vec};
use frame_decode::extrinsics::Extrinsic;
use scale_decode::{DecodeAsFields, DecodeAsType};
pub use crate::blocks::StaticExtrinsic;
/// The body of a block.
pub struct Extrinsics<T: Config> {
    extrinsics: Vec<Arc<(Extrinsic<'static, u32>, Vec<u8>)>>,
    metadata: Metadata,
    hasher: T::Hasher,
    _marker: core::marker::PhantomData<T>,
}

impl<T: Config> Extrinsics<T> {
    /// Instantiate a new [`Extrinsics`] object, given a vector containing
    /// each extrinsic hash (in the form of bytes) and some metadata that
    /// we'll use to decode them.
    pub fn decode_from(
        extrinsics: Vec<Vec<u8>>,
        metadata: Metadata,
    ) -> Result<Self, ExtrinsicDecodeErrorAt> {
        let hasher = T::Hasher::new(&metadata);
        let mut decoded = Vec::with_capacity(extrinsics.len());
        for (extrinsic_index, bytes) in extrinsics.into_iter().enumerate() {
            let cursor = &mut &*bytes;
            // Try to decode the extrinsic; report the index of the failing one on error.
            let info =
                frame_decode::extrinsics::decode_extrinsic(cursor, &metadata, metadata.types())
                    .map_err(|error| ExtrinsicDecodeErrorAt {
                        extrinsic_index,
                        error: ExtrinsicDecodeErrorAtReason::DecodeError(error),
                    })?
                    .into_owned();
            // Leftover bytes after decoding indicate that decoding probably went wrong.
            if !cursor.is_empty() {
                return Err(ExtrinsicDecodeErrorAt {
                    extrinsic_index,
                    error: ExtrinsicDecodeErrorAtReason::LeftoverBytes(cursor.to_vec()),
                });
            }
            decoded.push(Arc::new((info, bytes)));
        }
        Ok(Self { extrinsics: decoded, hasher, metadata, _marker: core::marker::PhantomData })
    }

    /// The number of extrinsics.
    pub fn len(&self) -> usize {
        self.extrinsics.len()
    }

    /// Are there no extrinsics in this block?
    // Note: mainly here to satisfy clippy.
    pub fn is_empty(&self) -> bool {
        self.extrinsics.is_empty()
    }

    /// Returns an iterator over the extrinsics in the block body.
    // Dev note: The returned iterator is 'static + Send so that we can box it up and make
    // use of it with our `FilterExtrinsic` stuff.
    pub fn iter(&self) -> impl Iterator<Item = ExtrinsicDetails<T>> + Send + Sync + 'static {
        let hasher = self.hasher;
        let metadata = self.metadata.clone();
        // Cloning the vec only bumps the `Arc` refcounts, so this is cheap and
        // lets the iterator own everything it needs.
        self.extrinsics.clone().into_iter().enumerate().map(move |(index, ext)| {
            ExtrinsicDetails::new(index as u32, ext, hasher, metadata.clone())
        })
    }

    /// Iterate through the extrinsics using metadata to dynamically decode and skip
    /// them, and return only those which should decode to the provided `E` type.
    /// Decoding errors are yielded as `Err` items.
    pub fn find<E: StaticExtrinsic>(
        &self,
    ) -> impl Iterator<Item = Result<FoundExtrinsic<T, E>, ExtrinsicError>> {
        self.iter().filter_map(|details| match details.as_extrinsic::<E>() {
            // Extrinsic decoded to the requested type:
            Ok(Some(value)) => Some(Ok(FoundExtrinsic { details, value })),
            // Extrinsic for a different pallet / different call (skip):
            Ok(None) => None,
            // Failed to decode extrinsic:
            Err(err) => Some(Err(err)),
        })
    }

    /// Iterate through the extrinsics using metadata to dynamically decode and skip
    /// them, and return the first extrinsic found which decodes to the provided `E` type.
    pub fn find_first<E: StaticExtrinsic>(
        &self,
    ) -> Result<Option<FoundExtrinsic<T, E>>, ExtrinsicError> {
        match self.find::<E>().next() {
            Some(res) => res.map(Some),
            None => Ok(None),
        }
    }

    /// Iterate through the extrinsics using metadata to dynamically decode and skip
    /// them, and return the last extrinsic found which decodes to the provided `E` type.
    pub fn find_last<E: StaticExtrinsic>(
        &self,
    ) -> Result<Option<FoundExtrinsic<T, E>>, ExtrinsicError> {
        self.find::<E>().last().transpose()
    }

    /// Find an extrinsics that decodes to the type provided. Returns true if it was found.
    pub fn has<E: StaticExtrinsic>(&self) -> Result<bool, ExtrinsicError> {
        self.find_first::<E>().map(|found| found.is_some())
    }
}
/// A single extrinsic in a block.
pub struct ExtrinsicDetails<T: Config> {
    /// The index of the extrinsic in the block.
    index: u32,
    /// Extrinsic bytes and decode info.
    ext: Arc<(Extrinsic<'static, u32>, Vec<u8>)>,
    /// Hash the extrinsic if we want.
    hasher: T::Hasher,
    /// Subxt metadata to fetch the extrinsic metadata.
    metadata: Metadata,
    // Ties `T: Config` to the struct without storing a value of type `T`.
    _marker: core::marker::PhantomData<T>,
}

impl<T> ExtrinsicDetails<T>
where
    T: Config,
{
    // Attempt to dynamically decode a single extrinsic from the given input.
    #[doc(hidden)]
    pub fn new(
        index: u32,
        ext: Arc<(Extrinsic<'static, u32>, Vec<u8>)>,
        hasher: T::Hasher,
        metadata: Metadata,
    ) -> ExtrinsicDetails<T> {
        ExtrinsicDetails { index, ext, hasher, metadata, _marker: core::marker::PhantomData }
    }

    /// Calculate and return the hash of the extrinsic, based on the configured hasher.
    pub fn hash(&self) -> HashFor<T> {
        // Use hash(), not hash_of(), because we don't want to double encode the bytes.
        self.hasher.hash(self.bytes())
    }

    /// Is the extrinsic signed?
    pub fn is_signed(&self) -> bool {
        self.decoded_info().is_signed()
    }

    /// The index of the extrinsic in the block.
    pub fn index(&self) -> u32 {
        self.index
    }

    /// Return _all_ of the bytes representing this extrinsic, which include, in order:
    /// - First byte: abbbbbbb (a = 0 for unsigned, 1 for signed, b = version)
    /// - SignatureType (if the payload is signed)
    /// - Address
    /// - Signature
    /// - Extra fields
    /// - Extrinsic call bytes
    pub fn bytes(&self) -> &[u8] {
        &self.ext.1
    }

    /// Return only the bytes representing this extrinsic call:
    /// - First byte is the pallet index
    /// - Second byte is the variant (call) index
    /// - Followed by field bytes.
    ///
    /// # Note
    ///
    /// Please use [`Self::bytes`] if you want to get all extrinsic bytes.
    pub fn call_bytes(&self) -> &[u8] {
        // The range comes from the decode step, so it is in-bounds for these bytes.
        &self.bytes()[self.decoded_info().call_data_range()]
    }

    /// Return the bytes representing the fields stored in this extrinsic.
    ///
    /// # Note
    ///
    /// This is a subset of [`Self::call_bytes`] that does not include the
    /// first two bytes that denote the pallet index and the variant index.
    pub fn field_bytes(&self) -> &[u8] {
        // Note: this cannot panic because we checked the extrinsic bytes
        // to contain at least two bytes.
        &self.bytes()[self.decoded_info().call_data_args_range()]
    }

    /// Return only the bytes of the address that signed this extrinsic.
    ///
    /// # Note
    ///
    /// Returns `None` if the extrinsic is not signed.
    pub fn address_bytes(&self) -> Option<&[u8]> {
        self.decoded_info()
            .signature_payload()
            .map(|s| &self.bytes()[s.address_range()])
    }

    /// Returns Some(signature_bytes) if the extrinsic was signed otherwise None is returned.
    pub fn signature_bytes(&self) -> Option<&[u8]> {
        self.decoded_info()
            .signature_payload()
            .map(|s| &self.bytes()[s.signature_range()])
    }

    /// Returns the signed extension `extra` bytes of the extrinsic.
    /// Each signed extension has an `extra` type (May be zero-sized).
    /// These bytes are the scale encoded `extra` fields of each signed extension in order of the
    /// signed extensions. They do *not* include the `additional` signed bytes that are used as
    /// part of the payload that is signed.
    ///
    /// Note: Returns `None` if the extrinsic is not signed.
    pub fn transaction_extensions_bytes(&self) -> Option<&[u8]> {
        self.decoded_info()
            .transaction_extension_payload()
            .map(|t| &self.bytes()[t.range()])
    }

    /// Returns the transaction extensions of the extrinsic, if any.
    ///
    /// Returns `None` if the extrinsic is not signed.
    pub fn transaction_extensions(&self) -> Option<ExtrinsicTransactionExtensions<'_, T>> {
        self.decoded_info()
            .transaction_extension_payload()
            .map(|t| ExtrinsicTransactionExtensions::new(self.bytes(), &self.metadata, t))
    }

    /// The index of the pallet that the extrinsic originated from.
    pub fn pallet_index(&self) -> u8 {
        self.decoded_info().pallet_index()
    }

    /// The index of the extrinsic variant that the extrinsic originated from.
    pub fn call_index(&self) -> u8 {
        self.decoded_info().call_index()
    }

    /// The name of the pallet from whence the extrinsic originated.
    pub fn pallet_name(&self) -> &str {
        self.decoded_info().pallet_name()
    }

    /// The name of the call (ie the name of the variant that it corresponds to).
    pub fn call_name(&self) -> &str {
        self.decoded_info().call_name()
    }

    /// Decode and provide the extrinsic fields back in the form of a [`scale_value::Composite`]
    /// type which represents the named or unnamed fields that were present in the extrinsic.
    pub fn decode_as_fields<E: DecodeAsFields>(&self) -> Result<E, ExtrinsicError> {
        let bytes = &mut self.field_bytes();
        // Build the field descriptions (type id + optional name) from the decode info.
        let mut fields = self.decoded_info().call_data().map(|d| {
            let name = if d.name().is_empty() { None } else { Some(d.name()) };
            scale_decode::Field::new(*d.ty(), name)
        });
        let decoded =
            E::decode_as_fields(bytes, &mut fields, self.metadata.types()).map_err(|e| {
                ExtrinsicError::CannotDecodeFields {
                    extrinsic_index: self.index as usize,
                    error: e,
                }
            })?;
        Ok(decoded)
    }

    /// Attempt to decode these [`ExtrinsicDetails`] into a type representing the extrinsic fields.
    /// Such types are exposed in the codegen as `pallet_name::calls::types::CallName` types.
    pub fn as_extrinsic<E: StaticExtrinsic>(&self) -> Result<Option<E>, ExtrinsicError> {
        // Only decode if the pallet and call names match what `E` declares.
        if self.decoded_info().pallet_name() == E::PALLET &&
            self.decoded_info().call_name() == E::CALL
        {
            let mut fields = self.decoded_info().call_data().map(|d| {
                let name = if d.name().is_empty() { None } else { Some(d.name()) };
                scale_decode::Field::new(*d.ty(), name)
            });
            let decoded =
                E::decode_as_fields(&mut self.field_bytes(), &mut fields, self.metadata.types())
                    .map_err(|e| ExtrinsicError::CannotDecodeFields {
                        extrinsic_index: self.index as usize,
                        error: e,
                    })?;
            Ok(Some(decoded))
        } else {
            Ok(None)
        }
    }

    /// Attempt to decode these [`ExtrinsicDetails`] into an outer call enum type (which includes
    /// the pallet and extrinsic enum variants as well as the extrinsic fields). A compatible
    /// type for this is exposed via static codegen as a root level `Call` type.
    pub fn as_root_extrinsic<E: DecodeAsType>(&self) -> Result<E, ExtrinsicError> {
        let decoded = E::decode_as_type(
            &mut &self.call_bytes()[..],
            self.metadata.outer_enums().call_enum_ty(),
            self.metadata.types(),
        )
        .map_err(|e| ExtrinsicError::CannotDecodeIntoRootExtrinsic {
            extrinsic_index: self.index as usize,
            error: e,
        })?;
        Ok(decoded)
    }

    // The decode info produced when the block body was decoded.
    fn decoded_info(&self) -> &Extrinsic<'static, u32> {
        &self.ext.0
    }
}
/// A Static Extrinsic found in a block coupled with its details.
pub struct FoundExtrinsic<T: Config, E> {
    /// Details for the extrinsic.
    pub details: ExtrinsicDetails<T>,
    /// The decoded extrinsic value.
    pub value: E,
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::BizinikiwConfig;
    use assert_matches::assert_matches;
    use codec::{Decode, Encode};
    use frame_metadata::{
        RuntimeMetadataPrefixed,
        v15::{
            CustomMetadata, ExtrinsicMetadata, OuterEnums, PalletCallMetadata, PalletMetadata,
            RuntimeMetadataV15,
        },
    };
    use scale_info::{TypeInfo, meta_type};
    use scale_value::Value;

    // Extrinsic needs to contain at least the generic type parameter "Call"
    // for the metadata to be valid.
    // The "Call" type from the metadata is used to decode extrinsics.
    #[allow(unused)]
    #[derive(TypeInfo)]
    struct ExtrinsicType<Address, Call, Signature, Extra> {
        pub signature: Option<(Address, Signature, Extra)>,
        pub function: Call,
    }

    // Because this type is used to decode extrinsics, we expect this to be a TypeDefVariant.
    // Each pallet must contain one single variant.
    #[allow(unused)]
    #[derive(
        Encode,
        Decode,
        TypeInfo,
        Clone,
        Debug,
        PartialEq,
        Eq,
        scale_encode::EncodeAsType,
        scale_decode::DecodeAsType,
    )]
    enum RuntimeCall {
        Test(Pallet),
    }

    // The calls of the pallet.
    #[allow(unused)]
    #[derive(
        Encode,
        Decode,
        TypeInfo,
        Clone,
        Debug,
        PartialEq,
        Eq,
        scale_encode::EncodeAsType,
        scale_decode::DecodeAsType,
    )]
    enum Pallet {
        #[allow(unused)]
        #[codec(index = 2)]
        TestCall { value: u128, signed: bool, name: String },
    }

    // A standalone struct mirroring the fields of `Pallet::TestCall`, used to
    // exercise the `StaticExtrinsic` decoding path.
    #[allow(unused)]
    #[derive(
        Encode,
        Decode,
        TypeInfo,
        Clone,
        Debug,
        PartialEq,
        Eq,
        scale_encode::EncodeAsType,
        scale_decode::DecodeAsType,
    )]
    struct TestCallExtrinsic {
        value: u128,
        signed: bool,
        name: String,
    }

    impl StaticExtrinsic for TestCallExtrinsic {
        const PALLET: &'static str = "Test";
        const CALL: &'static str = "TestCall";
    }

    /// Build fake metadata consisting the types needed to represent an extrinsic.
    fn metadata() -> Metadata {
        let pallets = vec![PalletMetadata {
            name: "Test",
            storage: None,
            calls: Some(PalletCallMetadata { ty: meta_type::<Pallet>() }),
            event: None,
            constants: vec![],
            error: None,
            index: 0,
            docs: vec![],
        }];

        let extrinsic = ExtrinsicMetadata {
            version: 4,
            signed_extensions: vec![],
            address_ty: meta_type::<()>(),
            call_ty: meta_type::<RuntimeCall>(),
            signature_ty: meta_type::<()>(),
            extra_ty: meta_type::<()>(),
        };

        let meta = RuntimeMetadataV15::new(
            pallets,
            extrinsic,
            meta_type::<()>(),
            vec![],
            OuterEnums {
                call_enum_ty: meta_type::<RuntimeCall>(),
                event_enum_ty: meta_type::<()>(),
                error_enum_ty: meta_type::<()>(),
            },
            CustomMetadata { map: Default::default() },
        );

        let runtime_metadata: RuntimeMetadataPrefixed = meta.into();
        let metadata: pezkuwi_subxt_metadata::Metadata = runtime_metadata.try_into().unwrap();
        metadata
    }

    #[test]
    fn extrinsic_metadata_consistency() {
        let metadata = metadata();

        // Expect our metadata to contain the registered types.
        let pallet = metadata.pallet_by_call_index(0).expect("pallet exists");
        let extrinsic = pallet
            .call_variant_by_index(2)
            .expect("metadata contains the RuntimeCall enum with this pallet");

        assert_eq!(pallet.name(), "Test");
        assert_eq!(&extrinsic.name, "TestCall");
    }

    #[test]
    fn insufficient_extrinsic_bytes() {
        let metadata = metadata();

        // Decode with empty bytes.
        let result = Extrinsics::<BizinikiwConfig>::decode_from(vec![vec![]], metadata);
        assert_matches!(
            result.err(),
            Some(crate::error::ExtrinsicDecodeErrorAt { extrinsic_index: 0, error: _ })
        );
    }

    #[test]
    fn unsupported_version_extrinsic() {
        use frame_decode::extrinsics::ExtrinsicDecodeError;

        let metadata = metadata();

        // Decode with invalid version.
        let result = Extrinsics::<BizinikiwConfig>::decode_from(vec![vec![3u8].encode()], metadata);
        assert_matches!(
            result.err(),
            Some(crate::error::ExtrinsicDecodeErrorAt {
                extrinsic_index: 0,
                error: ExtrinsicDecodeErrorAtReason::DecodeError(
                    ExtrinsicDecodeError::VersionNotSupported(3)
                ),
            })
        );
    }

    #[test]
    fn tx_hashes_line_up() {
        let metadata = metadata();
        let hasher = <BizinikiwConfig as Config>::Hasher::new(&metadata);
        let tx = crate::dynamic::tx(
            "Test",
            "TestCall",
            vec![Value::u128(10), Value::bool(true), Value::string("SomeValue")],
        );

        // Encoded TX ready to submit.
        let tx_encoded = crate::tx::create_v4_unsigned::<BizinikiwConfig, _>(&tx, &metadata)
            .expect("Valid dynamic parameters are provided");

        // Extrinsic details ready to decode.
        let extrinsics = Extrinsics::<BizinikiwConfig>::decode_from(
            vec![tx_encoded.encoded().to_owned()],
            metadata,
        )
        .expect("Valid extrinsic");
        let extrinsic = extrinsics.iter().next().unwrap();

        // Both of these types should produce the same bytes.
        assert_eq!(tx_encoded.encoded(), extrinsic.bytes(), "bytes should eq");
        // Both of these types should produce the same hash.
        assert_eq!(tx_encoded.hash_with(hasher), extrinsic.hash(), "hashes should eq");
    }

    #[test]
    fn statically_decode_extrinsic() {
        let metadata = metadata();

        let tx = crate::dynamic::tx(
            "Test",
            "TestCall",
            vec![Value::u128(10), Value::bool(true), Value::string("SomeValue")],
        );
        let tx_encoded = crate::tx::create_v4_unsigned::<BizinikiwConfig, _>(&tx, &metadata)
            .expect("Valid dynamic parameters are provided");

        // Note: `create_unsigned` produces the extrinsic bytes by prefixing the extrinsic length.
        // The length is handled deserializing `ChainBlockExtrinsic`, therefore the first byte is
        // not needed.
        let extrinsics = Extrinsics::<BizinikiwConfig>::decode_from(
            vec![tx_encoded.encoded().to_owned()],
            metadata,
        )
        .expect("Valid extrinsic");
        let extrinsic = extrinsics.iter().next().unwrap();

        assert!(!extrinsic.is_signed());

        assert_eq!(extrinsic.index(), 0);

        assert_eq!(extrinsic.pallet_index(), 0);
        assert_eq!(extrinsic.pallet_name(), "Test");

        assert_eq!(extrinsic.call_index(), 2);
        assert_eq!(extrinsic.call_name(), "TestCall");

        // Decode the extrinsic to the root enum.
        let decoded_extrinsic = extrinsic
            .as_root_extrinsic::<RuntimeCall>()
            .expect("can decode extrinsic to root enum");

        assert_eq!(
            decoded_extrinsic,
            RuntimeCall::Test(Pallet::TestCall {
                value: 10,
                signed: true,
                name: "SomeValue".into(),
            })
        );

        // Decode the extrinsic to the extrinsic variant.
        let decoded_extrinsic = extrinsic
            .as_extrinsic::<TestCallExtrinsic>()
            .expect("can decode extrinsic to extrinsic variant")
            .expect("value cannot be None");

        assert_eq!(
            decoded_extrinsic,
            TestCallExtrinsic { value: 10, signed: true, name: "SomeValue".into() }
        );
    }
}
+89
View File
@@ -0,0 +1,89 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Decode and iterate over the extrinsics in block bodies.
//!
//! Use the [`decode_from`] function as an entry point to decoding extrinsics, and then
//! have a look at [`Extrinsics`] and [`ExtrinsicDetails`] to see which methods are available
//! to work with the extrinsics.
//!
//! # Example
//!
//! ```rust
//! extern crate alloc;
//!
//! use pezkuwi_subxt_macro::subxt;
//! use pezkuwi_subxt_core::blocks;
//! use pezkuwi_subxt_core::Metadata;
//! use pezkuwi_subxt_core::config::PezkuwiConfig;
//! use alloc::vec;
//!
//! // If we generate types without `subxt`, we need to point to `::pezkuwi_subxt_core`:
//! #[subxt(
//! crate = "::pezkuwi_subxt_core",
//! runtime_metadata_path = "../artifacts/pezkuwi_metadata_small.scale",
//! )]
//! pub mod pezkuwi {}
//!
//! // Some metadata we'd like to use to help us decode extrinsics:
//! let metadata_bytes = include_bytes!("../../../artifacts/pezkuwi_metadata_small.scale");
//! let metadata = Metadata::decode_from(&metadata_bytes[..]).unwrap();
//!
//! // Some extrinsics we'd like to decode:
//! let ext_bytes = vec![
//! hex::decode("1004020000").unwrap(),
//! hex::decode("c10184001cbd2d43530a44705ad088af313e18f80b53ef16b36177cd4b77b846f2a5f07c01a27c400241aeafdea1871b32f1f01e92acd272ddfe6b2f8b73b64c606572a530c470a94ef654f7baa5828474754a1fe31b59f91f6bb5c2cd5a07c22d4b8b8387350100000000001448656c6c6f").unwrap(),
//! hex::decode("550284001cbd2d43530a44705ad088af313e18f80b53ef16b36177cd4b77b846f2a5f07c0144bb92734447c893ab16d520fae0d455257550efa28ee66bf6dc942cb8b00d5d2799b98bc2865d21812278a9a266acd7352f40742ff11a6ce1f400013961598485010000000400008eaf04151687736326c9fea17e25fc5287613693c912909cb226aa4794f26a481700505a4f7e9f4eb106").unwrap()
//! ];
//!
//! // Given some chain config and metadata, we know how to decode the bytes.
//! let exts = blocks::decode_from::<PezkuwiConfig>(ext_bytes, metadata).unwrap();
//!
//! // We'll see 3 extrinsics:
//! assert_eq!(exts.len(), 3);
//!
//! // We can iterate over them and decode various details out of them.
//! for ext in exts.iter() {
//! println!("Pallet: {}", ext.pallet_name());
//! println!("Call: {}", ext.call_name());
//! }
//!
//! # let ext_details: Vec<_> = exts.iter()
//! # .map(|ext| {
//! # let pallet = ext.pallet_name().to_string();
//! # let call = ext.call_name().to_string();
//! # (pallet, call)
//! # })
//! # .collect();
//! #
//! # assert_eq!(ext_details, vec![
//! # ("Timestamp".to_owned(), "set".to_owned()),
//! # ("System".to_owned(), "remark".to_owned()),
//! # ("Balances".to_owned(), "transfer_allow_death".to_owned()),
//! # ]);
//! ```
mod extrinsic_transaction_extensions;
mod extrinsics;
mod static_extrinsic;
pub use crate::error::ExtrinsicError;
use crate::{Metadata, config::Config, error::ExtrinsicDecodeErrorAt};
use alloc::vec::Vec;
pub use extrinsic_transaction_extensions::{
ExtrinsicTransactionExtension, ExtrinsicTransactionExtensions,
};
pub use extrinsics::{ExtrinsicDetails, Extrinsics, FoundExtrinsic};
pub use static_extrinsic::StaticExtrinsic;
/// Instantiate a new [`Extrinsics`] object, given a vector containing each extrinsic (in the
/// form of bytes) and some metadata that we'll use to decode them.
///
/// This is a shortcut for [`Extrinsics::decode_from`].
pub fn decode_from<T: Config>(
    extrinsics: Vec<Vec<u8>>,
    metadata: Metadata,
) -> Result<Extrinsics<T>, ExtrinsicDecodeErrorAt> {
    // Delegate straight to the inherent constructor.
    Extrinsics::<T>::decode_from(extrinsics, metadata)
}
@@ -0,0 +1,23 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use scale_decode::DecodeAsFields;
/// Trait to uniquely identify the extrinsic's identity from the runtime metadata.
///
/// Generated API structures that represent an extrinsic implement this trait.
///
/// The trait is utilized to decode emitted extrinsics from a block, via obtaining the
/// form of the `Extrinsic` from the metadata.
pub trait StaticExtrinsic: DecodeAsFields {
    /// Pallet name.
    const PALLET: &'static str;
    /// Call name.
    const CALL: &'static str;

    /// Returns true if the given pallet and call names match this extrinsic.
    fn is_extrinsic(pallet: &str, call: &str) -> bool {
        // Default implementation: a simple name comparison against the constants.
        Self::PALLET == pallet && Self::CALL == call
    }
}
+42
View File
@@ -0,0 +1,42 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! A couple of client types that we use elsewhere.
use crate::{
Metadata,
config::{Config, HashFor},
};
use derive_where::derive_where;
/// This provides access to some relevant client state in transaction extensions,
/// and is just a combination of some of the available properties.
// `derive_where` is used (rather than `derive`) so that the bounds apply to the
// fields rather than requiring `C: Clone + Debug`.
#[derive_where(Clone, Debug)]
pub struct ClientState<C: Config> {
    /// Genesis hash.
    pub genesis_hash: HashFor<C>,
    /// Runtime version.
    pub runtime_version: RuntimeVersion,
    /// Metadata.
    pub metadata: Metadata,
}
/// Runtime version information needed to submit transactions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct RuntimeVersion {
    /// Version of the runtime specification. A full-node will not attempt to use its native
    /// runtime in substitute for the on-chain Wasm runtime unless all of `spec_name`,
    /// `spec_version` and `authoring_version` are the same between Wasm and native.
    pub spec_version: u32,

    /// All existing dispatches are fully compatible when this number doesn't change. If this
    /// number changes, then `spec_version` must change, also.
    ///
    /// This number must change when an existing dispatchable (module ID, dispatch ID) is changed,
    /// either through an alteration in its user-level semantics, a parameter
    /// added/removed/changed, a dispatchable being removed, a module being removed, or a
    /// dispatchable/module changing its index.
    ///
    /// It need *not* change when a new module is added or when a dispatchable is added.
    pub transaction_version: u32,
}
+392
View File
@@ -0,0 +1,392 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Bizinikiwi specific configuration
use super::{Config, DefaultExtrinsicParams, DefaultExtrinsicParamsBuilder, Hasher, Header};
pub use crate::utils::{AccountId32, MultiAddress, MultiSignature};
use alloc::{format, vec::Vec};
use codec::{Decode, Encode};
use pezkuwi_subxt_metadata::Metadata;
pub use primitive_types::{H256, U256};
use serde::{Deserialize, Serialize};
/// Default set of commonly used types by Bizinikiwi runtimes.
// Note: We only use this at the type level, so it should be impossible to
// create an instance of it (it is an empty enum, and therefore uninhabited).
// The trait implementations exist just to make life easier,
// but shouldn't strictly be necessary since users can't instantiate this type.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
pub enum BizinikiwConfig {}
// The associated types chosen here are the defaults used by Bizinikiwi runtimes:
// 32-byte account ids, multi-address/multi-signature, and a hasher selected
// dynamically from the chain's metadata.
impl Config for BizinikiwConfig {
    type AccountId = AccountId32;
    type Address = MultiAddress<Self::AccountId, u32>;
    type Signature = MultiSignature;
    type Hasher = DynamicHasher256;
    type Header = BizinikiwiHeader<u32, DynamicHasher256>;
    type ExtrinsicParams = BizinikiwiExtrinsicParams<Self>;
    type AssetId = u32;
}
/// A struct representing the signed extra and additional parameters required
/// to construct a transaction for the default bizinikiwi node.
pub type BizinikiwiExtrinsicParams<T> = DefaultExtrinsicParams<T>;

/// A builder which leads to [`BizinikiwiExtrinsicParams`] being constructed.
/// This is what you provide to methods like `sign_and_submit()`.
pub type BizinikiwiExtrinsicParamsBuilder<T> = DefaultExtrinsicParamsBuilder<T>;
/// A hasher (ie implements [`Hasher`]) which hashes values using the blake2_256 algorithm.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct BlakeTwo256;
impl Hasher for BlakeTwo256 {
type Output = H256;
fn new(_metadata: &Metadata) -> Self {
Self
}
fn hash(&self, s: &[u8]) -> Self::Output {
pezsp_crypto_hashing::blake2_256(s).into()
}
}
/// A hasher (ie implements [`Hasher`]) which inspects the runtime metadata to decide how to
/// hash types, falling back to blake2_256 if the hasher information is not available.
///
/// Currently this hasher supports only `BlakeTwo256` and `Keccak256` hashing methods.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct DynamicHasher256(HashType);

// The hashing algorithm detected from the chain's metadata.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum HashType {
    // Most chains use this:
    BlakeTwo256,
    // Chains like Hyperbridge use this (tends to be eth compatible chains)
    Keccak256,
    // If we don't have V16 metadata, we'll emit this and default to BlakeTwo256.
    Unknown,
}
impl Hasher for DynamicHasher256 {
type Output = H256;
fn new(metadata: &Metadata) -> Self {
// Determine the Hash associated type used for the current chain, if possible.
let Some(system_pallet) = metadata.pallet_by_name("System") else {
return Self(HashType::Unknown);
};
let Some(hash_ty_id) = system_pallet.associated_type_id("Hashing") else {
return Self(HashType::Unknown);
};
let ty = metadata
.types()
.resolve(hash_ty_id)
.expect("Type information for 'Hashing' associated type should be in metadata");
let hash_type = match ty.path.ident().as_deref().unwrap_or("") {
"BlakeTwo256" => HashType::BlakeTwo256,
"Keccak256" => HashType::Keccak256,
_ => HashType::Unknown,
};
Self(hash_type)
}
fn hash(&self, s: &[u8]) -> Self::Output {
match self.0 {
HashType::BlakeTwo256 | HashType::Unknown => pezsp_crypto_hashing::blake2_256(s).into(),
HashType::Keccak256 => pezsp_crypto_hashing::keccak_256(s).into(),
}
}
}
/// A generic Bizinikiwi header type, adapted from `sp_runtime::generic::Header`.
/// The block number and hasher can be configured to adapt this for other nodes.
#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BizinikiwiHeader<N: Copy + Into<U256> + TryFrom<U256>, H: Hasher> {
    /// The parent hash.
    pub parent_hash: H::Output,
    /// The block number.
    // Serialized/deserialized via helper fns (block numbers arrive hex-encoded over
    // RPC), and SCALE compact-encoded to match the on-chain representation.
    #[serde(serialize_with = "serialize_number", deserialize_with = "deserialize_number")]
    #[codec(compact)]
    pub number: N,
    /// The state trie merkle root
    pub state_root: H::Output,
    /// The merkle root of the extrinsics.
    pub extrinsics_root: H::Output,
    /// A chain-specific digest of data useful for light clients or referencing auxiliary data.
    pub digest: Digest,
}
// Implement the subxt `Header` trait so that this header can be used wherever a
// `Config::Header` is expected.
impl<N, H> Header for BizinikiwiHeader<N, H>
where
    N: Copy + Into<u64> + Into<U256> + TryFrom<U256> + Encode,
    H: Hasher,
    BizinikiwiHeader<N, H>: Encode + Decode,
{
    type Number = N;
    type Hasher = H;

    /// The block number stored in this header.
    fn number(&self) -> Self::Number {
        self.number
    }
}
/// Generic header digest. From `sp_runtime::generic::digest`.
#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, Serialize, Deserialize, Default)]
pub struct Digest {
    /// A list of digest items.
    pub logs: Vec<DigestItem>,
}
/// Digest item that is able to encode/decode 'system' digest items and
/// provide opaque access to other items. From `sp_runtime::generic::digest`.
///
/// NOTE: the SCALE codec is implemented manually (see the `Encode`/`Decode`
/// impls below) because the variant indexes are non-contiguous.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum DigestItem {
    /// A pre-runtime digest.
    ///
    /// These are messages from the consensus engine to the runtime, although
    /// the consensus engine can (and should) read them itself to avoid
    /// code and state duplication. It is erroneous for a runtime to produce
    /// these, but this is not (yet) checked.
    ///
    /// NOTE: the runtime is not allowed to panic or fail in an `on_initialize`
    /// call if an expected `PreRuntime` digest is not present. It is the
    /// responsibility of a external block verifier to check this. Runtime API calls
    /// will initialize the block without pre-runtime digests, so initialization
    /// cannot fail when they are missing.
    PreRuntime(ConsensusEngineId, Vec<u8>),
    /// A message from the runtime to the consensus engine. This should *never*
    /// be generated by the native code of any consensus engine, but this is not
    /// checked (yet).
    Consensus(ConsensusEngineId, Vec<u8>),
    /// Put a Seal on it. This is only used by native code, and is never seen
    /// by runtimes.
    Seal(ConsensusEngineId, Vec<u8>),
    /// Some other thing. Unsupported and experimental.
    Other(Vec<u8>),
    /// An indication for the light clients that the runtime execution
    /// environment is updated.
    ///
    /// Currently this is triggered when:
    /// 1. Runtime code blob is changed or
    /// 2. `heap_pages` value is changed.
    RuntimeEnvironmentUpdated,
}
// From sp_runtime::generic, DigestItem enum indexes are encoded using this:
// The gaps in the discriminants (1-3, 7) correspond to digest item kinds that
// exist(ed) upstream but are not represented here; keep these values in sync
// with `sp_runtime` or decoding will break.
#[repr(u32)]
#[derive(Encode, Decode)]
enum DigestItemType {
    Other = 0u32,
    Consensus = 4u32,
    Seal = 5u32,
    PreRuntime = 6u32,
    RuntimeEnvironmentUpdated = 8u32,
}
impl Encode for DigestItem {
    /// Encode as the `DigestItemType` discriminant followed by the variant's
    /// payload, matching `sp_runtime`'s manual implementation.
    fn encode(&self) -> Vec<u8> {
        let mut out = Vec::new();
        match self {
            Self::PreRuntime(id, data) => {
                DigestItemType::PreRuntime.encode_to(&mut out);
                (id, data).encode_to(&mut out);
            },
            Self::Consensus(id, data) => {
                DigestItemType::Consensus.encode_to(&mut out);
                (id, data).encode_to(&mut out);
            },
            Self::Seal(id, sig) => {
                DigestItemType::Seal.encode_to(&mut out);
                (id, sig).encode_to(&mut out);
            },
            Self::Other(data) => {
                DigestItemType::Other.encode_to(&mut out);
                data.encode_to(&mut out);
            },
            Self::RuntimeEnvironmentUpdated => {
                DigestItemType::RuntimeEnvironmentUpdated.encode_to(&mut out);
            },
        }
        out
    }
}
impl Decode for DigestItem {
    /// Decode the `DigestItemType` discriminant first, then the payload it implies.
    fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
        match DigestItemType::decode(input)? {
            DigestItemType::PreRuntime => {
                let (id, data) = <(ConsensusEngineId, Vec<u8>)>::decode(input)?;
                Ok(Self::PreRuntime(id, data))
            },
            DigestItemType::Consensus => {
                let (id, data) = <(ConsensusEngineId, Vec<u8>)>::decode(input)?;
                Ok(Self::Consensus(id, data))
            },
            DigestItemType::Seal => {
                let (id, sig) = <(ConsensusEngineId, Vec<u8>)>::decode(input)?;
                Ok(Self::Seal(id, sig))
            },
            DigestItemType::Other => Ok(Self::Other(<Vec<u8>>::decode(input)?)),
            DigestItemType::RuntimeEnvironmentUpdated => Ok(Self::RuntimeEnvironmentUpdated),
        }
    }
}
/// Consensus engine unique ID. From `sp_runtime::ConsensusEngineId`.
/// A four-byte identifier carried by pre-runtime/consensus/seal digest items.
pub type ConsensusEngineId = [u8; 4];
impl serde::Serialize for DigestItem {
    /// Serialize as the hex string of the SCALE encoding.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let encoded = self.encode();
        impl_serde::serialize::serialize(&encoded, serializer)
    }
}
impl<'a> serde::Deserialize<'a> for DigestItem {
    /// Deserialize from the hex string of the SCALE encoding.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'a>,
    {
        let bytes = impl_serde::serialize::deserialize(deserializer)?;
        Self::decode(&mut bytes.as_slice())
            .map_err(|e| serde::de::Error::custom(format!("Decode error: {e}")))
    }
}
fn serialize_number<S, T: Copy + Into<U256>>(val: &T, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let u256: U256 = (*val).into();
serde::Serialize::serialize(&u256, s)
}
fn deserialize_number<'a, D, T: TryFrom<U256>>(d: D) -> Result<T, D::Error>
where
D: serde::Deserializer<'a>,
{
// At the time of writing, Smoldot gives back block numbers in numeric rather
// than hex format. So let's support deserializing from both here:
let number_or_hex = NumberOrHex::deserialize(d)?;
let u256 = number_or_hex.into_u256();
TryFrom::try_from(u256).map_err(|_| serde::de::Error::custom("Try from failed"))
}
/// A number type that can be serialized both as a number or a string that encodes a number in a
/// string.
///
/// We allow two representations of the block number as input. Either we deserialize to the type
/// that is specified in the block type or we attempt to parse given hex value.
///
/// The primary motivation for having this type is to avoid overflows when using big integers in
/// JavaScript (which we consider as an important RPC API consumer).
// `untagged`: serde tries each variant in order, so a JSON number becomes
// `Number` and a hex string becomes `Hex`.
#[derive(Copy, Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]
#[serde(untagged)]
pub enum NumberOrHex {
    /// The number represented directly.
    Number(u64),
    /// Hex representation of the number.
    Hex(U256),
}
impl NumberOrHex {
/// Converts this number into an U256.
pub fn into_u256(self) -> U256 {
match self {
NumberOrHex::Number(n) => n.into(),
NumberOrHex::Hex(h) => h,
}
}
}
impl From<NumberOrHex> for U256 {
    fn from(value: NumberOrHex) -> U256 {
        value.into_u256()
    }
}
// Implements `From<$t> for NumberOrHex` for each listed integer type; all of
// them fit losslessly into u64, so they map to the `Number` variant.
macro_rules! into_number_or_hex {
    ($($t: ty)+) => {
        $(
            impl From<$t> for NumberOrHex {
                fn from(x: $t) -> Self {
                    NumberOrHex::Number(x.into())
                }
            }
        )+
    }
}
into_number_or_hex!(u8 u16 u32 u64);
impl From<u128> for NumberOrHex {
fn from(n: u128) -> Self {
NumberOrHex::Hex(n.into())
}
}
impl From<U256> for NumberOrHex {
fn from(n: U256) -> Self {
NumberOrHex::Hex(n)
}
}
#[cfg(test)]
mod test {
    use super::*;
    // Smoldot returns numeric block numbers in the header at the time of writing;
    // ensure we can deserialize them properly.
    #[test]
    fn can_deserialize_numeric_block_number() {
        let numeric_block_number_json = r#"
        {
            "digest": {
                "logs": []
            },
            "extrinsicsRoot": "0x0000000000000000000000000000000000000000000000000000000000000000",
            "number": 4,
            "parentHash": "0xcb2690b2c85ceab55be03fc7f7f5f3857e7efeb7a020600ebd4331e10be2f7a5",
            "stateRoot": "0x0000000000000000000000000000000000000000000000000000000000000000"
        }
        "#;
        let header: BizinikiwiHeader<u32, BlakeTwo256> =
            serde_json::from_str(numeric_block_number_json).expect("valid block header");
        assert_eq!(header.number(), 4);
    }
    // Bizinikiwi returns hex block numbers; ensure we can also deserialize those OK.
    #[test]
    fn can_deserialize_hex_block_number() {
        // Fix: this fixture uses the hex encoding, so don't call it "numeric".
        let hex_block_number_json = r#"
        {
            "digest": {
                "logs": []
            },
            "extrinsicsRoot": "0x0000000000000000000000000000000000000000000000000000000000000000",
            "number": "0x04",
            "parentHash": "0xcb2690b2c85ceab55be03fc7f7f5f3857e7efeb7a020600ebd4331e10be2f7a5",
            "stateRoot": "0x0000000000000000000000000000000000000000000000000000000000000000"
        }
        "#;
        let header: BizinikiwiHeader<u32, BlakeTwo256> =
            serde_json::from_str(hex_block_number_json).expect("valid block header");
        assert_eq!(header.number(), 4);
    }
}
@@ -0,0 +1,167 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::config::transaction_extensions::CheckMortalityParams;
use super::{Config, ExtrinsicParams, HashFor, transaction_extensions};
/// The default [`super::ExtrinsicParams`] implementation understands common signed extensions
/// and how to apply them to a given chain.
// NOTE: the tuple order here determines the order of the params tuple that
// `DefaultExtrinsicParamsBuilder::build()` must return.
pub type DefaultExtrinsicParams<T> = transaction_extensions::AnyOf<
    T,
    (
        transaction_extensions::VerifySignature<T>,
        transaction_extensions::CheckSpecVersion,
        transaction_extensions::CheckTxVersion,
        transaction_extensions::CheckNonce,
        transaction_extensions::CheckGenesis<T>,
        transaction_extensions::CheckMortality<T>,
        transaction_extensions::ChargeAssetTxPayment<T>,
        transaction_extensions::ChargeTransactionPayment,
        transaction_extensions::CheckMetadataHash,
    ),
>;
/// A builder that outputs the set of [`super::ExtrinsicParams::Params`] required for
/// [`DefaultExtrinsicParams`]. This may expose methods that aren't applicable to the current
/// chain; such values will simply be ignored if so.
pub struct DefaultExtrinsicParamsBuilder<T: Config> {
    /// Mortality configuration for the transaction (defaults to immortal;
    /// see `immortal`/`mortal`/`mortal_from_unchecked`).
    mortality: transaction_extensions::CheckMortalityParams<T>,
    /// `None` means the nonce will be automatically set.
    nonce: Option<u64>,
    /// `None` means we'll use the native token.
    tip_of_asset_id: Option<T::AssetId>,
    /// Tip paid in the native token (routed to `ChargeTransactionPayment`).
    tip: u128,
    /// Tip routed to `ChargeAssetTxPayment` (possibly denominated in `tip_of_asset_id`).
    tip_of: u128,
}
impl<T: Config> Default for DefaultExtrinsicParamsBuilder<T> {
    /// Equivalent to [`DefaultExtrinsicParamsBuilder::new`]: no tip, immortal
    /// transaction, nonce fetched from the chain.
    fn default() -> Self {
        Self {
            mortality: CheckMortalityParams::default(),
            nonce: None,
            tip: 0,
            tip_of: 0,
            tip_of_asset_id: None,
        }
    }
}
impl<T: Config> DefaultExtrinsicParamsBuilder<T> {
    /// Configure new extrinsic params. We default to providing no tip
    /// and using an immortal transaction unless otherwise configured
    pub fn new() -> Self {
        Default::default()
    }
    /// Make the transaction immortal, meaning it will never expire. This means that it could, in
    /// theory, be pending for a long time and only be included many blocks into the future.
    pub fn immortal(mut self) -> Self {
        self.mortality = transaction_extensions::CheckMortalityParams::immortal();
        self
    }
    /// Make the transaction mortal, given a number of blocks it will be mortal for from
    /// the current block at the time of submission.
    ///
    /// # Warning
    ///
    /// This will ultimately return an error if used for creating extrinsic offline, because we need
    /// additional information in order to set the mortality properly.
    ///
    /// When creating offline transactions, you must use [`Self::mortal_from_unchecked`] instead to
    /// set the mortality. This provides all of the necessary information which we must otherwise
    /// be online in order to obtain.
    pub fn mortal(mut self, for_n_blocks: u64) -> Self {
        self.mortality = transaction_extensions::CheckMortalityParams::mortal(for_n_blocks);
        self
    }
    /// Configure a transaction that will be mortal for the number of blocks given, and from the
    /// block details provided. Prefer to use [`Self::mortal()`] where possible, which prevents
    /// the block number and hash from being misaligned.
    pub fn mortal_from_unchecked(
        mut self,
        for_n_blocks: u64,
        from_block_n: u64,
        from_block_hash: HashFor<T>,
    ) -> Self {
        self.mortality = transaction_extensions::CheckMortalityParams::mortal_from_unchecked(
            for_n_blocks,
            from_block_n,
            from_block_hash,
        );
        self
    }
    /// Provide a specific nonce for the submitter of the extrinsic
    pub fn nonce(mut self, nonce: u64) -> Self {
        self.nonce = Some(nonce);
        self
    }
    /// Provide a tip to the block author in the chain's native token.
    pub fn tip(mut self, tip: u128) -> Self {
        // Set both tips so that whichever payment extension the chain uses sees it.
        self.tip = tip;
        self.tip_of = tip;
        self.tip_of_asset_id = None;
        self
    }
    /// Provide a tip to the block author using the token denominated by the `asset_id` provided.
    /// This is not applicable on chains which don't use the `ChargeAssetTxPayment` signed
    /// extension; in this case, no tip will be given.
    pub fn tip_of(mut self, tip: u128, asset_id: T::AssetId) -> Self {
        // Zero the native-token tip: this tip is paid via `ChargeAssetTxPayment` only.
        self.tip = 0;
        self.tip_of = tip;
        self.tip_of_asset_id = Some(asset_id);
        self
    }
    /// Build the extrinsic parameters.
    pub fn build(self) -> <DefaultExtrinsicParams<T> as ExtrinsicParams<T>>::Params {
        let check_mortality_params = self.mortality;
        let charge_asset_tx_params = if let Some(asset_id) = self.tip_of_asset_id {
            // Bug fix: pass `self.tip_of` here. Previously `self.tip` was passed,
            // but `tip_of()` resets `tip` to 0, so asset-denominated tips were
            // silently dropped.
            transaction_extensions::ChargeAssetTxPaymentParams::tip_of(self.tip_of, asset_id)
        } else {
            transaction_extensions::ChargeAssetTxPaymentParams::tip(self.tip)
        };
        let charge_transaction_params =
            transaction_extensions::ChargeTransactionPaymentParams::tip(self.tip);
        let check_nonce_params = if let Some(nonce) = self.nonce {
            transaction_extensions::CheckNonceParams::with_nonce(nonce)
        } else {
            transaction_extensions::CheckNonceParams::from_chain()
        };
        // Tuple order must mirror the extension order in `DefaultExtrinsicParams`:
        // (VerifySignature, CheckSpecVersion, CheckTxVersion, CheckNonce, CheckGenesis,
        //  CheckMortality, ChargeAssetTxPayment, ChargeTransactionPayment, CheckMetadataHash)
        (
            (),
            (),
            (),
            check_nonce_params,
            (),
            check_mortality_params,
            charge_asset_tx_params,
            charge_transaction_params,
            (),
        )
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Helper: only compiles/passes if `T` implements `Default`.
    fn assert_default<T: Default>(_t: T) {}
    #[test]
    fn params_are_default() {
        let built = DefaultExtrinsicParamsBuilder::<crate::config::PezkuwiConfig>::new().build();
        assert_default(built)
    }
}
+128
View File
@@ -0,0 +1,128 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! This module contains a trait which controls the parameters that must
//! be provided in order to successfully construct an extrinsic.
//! [`crate::config::DefaultExtrinsicParams`] provides a general-purpose
//! implementation of this that will work in many cases.
use crate::{
client::ClientState,
config::{Config, HashFor},
error::ExtrinsicParamsError,
};
use alloc::vec::Vec;
use core::any::Any;
/// This trait allows you to configure the "signed extra" and
/// "additional" parameters that are a part of the transaction payload
/// or the signer payload respectively.
pub trait ExtrinsicParams<T: Config>: ExtrinsicParamsEncoder + Sized + Send + 'static {
    /// These parameters can be provided to the constructor along with
    /// some default parameters that `subxt` understands, in order to
    /// help construct your [`ExtrinsicParams`] object.
    type Params: Params<T>;
    /// Construct a new instance of our [`ExtrinsicParams`].
    ///
    /// `client` carries chain state (e.g. genesis hash, runtime version) that
    /// some extensions read; `params` is the user-provided configuration.
    fn new(client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError>;
}
/// This trait is expected to be implemented for any [`ExtrinsicParams`], and
/// defines how to encode the "additional" and "extra" params. Both functions
/// are optional and will encode nothing by default.
pub trait ExtrinsicParamsEncoder: 'static {
    /// This is expected to SCALE encode the transaction extension data to some
    /// buffer that has been provided. This data is attached to the transaction
    /// and also (by default) attached to the signer payload which is signed to
    /// provide a signature for the transaction.
    ///
    /// If [`ExtrinsicParamsEncoder::encode_signer_payload_value_to`] is implemented,
    /// then that will be used instead when generating a signer payload. Useful for
    /// eg the `VerifySignature` extension, which is sent with the transaction but
    /// is not a part of the signer payload.
    fn encode_value_to(&self, _v: &mut Vec<u8>) {}
    /// See [`ExtrinsicParamsEncoder::encode_value_to`]. This defaults to calling that
    /// method, but if implemented will dictate what is encoded to the signer payload.
    fn encode_signer_payload_value_to(&self, v: &mut Vec<u8>) {
        self.encode_value_to(v);
    }
    /// This is expected to SCALE encode the "implicit" (formerly "additional")
    /// parameters to some buffer that has been provided. These parameters are
    /// _not_ sent along with the transaction, but are taken into account when
    /// signing it, meaning the client and node must agree on their values.
    fn encode_implicit_to(&self, _v: &mut Vec<u8>) {}
    /// Set the signature. This happens after we have constructed the extrinsic params,
    /// and so is defined here rather than on the params, below. We need to use `&dyn Any`
    /// to keep this trait object safe, but can downcast in the impls.
    ///
    /// # Panics
    ///
    /// Implementations of this will likely try to downcast the provided `account_id`
    /// and `signature` into `T::AccountId` and `T::Signature` (where `T: Config`), and are
    /// free to panic if this downcasting does not succeed.
    ///
    /// In typical usage, this is not a problem, since this method is only called internally
    /// and provided values which line up with the relevant `Config`. In theory though, this
    /// method can be called manually with any types, hence this warning.
    fn inject_signature(&mut self, _account_id: &dyn Any, _signature: &dyn Any) {}
}
/// The parameters (ie [`ExtrinsicParams::Params`]) can also have data injected into them,
/// allowing Subxt to retrieve data from the chain and amend the parameters with it when
/// online.
pub trait Params<T: Config> {
    /// Set the account nonce.
    fn inject_account_nonce(&mut self, _nonce: u64) {}
    /// Set the current block.
    fn inject_block(&mut self, _number: u64, _hash: HashFor<T>) {}
}
// The unit type carries no parameters, so both injections are no-ops.
impl<T: Config> Params<T> for () {}
// Implements `Params` for a tuple by forwarding each injection to every
// element. `$ident` is a type parameter bound by `Params<Conf>`, `$index`
// the tuple position it sits at.
macro_rules! impl_tuples {
    ($($ident:ident $index:tt),+) => {
        impl <Conf: Config, $($ident : Params<Conf>),+> Params<Conf> for ($($ident,)+){
            fn inject_account_nonce(&mut self, nonce: u64) {
                $(self.$index.inject_account_nonce(nonce);)+
            }
            fn inject_block(&mut self, number: u64, hash: HashFor<Conf>) {
                $(self.$index.inject_block(number, hash);)+
            }
        }
    }
}
// Implement `Params` for tuples of arity 1 through 26.
#[rustfmt::skip]
const _: () = {
    impl_tuples!(A 0);
    impl_tuples!(A 0, B 1);
    impl_tuples!(A 0, B 1, C 2);
    impl_tuples!(A 0, B 1, C 2, D 3);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, T 19);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, T 19, U 20);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, T 19, U 20, V 21);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, T 19, U 20, V 21, W 22);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, T 19, U 20, V 21, W 22, X 23);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, T 19, U 20, V 21, W 22, X 23, Y 24);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, T 19, U 20, V 21, W 22, X 23, Y 24, Z 25);
};
+130
View File
@@ -0,0 +1,130 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! This module provides a [`Config`] type, which is used to define various
//! types that are important in order to speak to a particular chain.
//! [`BizinikiwConfig`] provides a default set of these types suitable for the
//! default Bizinikiwi node implementation, and [`PezkuwiConfig`] for a
//! Pezkuwi node.
mod default_extrinsic_params;
mod extrinsic_params;
pub mod pezkuwi;
pub mod bizinikiwi;
pub mod transaction_extensions;
use codec::{Decode, Encode};
use core::fmt::Debug;
use pezkuwi_subxt_metadata::Metadata;
use scale_decode::DecodeAsType;
use scale_encode::EncodeAsType;
use serde::{Serialize, de::DeserializeOwned};
pub use default_extrinsic_params::{DefaultExtrinsicParams, DefaultExtrinsicParamsBuilder};
pub use extrinsic_params::{ExtrinsicParams, ExtrinsicParamsEncoder};
pub use pezkuwi::{PezkuwiConfig, PezkuwiExtrinsicParams, PezkuwiExtrinsicParamsBuilder};
pub use bizinikiwi::{BizinikiwConfig, BizinikiwiExtrinsicParams, BizinikiwiExtrinsicParamsBuilder};
pub use transaction_extensions::TransactionExtension;
/// Runtime types.
// Note: the `Send + Sync + 'static` bound isn't strictly required, but currently deriving
// TypeInfo automatically applies a 'static bound to all generic types (including this one),
// And we want the compiler to infer `Send` and `Sync` OK for things which have `T: Config`
// rather than having to `unsafe impl` them ourselves.
pub trait Config: Sized + Send + Sync + 'static {
    /// The account ID type.
    type AccountId: Debug + Clone + Encode + Decode + Serialize + Send;
    /// The address type.
    type Address: Debug + Encode + From<Self::AccountId>;
    /// The signature type.
    type Signature: Debug + Clone + Encode + Decode + Send;
    /// The hashing system (algorithm) being used in the runtime (e.g. Blake2).
    type Hasher: Debug + Clone + Copy + Hasher + Send + Sync;
    /// The block header.
    type Header: Debug + Header<Hasher = Self::Hasher> + Sync + Send + DeserializeOwned + Clone;
    /// This type defines the extrinsic extra and additional parameters.
    /// See [`DefaultExtrinsicParams`] for a common default.
    type ExtrinsicParams: ExtrinsicParams<Self>;
    /// This is used to identify an asset in the `ChargeAssetTxPayment` signed extension.
    type AssetId: Debug + Clone + Encode + DecodeAsType + EncodeAsType + Send;
}
/// Given some [`Config`], this returns the type of hash used.
pub type HashFor<T> = <<T as Config>::Hasher as Hasher>::Output;
/// Given some [`Config`], this returns the other params needed for its `ExtrinsicParams`.
pub type ParamsFor<T> = <<T as Config>::ExtrinsicParams as ExtrinsicParams<T>>::Params;
/// Block hashes must conform to a bunch of things to be used in Subxt.
pub trait Hash:
    Debug
    + Copy
    + Send
    + Sync
    + Decode
    + AsRef<[u8]>
    + Serialize
    + DeserializeOwned
    + Encode
    + PartialEq
    + Eq
    + core::hash::Hash
{
}
// Blanket impl: any type meeting the bounds above is automatically a `Hash`.
impl<T> Hash for T where
    T: Debug
        + Copy
        + Send
        + Sync
        + Decode
        + AsRef<[u8]>
        + Serialize
        + DeserializeOwned
        + Encode
        + PartialEq
        + Eq
        + core::hash::Hash
{
}
/// This represents the hasher used by a node to hash things like block headers
/// and extrinsics.
pub trait Hasher {
    /// The type given back from the hash operation
    type Output: Hash;
    /// Construct a new hasher.
    ///
    /// Implementations may inspect the metadata (e.g. the `System` pallet's
    /// `Hashing` associated type) to select the hash algorithm.
    fn new(metadata: &Metadata) -> Self;
    /// Hash some bytes to the given output type.
    fn hash(&self, s: &[u8]) -> Self::Output;
    /// Hash some SCALE encodable type to the given output type.
    fn hash_of<S: Encode>(&self, s: &S) -> Self::Output {
        // SCALE-encode first, then hash the resulting bytes.
        let out = s.encode();
        self.hash(&out)
    }
}
/// This represents the block header type used by a node.
pub trait Header: Sized + Encode + Decode {
    /// The block number type for this header.
    type Number: Into<u64>;
    /// The hasher used to hash this header.
    type Hasher: Hasher;
    /// Return the block number of this header.
    fn number(&self) -> Self::Number;
    /// Hash this header.
    fn hash_with(&self, hasher: Self::Hasher) -> <Self::Hasher as Hasher>::Output {
        // The block hash is the hash of the SCALE-encoded header.
        hasher.hash_of(self)
    }
}
+41
View File
@@ -0,0 +1,41 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Pezkuwi specific configuration
use super::{Config, DefaultExtrinsicParams, DefaultExtrinsicParamsBuilder};
use crate::config::BizinikiwConfig;
pub use crate::utils::{AccountId32, MultiAddress, MultiSignature};
pub use primitive_types::{H256, U256};
/// Default set of commonly used types by Pezkuwi nodes.
// Note: The trait implementations exist just to make life easier,
// but shouldn't strictly be necessary since users can't instantiate this type.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
pub enum PezkuwiConfig {}
impl Config for PezkuwiConfig {
    // Most associated types are inherited from the Bizinikiwi defaults;
    // only `Address` and `ExtrinsicParams` differ (see below).
    type AccountId = <BizinikiwConfig as Config>::AccountId;
    type Signature = <BizinikiwConfig as Config>::Signature;
    type Hasher = <BizinikiwConfig as Config>::Hasher;
    type Header = <BizinikiwConfig as Config>::Header;
    type AssetId = <BizinikiwConfig as Config>::AssetId;
    // Address on Pezkuwi has no account index, whereas it's u32 on
    // the default bizinikiwi dev node.
    type Address = MultiAddress<Self::AccountId, ()>;
    // These are the same as the default bizinikiwi node, but redefined
    // because we need to pass the PezkuwiConfig trait as a param.
    type ExtrinsicParams = PezkuwiExtrinsicParams<Self>;
}
/// A struct representing the signed extra and additional parameters required
/// to construct a transaction for a pezkuwi node.
pub type PezkuwiExtrinsicParams<T> = DefaultExtrinsicParams<T>;
/// A builder which leads to [`PezkuwiExtrinsicParams`] being constructed.
/// This is what you provide to methods like `sign_and_submit()`.
pub type PezkuwiExtrinsicParamsBuilder<T> = DefaultExtrinsicParamsBuilder<T>;
@@ -0,0 +1,679 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! This module contains implementations for common transaction extensions, each
//! of which implements [`TransactionExtension`], and can be used in conjunction with
//! [`AnyOf`] to configure the set of transaction extensions which are known about
//! when interacting with a chain.
use super::extrinsic_params::ExtrinsicParams;
use crate::{
client::ClientState,
config::{Config, ExtrinsicParamsEncoder, HashFor},
error::ExtrinsicParamsError,
utils::{Era, Static},
};
use alloc::{borrow::ToOwned, boxed::Box, vec::Vec};
use codec::{Compact, Encode};
use core::{any::Any, fmt::Debug};
use derive_where::derive_where;
use hashbrown::HashMap;
use scale_decode::DecodeAsType;
use scale_info::PortableRegistry;
// Re-export this here; it's a bit generically named to be re-exported from ::config.
pub use super::extrinsic_params::Params;
/// A single [`TransactionExtension`] has a unique name, but is otherwise the
/// same as [`ExtrinsicParams`] in describing how to encode the extra and
/// additional data.
pub trait TransactionExtension<T: Config>: ExtrinsicParams<T> {
    /// The type representing the `extra` / value bytes of a transaction extension.
    /// Decoding from this type should be symmetrical to the respective
    /// `ExtrinsicParamsEncoder::encode_value_to()` implementation of this transaction extension.
    type Decoded: DecodeAsType;
    /// This should return true if the transaction extension matches the details given.
    /// Often, this will involve just checking that the identifier given matches that of the
    /// extension in question.
    fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool;
}
/// The [`VerifySignature`] extension. For V5 General transactions, this is how a signature
/// is provided. The signature is constructed by signing a payload which contains the
/// transaction call data as well as the encoded "additional" bytes for any extensions _after_
/// this one in the list.
// Starts out `Disabled`; a signature is injected later via `inject_signature`.
pub struct VerifySignature<T: Config>(VerifySignatureDetails<T>);
impl<T: Config> ExtrinsicParams<T> for VerifySignature<T> {
    type Params = ();
    fn new(_client: &ClientState<T>, _params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
        // Begin disabled; `inject_signature` switches this to `Signed` later.
        Ok(Self(VerifySignatureDetails::Disabled))
    }
}
impl<T: Config> ExtrinsicParamsEncoder for VerifySignature<T> {
    fn encode_value_to(&self, out: &mut Vec<u8>) {
        self.0.encode_to(out);
    }
    fn encode_signer_payload_value_to(&self, out: &mut Vec<u8>) {
        // This extension never appears in the signer payload, and anything
        // encoded by extensions before this one is ignored when building that
        // payload — so discard whatever has been written so far.
        out.clear();
    }
    fn encode_implicit_to(&self, out: &mut Vec<u8>) {
        // Only the "implicit" data for extensions _after_ this one contributes
        // to the signer payload; drop anything seen so far.
        out.clear();
    }
    fn inject_signature(&mut self, account: &dyn Any, signature: &dyn Any) {
        // Recover the concrete types from `&dyn Any` (the trait takes `Any`
        // so that it stays object safe); panics if the wrong types are given.
        let account = account
            .downcast_ref::<T::AccountId>()
            .expect("A T::AccountId should have been provided")
            .clone();
        let signature = signature
            .downcast_ref::<T::Signature>()
            .expect("A T::Signature should have been provided")
            .clone();
        // The signature is not set through params, only here, once given by a user:
        self.0 = VerifySignatureDetails::Signed { signature, account }
    }
}
impl<T: Config> TransactionExtension<T> for VerifySignature<T> {
    type Decoded = Static<VerifySignatureDetails<T>>;
    fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool {
        matches!(identifier, "VerifySignature")
    }
}
/// This allows a signature to be provided to the [`VerifySignature`] transaction extension.
// Dev note: this must encode identically to https://github.com/pezkuwichain/pezkuwi-sdk/blob/fd72d58313c297a10600037ce1bb88ec958d722e/bizinikiwi/frame/verify-signature/src/extension.rs#L43
// (variant order and field order are part of the SCALE encoding).
#[derive(codec::Encode, codec::Decode)]
pub enum VerifySignatureDetails<T: Config> {
    /// A signature has been provided.
    Signed {
        /// The signature.
        signature: T::Signature,
        /// The account that generated the signature.
        account: T::AccountId,
    },
    /// No signature was provided.
    Disabled,
}
/// The [`CheckMetadataHash`] transaction extension.
pub struct CheckMetadataHash {
    // Eventually we might provide or calculate the metadata hash here,
    // but for now we never provide a hash and so this is empty.
}
impl<T: Config> ExtrinsicParams<T> for CheckMetadataHash {
    type Params = ();
    fn new(_client: &ClientState<T>, _params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
        Ok(Self {})
    }
}
impl ExtrinsicParamsEncoder for CheckMetadataHash {
    fn encode_value_to(&self, out: &mut Vec<u8>) {
        // A single 0 byte in the TX payload tells the chain _not_ to expect
        // any metadata hash in the signer payload.
        0u8.encode_to(out);
    }
    fn encode_implicit_to(&self, out: &mut Vec<u8>) {
        // Consistent with the above: no metadata hash in the signer payload.
        None::<()>.encode_to(out);
    }
}
impl<T: Config> TransactionExtension<T> for CheckMetadataHash {
    type Decoded = CheckMetadataHashMode;
    fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool {
        matches!(identifier, "CheckMetadataHash")
    }
}
/// Is metadata checking enabled or disabled?
// Dev note: The "Disabled" and "Enabled" variant names match those that the
// transaction extension will be encoded with, in order that DecodeAsType will work
// properly.
#[derive(Copy, Clone, Debug, DecodeAsType)]
pub enum CheckMetadataHashMode {
    /// No hash was provided in the signer payload.
    Disabled,
    /// A hash was provided in the signer payload.
    Enabled,
}
impl CheckMetadataHashMode {
    /// Is metadata checking enabled or disabled for this transaction?
    pub fn is_enabled(&self) -> bool {
        matches!(self, CheckMetadataHashMode::Enabled)
    }
}
/// The [`CheckSpecVersion`] transaction extension.
pub struct CheckSpecVersion(u32);
impl<T: Config> ExtrinsicParams<T> for CheckSpecVersion {
    type Params = ();
    fn new(client: &ClientState<T>, _params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
        // Capture the runtime spec version known to the client.
        Ok(Self(client.runtime_version.spec_version))
    }
}
impl ExtrinsicParamsEncoder for CheckSpecVersion {
    fn encode_implicit_to(&self, out: &mut Vec<u8>) {
        // Signer-payload-only data: the spec version is not sent with the tx.
        self.0.encode_to(out);
    }
}
impl<T: Config> TransactionExtension<T> for CheckSpecVersion {
    type Decoded = ();
    fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool {
        matches!(identifier, "CheckSpecVersion")
    }
}
/// The [`CheckNonce`] transaction extension.
pub struct CheckNonce(u64);
impl<T: Config> ExtrinsicParams<T> for CheckNonce {
    type Params = CheckNonceParams;
    fn new(_client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
        // If no nonce was configured or injected, fall back to 0.
        Ok(Self(params.0.unwrap_or_default()))
    }
}
impl ExtrinsicParamsEncoder for CheckNonce {
    fn encode_value_to(&self, out: &mut Vec<u8>) {
        // Nonces are compact-encoded.
        Compact(self.0).encode_to(out);
    }
}
impl<T: Config> TransactionExtension<T> for CheckNonce {
    type Decoded = u64;
    fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool {
        matches!(identifier, "CheckNonce")
    }
}
/// Configure the nonce used.
#[derive(Debug, Clone, Default)]
pub struct CheckNonceParams(Option<u64>);
impl CheckNonceParams {
    /// Retrieve the nonce from the chain and use that.
    pub fn from_chain() -> Self {
        // `None` signals that the nonce should be injected later.
        Self::default()
    }
    /// Manually set an account nonce to use.
    pub fn with_nonce(nonce: u64) -> Self {
        Self(Some(nonce))
    }
}
impl<T: Config> Params<T> for CheckNonceParams {
fn inject_account_nonce(&mut self, nonce: u64) {
if self.0.is_none() {
self.0 = Some(nonce)
}
}
}
/// The [`CheckTxVersion`] transaction extension.
pub struct CheckTxVersion(u32);
impl<T: Config> ExtrinsicParams<T> for CheckTxVersion {
    type Params = ();
    fn new(client: &ClientState<T>, _params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
        // The transaction version comes from the client's cached runtime version.
        let tx_version = client.runtime_version.transaction_version;
        Ok(Self(tx_version))
    }
}
impl ExtrinsicParamsEncoder for CheckTxVersion {
    fn encode_implicit_to(&self, v: &mut Vec<u8>) {
        // Signed into the implicit data only; not part of the extrinsic bytes.
        self.0.encode_to(v);
    }
}
impl<T: Config> TransactionExtension<T> for CheckTxVersion {
    type Decoded = ();
    fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool {
        "CheckTxVersion" == identifier
    }
}
/// The [`CheckGenesis`] transaction extension.
pub struct CheckGenesis<T: Config>(HashFor<T>);
impl<T: Config> ExtrinsicParams<T> for CheckGenesis<T> {
    type Params = ();
    fn new(client: &ClientState<T>, _params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
        // Capture the genesis hash known to the client.
        let genesis_hash = client.genesis_hash;
        Ok(Self(genesis_hash))
    }
}
impl<T: Config> ExtrinsicParamsEncoder for CheckGenesis<T> {
    fn encode_implicit_to(&self, v: &mut Vec<u8>) {
        // The genesis hash is only signed into the implicit data.
        self.0.encode_to(v);
    }
}
impl<T: Config> TransactionExtension<T> for CheckGenesis<T> {
    type Decoded = ();
    fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool {
        "CheckGenesis" == identifier
    }
}
/// The [`CheckMortality`] transaction extension.
pub struct CheckMortality<T: Config> {
    // How mortality was configured; see `CheckMortalityParamsInner` for the states.
    params: CheckMortalityParamsInner<T>,
    // Fallback hash signed into the implicit data when the tx ends up immortal.
    genesis_hash: HashFor<T>,
}
impl<T: Config> ExtrinsicParams<T> for CheckMortality<T> {
    type Params = CheckMortalityParams<T>;
    fn new(client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
        // If a user has explicitly configured the transaction to be mortal for n blocks, but we get
        // to this stage and no injected information was able to turn this into MortalFromBlock{..},
        // then we hit an error as we are unable to construct a mortal transaction here.
        if matches!(&params.0, CheckMortalityParamsInner::MortalForBlocks(_)) {
            return Err(ExtrinsicParamsError::custom(
                "CheckMortality: We cannot construct an offline extrinsic with only the number of blocks it is mortal for. Use mortal_from_unchecked instead.",
            ));
        }
        Ok(CheckMortality {
            // if nothing has been explicitly configured, we will have a mortal transaction
            // valid for 32 blocks if block info is available.
            params: params.0,
            genesis_hash: client.genesis_hash,
        })
    }
}
impl<T: Config> ExtrinsicParamsEncoder for CheckMortality<T> {
    fn encode_value_to(&self, v: &mut Vec<u8>) {
        // The extrinsic payload carries the Era: mortal when we know the start
        // block, immortal otherwise.
        match &self.params {
            CheckMortalityParamsInner::MortalFromBlock { for_n_blocks, from_block_n, .. } => {
                Era::mortal(*for_n_blocks, *from_block_n).encode_to(v);
            },
            _ => {
                // Note: if we see `CheckMortalityInner::MortalForBlocks`, then it means the user
                // has configured a block to be mortal for N blocks, but the current block was
                // never injected, so we don't know where to start from and default back to
                // building an immortal tx.
                Era::Immortal.encode_to(v);
            },
        }
    }
    fn encode_implicit_to(&self, v: &mut Vec<u8>) {
        // The implicit (signed-only) data carries the hash of the era's start
        // block, or the genesis hash for immortal transactions.
        match &self.params {
            CheckMortalityParamsInner::MortalFromBlock { from_block_hash, .. } => {
                from_block_hash.encode_to(v);
            },
            _ => {
                self.genesis_hash.encode_to(v);
            },
        }
    }
}
impl<T: Config> TransactionExtension<T> for CheckMortality<T> {
    type Decoded = Era;
    fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool {
        identifier == "CheckMortality"
    }
}
/// Parameters to configure the [`CheckMortality`] transaction extension.
pub struct CheckMortalityParams<T: Config>(CheckMortalityParamsInner<T>);
// Internal state for mortality configuration. `Params::inject_block` is what
// "upgrades" the `MortalForBlocks*` variants into `MortalFromBlock`.
enum CheckMortalityParamsInner<T: Config> {
    /// The transaction will be immortal.
    Immortal,
    /// The transaction is mortal for N blocks. This must be "upgraded" into
    /// [`CheckMortalityParamsInner::MortalFromBlock`] to ultimately work.
    MortalForBlocks(u64),
    /// The transaction is mortal for N blocks, but if it cannot be "upgraded",
    /// then it will be set to immortal instead. This is the default if unset.
    MortalForBlocksOrImmortalIfNotPossible(u64),
    /// The transaction is mortal and all of the relevant information is provided.
    MortalFromBlock { for_n_blocks: u64, from_block_n: u64, from_block_hash: HashFor<T> },
}
impl<T: Config> Default for CheckMortalityParams<T> {
    fn default() -> Self {
        // Default to being mortal for 32 blocks if possible, else immortal.
        Self(CheckMortalityParamsInner::MortalForBlocksOrImmortalIfNotPossible(32))
    }
}
impl<T: Config> CheckMortalityParams<T> {
    /// Configure a transaction that will be mortal for the number of blocks given.
    pub fn mortal(for_n_blocks: u64) -> Self {
        Self(CheckMortalityParamsInner::MortalForBlocks(for_n_blocks))
    }
    /// Configure a transaction that will be mortal for the number of blocks given,
    /// and from the block details provided. Prefer to use [`CheckMortalityParams::mortal()`]
    /// where possible, which prevents the block number and hash from being misaligned.
    pub fn mortal_from_unchecked(
        for_n_blocks: u64,
        from_block_n: u64,
        from_block_hash: HashFor<T>,
    ) -> Self {
        let inner = CheckMortalityParamsInner::MortalFromBlock {
            for_n_blocks,
            from_block_n,
            from_block_hash,
        };
        Self(inner)
    }
    /// An immortal transaction.
    pub fn immortal() -> Self {
        Self(CheckMortalityParamsInner::Immortal)
    }
}
impl<T: Config> Params<T> for CheckMortalityParams<T> {
    fn inject_block(&mut self, from_block_n: u64, from_block_hash: HashFor<T>) {
        // Only "upgrade" when the user asked to be mortal for some number of
        // blocks; explicit Immortal or an explicitly-set start block are left
        // untouched.
        let for_n_blocks = match &self.0 {
            CheckMortalityParamsInner::MortalForBlocks(n)
            | CheckMortalityParamsInner::MortalForBlocksOrImmortalIfNotPossible(n) => *n,
            _ => return,
        };
        self.0 = CheckMortalityParamsInner::MortalFromBlock {
            for_n_blocks,
            from_block_n,
            from_block_hash,
        };
    }
}
/// The [`ChargeAssetTxPayment`] transaction extension.
// Clone/Debug are derived via `derive_where` so that the bounds fall on
// `T::AssetId` rather than on `T` itself.
#[derive(DecodeAsType)]
#[derive_where(Clone, Debug; T::AssetId)]
#[decode_as_type(trait_bounds = "T::AssetId: DecodeAsType")]
pub struct ChargeAssetTxPayment<T: Config> {
    // Compact-encoded tip for the extrinsic author.
    tip: Compact<u128>,
    // Asset to pay the tip in; `None` means the native chain token.
    asset_id: Option<T::AssetId>,
}
impl<T: Config> ChargeAssetTxPayment<T> {
/// Tip to the extrinsic author in the native chain token.
pub fn tip(&self) -> u128 {
self.tip.0
}
/// Tip to the extrinsic author using the asset ID given.
pub fn asset_id(&self) -> Option<&T::AssetId> {
self.asset_id.as_ref()
}
}
impl<T: Config> ExtrinsicParams<T> for ChargeAssetTxPayment<T> {
type Params = ChargeAssetTxPaymentParams<T>;
fn new(_client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
Ok(ChargeAssetTxPayment { tip: Compact(params.tip), asset_id: params.asset_id })
}
}
impl<T: Config> ExtrinsicParamsEncoder for ChargeAssetTxPayment<T> {
fn encode_value_to(&self, v: &mut Vec<u8>) {
(self.tip, &self.asset_id).encode_to(v);
}
}
impl<T: Config> TransactionExtension<T> for ChargeAssetTxPayment<T> {
type Decoded = Self;
fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool {
identifier == "ChargeAssetTxPayment"
}
}
/// Parameters to configure the [`ChargeAssetTxPayment`] transaction extension.
pub struct ChargeAssetTxPaymentParams<T: Config> {
    tip: u128,
    asset_id: Option<T::AssetId>,
}
// Implemented by hand (rather than derived) so that no `T: Default` bound is
// required; equivalent to `no_tip()`.
impl<T: Config> Default for ChargeAssetTxPaymentParams<T> {
    fn default() -> Self {
        Self::no_tip()
    }
}
impl<T: Config> ChargeAssetTxPaymentParams<T> {
    /// Don't provide a tip to the extrinsic author.
    pub fn no_tip() -> Self {
        Self { tip: 0, asset_id: None }
    }
    /// Tip the extrinsic author in the native chain token.
    pub fn tip(tip: u128) -> Self {
        Self { tip, asset_id: None }
    }
    /// Tip the extrinsic author using the asset ID given.
    pub fn tip_of(tip: u128, asset_id: T::AssetId) -> Self {
        Self { tip, asset_id: Some(asset_id) }
    }
}
impl<T: Config> Params<T> for ChargeAssetTxPaymentParams<T> {}
/// The [`ChargeTransactionPayment`] transaction extension.
#[derive(Clone, Debug, DecodeAsType)]
pub struct ChargeTransactionPayment {
tip: Compact<u128>,
}
impl ChargeTransactionPayment {
/// Tip to the extrinsic author in the native chain token.
pub fn tip(&self) -> u128 {
self.tip.0
}
}
impl<T: Config> ExtrinsicParams<T> for ChargeTransactionPayment {
type Params = ChargeTransactionPaymentParams;
fn new(_client: &ClientState<T>, params: Self::Params) -> Result<Self, ExtrinsicParamsError> {
Ok(ChargeTransactionPayment { tip: Compact(params.tip) })
}
}
impl ExtrinsicParamsEncoder for ChargeTransactionPayment {
fn encode_value_to(&self, v: &mut Vec<u8>) {
self.tip.encode_to(v);
}
}
impl<T: Config> TransactionExtension<T> for ChargeTransactionPayment {
type Decoded = Self;
fn matches(identifier: &str, _type_id: u32, _types: &PortableRegistry) -> bool {
identifier == "ChargeTransactionPayment"
}
}
/// Parameters to configure the [`ChargeTransactionPayment`] transaction extension.
#[derive(Default)]
pub struct ChargeTransactionPaymentParams {
    tip: u128,
}
impl ChargeTransactionPaymentParams {
    /// Don't provide a tip to the extrinsic author.
    pub fn no_tip() -> Self {
        Self { tip: 0 }
    }
    /// Tip the extrinsic author in the native chain token.
    pub fn tip(tip: u128) -> Self {
        Self { tip }
    }
}
impl<T: Config> Params<T> for ChargeTransactionPaymentParams {}
/// This accepts a tuple of [`TransactionExtension`]s, and will dynamically make use of whichever
/// ones are actually required for the chain in the correct order, ignoring the rest. This
/// is a sensible default, and allows for a single configuration to work across multiple chains.
pub struct AnyOf<T, Params> {
    // The matched extensions, boxed and ordered as the chain metadata dictates.
    params: Vec<Box<dyn ExtrinsicParamsEncoder + Send + 'static>>,
    // Records the (T, Params) types used when matching against the metadata.
    _marker: core::marker::PhantomData<(T, Params)>,
}
// Implements `ExtrinsicParams` and `ExtrinsicParamsEncoder` for `AnyOf` wrapping a
// tuple of transaction extensions. Invoked once per tuple arity below.
macro_rules! impl_tuples {
    ($($ident:ident $index:tt),+) => {
        // We do some magic when the tuple is wrapped in AnyOf. We
        // look at the metadata, and use this to select and make use of only the extensions
        // that we actually need for the chain we're dealing with.
        impl <T, $($ident),+> ExtrinsicParams<T> for AnyOf<T, ($($ident,)+)>
        where
            T: Config,
            $($ident: TransactionExtension<T>,)+
        {
            type Params = ($($ident::Params,)+);
            fn new(
                client: &ClientState<T>,
                params: Self::Params,
            ) -> Result<Self, ExtrinsicParamsError> {
                let metadata = &client.metadata;
                let types = metadata.types();
                // For each transaction extension in the tuple, find the matching index in the metadata, if
                // there is one, and add it to a map with that index as the key.
                let mut exts_by_index = HashMap::new();
                $({
                    for (idx, e) in metadata.extrinsic().transaction_extensions_to_use_for_encoding().enumerate() {
                        // Skip over any exts that have a match already:
                        if exts_by_index.contains_key(&idx) {
                            continue
                        }
                        // Break and record as soon as we find a match:
                        if $ident::matches(e.identifier(), e.extra_ty(), types) {
                            let ext = $ident::new(client, params.$index)?;
                            let boxed_ext: Box<dyn ExtrinsicParamsEncoder + Send + 'static> = Box::new(ext);
                            exts_by_index.insert(idx, boxed_ext);
                            break
                        }
                    }
                })+
                // Next, turn these into an ordered vec, erroring if we haven't matched on any exts yet.
                // Extensions whose `extra` type encodes to zero bytes are silently skipped, since
                // there is nothing we'd need to provide for them anyway.
                let mut params = Vec::new();
                for (idx, e) in metadata.extrinsic().transaction_extensions_to_use_for_encoding().enumerate() {
                    let Some(ext) = exts_by_index.remove(&idx) else {
                        if is_type_empty(e.extra_ty(), types) {
                            continue
                        } else {
                            return Err(ExtrinsicParamsError::UnknownTransactionExtension(e.identifier().to_owned()));
                        }
                    };
                    params.push(ext);
                }
                Ok(AnyOf {
                    params,
                    _marker: core::marker::PhantomData
                })
            }
        }
        // Encoding simply delegates, in metadata order, to each matched extension.
        impl <T, $($ident),+> ExtrinsicParamsEncoder for AnyOf<T, ($($ident,)+)>
        where
            T: Config,
            $($ident: TransactionExtension<T>,)+
        {
            fn encode_value_to(&self, v: &mut Vec<u8>) {
                for ext in &self.params {
                    ext.encode_value_to(v);
                }
            }
            fn encode_signer_payload_value_to(&self, v: &mut Vec<u8>) {
                for ext in &self.params {
                    ext.encode_signer_payload_value_to(v);
                }
            }
            fn encode_implicit_to(&self, v: &mut Vec<u8>) {
                for ext in &self.params {
                    ext.encode_implicit_to(v);
                }
            }
            fn inject_signature(&mut self, account_id: &dyn Any, signature: &dyn Any) {
                for ext in &mut self.params {
                    ext.inject_signature(account_id, signature);
                }
            }
        }
    }
}
// Expand the impls for tuples of 1..=21 extensions. The `const _` block simply
// scopes the expansions; `rustfmt::skip` keeps one invocation per line.
#[rustfmt::skip]
const _: () = {
    impl_tuples!(A 0);
    impl_tuples!(A 0, B 1);
    impl_tuples!(A 0, B 1, C 2);
    impl_tuples!(A 0, B 1, C 2, D 3);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, U 19);
    impl_tuples!(A 0, B 1, C 2, D 3, E 4, F 5, G 6, H 7, I 8, J 9, K 10, L 11, M 12, N 13, O 14, P 15, Q 16, R 17, S 18, U 19, V 20);
};
/// Checks to see whether the type being given is empty, ie would require
/// 0 bytes to encode.
fn is_type_empty(type_id: u32, types: &scale_info::PortableRegistry) -> bool {
    use scale_info::TypeDef;
    let ty = match types.resolve(type_id) {
        Some(ty) => ty,
        // Can't resolve; type may not be empty. Not expected to hit this.
        None => return false,
    };
    match &ty.type_def {
        // Composites and tuples are empty iff every field is empty.
        TypeDef::Composite(c) => c.fields.iter().all(|f| is_type_empty(f.ty.id, types)),
        TypeDef::Tuple(t) => t.fields.iter().all(|f| is_type_empty(f.id, types)),
        // An array is empty if it has no entries, or its element type is empty.
        TypeDef::Array(a) => a.len == 0 || is_type_empty(a.type_param.id, types),
        // Explicitly list these in case any additions are made in the future.
        TypeDef::BitSequence(_) |
        TypeDef::Variant(_) |
        TypeDef::Sequence(_) |
        TypeDef::Compact(_) |
        TypeDef::Primitive(_) => false,
    }
}
+137
View File
@@ -0,0 +1,137 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Construct addresses to access constants with.
use alloc::{borrow::Cow, string::String};
use derive_where::derive_where;
use scale_decode::DecodeAsType;
/// This represents a constant address. Anything implementing this trait
/// can be used to fetch constants.
pub trait Address {
    /// The target type of the value that lives at this address.
    type Target: DecodeAsType;
    /// The name of the pallet that the constant lives under.
    fn pallet_name(&self) -> &str;
    /// The name of the constant in a given pallet.
    fn constant_name(&self) -> &str;
    /// An optional hash which, if present, will be checked against
    /// the node metadata to confirm that the return type matches what
    /// we are expecting.
    ///
    /// Defaults to `None`, meaning no validation will be performed.
    fn validation_hash(&self) -> Option<[u8; 32]> {
        None
    }
}
// Any reference to an address is a valid address.
// This blanket impl simply forwards every method to the referenced address.
impl<A: Address + ?Sized> Address for &'_ A {
    type Target = A::Target;
    fn pallet_name(&self) -> &str {
        A::pallet_name(*self)
    }
    fn constant_name(&self) -> &str {
        A::constant_name(*self)
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        A::validation_hash(*self)
    }
}
// (str, str) and similar are valid addresses. The first entry is the pallet
// name and the second the constant name; the value decodes to a dynamic
// `scale_value::Value`.
impl<A: AsRef<str>, B: AsRef<str>> Address for (A, B) {
    type Target = scale_value::Value;
    fn pallet_name(&self) -> &str {
        self.0.as_ref()
    }
    fn constant_name(&self) -> &str {
        self.1.as_ref()
    }
    // `validation_hash` deliberately uses the trait default (`None`): dynamic
    // (pallet, constant) addresses carry no expectation about the value shape,
    // so no validation is performed. The previous explicit override duplicated
    // the default byte-for-byte and has been removed.
}
/// This represents the address of a constant.
#[derive_where(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct StaticAddress<ReturnTy> {
    // Pallet the constant lives under; borrowed for static codegen, owned for dynamic lookups.
    pallet_name: Cow<'static, str>,
    // Name of the constant within that pallet.
    constant_name: Cow<'static, str>,
    // Hash checked against metadata when validating; `None` disables validation.
    constant_hash: Option<[u8; 32]>,
    _marker: core::marker::PhantomData<ReturnTy>,
}
/// A dynamic lookup address to access a constant.
pub type DynamicAddress<ReturnTy> = StaticAddress<ReturnTy>;
impl<ReturnTy> StaticAddress<ReturnTy> {
    /// Create a new [`StaticAddress`] to use to look up a constant.
    pub fn new(pallet_name: impl Into<String>, constant_name: impl Into<String>) -> Self {
        let pallet_name = Cow::Owned(pallet_name.into());
        let constant_name = Cow::Owned(constant_name.into());
        Self {
            pallet_name,
            constant_name,
            constant_hash: None,
            _marker: core::marker::PhantomData,
        }
    }
    /// Create a new [`StaticAddress`] that will be validated
    /// against node metadata using the hash given.
    #[doc(hidden)]
    pub fn new_static(
        pallet_name: &'static str,
        constant_name: &'static str,
        hash: [u8; 32],
    ) -> Self {
        Self {
            pallet_name: Cow::Borrowed(pallet_name),
            constant_name: Cow::Borrowed(constant_name),
            constant_hash: Some(hash),
            _marker: core::marker::PhantomData,
        }
    }
    /// Do not validate this constant prior to accessing it.
    pub fn unvalidated(self) -> Self {
        // Drop only the validation hash; everything else carries over.
        Self { constant_hash: None, ..self }
    }
}
// Accessors simply expose the stored fields.
impl<ReturnTy: DecodeAsType> Address for StaticAddress<ReturnTy> {
    type Target = ReturnTy;
    fn pallet_name(&self) -> &str {
        &self.pallet_name
    }
    fn constant_name(&self) -> &str {
        &self.constant_name
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        self.constant_hash
    }
}
/// Construct a new dynamic constant lookup.
pub fn dynamic<ReturnTy: DecodeAsType>(
pallet_name: impl Into<String>,
constant_name: impl Into<String>,
) -> DynamicAddress<ReturnTy> {
DynamicAddress::new(pallet_name, constant_name)
}
+106
View File
@@ -0,0 +1,106 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Access constants from metadata.
//!
//! Use [`get`] to retrieve a constant from some metadata, or [`validate`] to check that a static
//! constant address lines up with the value seen in the metadata.
//!
//! # Example
//!
//! ```rust
//! use pezkuwi_subxt_macro::subxt;
//! use pezkuwi_subxt_core::constants;
//! use pezkuwi_subxt_core::Metadata;
//!
//! // If we generate types without `subxt`, we need to point to `::pezkuwi_subxt_core`:
//! #[subxt(
//! crate = "::pezkuwi_subxt_core",
//! runtime_metadata_path = "../artifacts/pezkuwi_metadata_small.scale",
//! )]
//! pub mod pezkuwi {}
//!
//! // Some metadata we'd like to access constants in:
//! let metadata_bytes = include_bytes!("../../../artifacts/pezkuwi_metadata_small.scale");
//! let metadata = Metadata::decode_from(&metadata_bytes[..]).unwrap();
//!
//! // We can use a static address to obtain some constant:
//! let address = pezkuwi::constants().balances().existential_deposit();
//!
//! // This validates that the address given is in line with the metadata
//! // we're trying to access the constant in:
//! constants::validate(&address, &metadata).expect("is valid");
//!
//! // This acquires the constant (and internally also validates it):
//! let ed = constants::get(&address, &metadata).expect("can decode constant");
//!
//! assert_eq!(ed, 33_333_333);
//! ```
pub mod address;
use crate::{Metadata, error::ConstantError};
use address::Address;
use alloc::{borrow::ToOwned, string::ToString, vec::Vec};
use frame_decode::constants::ConstantTypeInfo;
use scale_decode::IntoVisitor;
/// When the provided `address` is statically generated via the `#[subxt]` macro, this validates
/// that the shape of the constant value is the same as the shape expected by the static address.
///
/// When the provided `address` is dynamic (and thus does not come with any expectation of the
/// shape of the constant value), this just returns `Ok(())`
pub fn validate<Addr: Address>(address: Addr, metadata: &Metadata) -> Result<(), ConstantError> {
    // Dynamic addresses carry no hash; nothing to check for them.
    let Some(actual_hash) = address.validation_hash() else {
        return Ok(());
    };
    let pallet = metadata
        .pallet_by_name(address.pallet_name())
        .ok_or_else(|| ConstantError::PalletNameNotFound(address.pallet_name().to_string()))?;
    let expected_hash = pallet.constant_hash(address.constant_name()).ok_or_else(|| {
        ConstantError::ConstantNameNotFound {
            pallet_name: address.pallet_name().to_string(),
            constant_name: address.constant_name().to_owned(),
        }
    })?;
    if expected_hash != actual_hash {
        return Err(ConstantError::IncompatibleCodegen);
    }
    Ok(())
}
/// Fetch a constant out of the metadata given a constant address. If the `address` has been
/// statically generated, this will validate that the constant shape is as expected, too.
pub fn get<Addr: Address>(
    address: Addr,
    metadata: &Metadata,
) -> Result<Addr::Target, ConstantError> {
    // Check the address against the metadata first (a no-op for dynamic addresses).
    validate(&address, metadata)?;
    // Then decode the constant bytes into the target type.
    frame_decode::constants::decode_constant(
        address.pallet_name(),
        address.constant_name(),
        metadata,
        metadata.types(),
        Addr::Target::into_visitor(),
    )
    .map_err(ConstantError::CouldNotDecodeConstant)
}
/// Access the bytes of a constant by the address it is registered under.
pub fn get_bytes<Addr: Address>(
    address: Addr,
    metadata: &Metadata,
) -> Result<Vec<u8>, ConstantError> {
    // Validate the constant shape if a hash was given.
    validate(&address, metadata)?;
    // Return the raw, still-encoded bytes.
    let info = metadata
        .constant_info(address.pallet_name(), address.constant_name())
        .map_err(|e| ConstantError::ConstantInfoError(e.into_owned()))?;
    Ok(info.bytes.to_vec())
}
+104
View File
@@ -0,0 +1,104 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Construct addresses to access custom values with.
use alloc::{borrow::Cow, string::String};
use derive_where::derive_where;
use scale_decode::DecodeAsType;
/// Use this with [`Address::IsDecodable`].
pub use crate::utils::{Maybe, No, NoMaybe};
/// This represents the address of a custom value in the metadata.
/// Anything that implements it can be used to fetch custom values from the metadata.
/// The trait is implemented by [`str`] for dynamic lookup and [`StaticAddress`] for static queries.
pub trait Address {
    /// The type of the custom value.
    type Target: DecodeAsType;
    /// Should be set to `Yes` for Dynamic values and static values that have a valid type.
    /// Should be `No` for custom values, that have an invalid type id.
    type IsDecodable: NoMaybe;
    /// The name (key) by which the custom value can be accessed in the metadata.
    fn name(&self) -> &str;
    /// An optional hash which, if present, can be checked against node metadata.
    ///
    /// Defaults to `None`, meaning no validation will be performed.
    fn validation_hash(&self) -> Option<[u8; 32]> {
        None
    }
}
// Any reference to an address is a valid address.
// This blanket impl simply forwards every item to the referenced address.
impl<A: Address + ?Sized> Address for &'_ A {
    type Target = A::Target;
    type IsDecodable = A::IsDecodable;
    fn name(&self) -> &str {
        A::name(*self)
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        A::validation_hash(*self)
    }
}
// Support plain strings for looking up custom values.
impl Address for str {
    // Dynamic lookups decode into a generic `scale_value::Value`.
    type Target = scale_value::Value;
    // We cannot know statically whether a plain string points at a decodable value.
    type IsDecodable = Maybe;
    fn name(&self) -> &str {
        self
    }
}
/// A static address to a custom value.
#[derive_where(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct StaticAddress<ReturnTy, IsDecodable> {
    // Key the custom value is registered under in the metadata.
    name: Cow<'static, str>,
    // Hash checked against metadata when validating; `None` disables validation.
    hash: Option<[u8; 32]>,
    marker: core::marker::PhantomData<(ReturnTy, IsDecodable)>,
}
/// A dynamic address to a custom value.
pub type DynamicAddress<ReturnTy> = StaticAddress<ReturnTy, Maybe>;
impl<ReturnTy, IsDecodable> StaticAddress<ReturnTy, IsDecodable> {
    #[doc(hidden)]
    /// Creates a new StaticAddress.
    pub fn new_static(name: &'static str, hash: [u8; 32]) -> Self {
        Self { name: Cow::Borrowed(name), hash: Some(hash), marker: core::marker::PhantomData }
    }
    /// Create a new [`StaticAddress`]
    pub fn new(name: impl Into<String>) -> Self {
        // Owned name, no validation hash.
        Self { name: Cow::Owned(name.into()), hash: None, marker: core::marker::PhantomData }
    }
    /// Do not validate this custom value prior to accessing it.
    pub fn unvalidated(self) -> Self {
        // Drop only the validation hash; everything else carries over.
        Self { hash: None, ..self }
    }
}
// Accessors simply expose the stored fields.
impl<Target: DecodeAsType, IsDecodable: NoMaybe> Address for StaticAddress<Target, IsDecodable> {
    type Target = Target;
    type IsDecodable = IsDecodable;
    fn name(&self) -> &str {
        &self.name
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        self.hash
    }
}
/// Construct a new dynamic custom value lookup.
pub fn dynamic<ReturnTy: DecodeAsType>(
custom_value_name: impl Into<String>,
) -> DynamicAddress<ReturnTy> {
DynamicAddress::new(custom_value_name)
}
+164
View File
@@ -0,0 +1,164 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Access custom values from metadata.
//!
//! Use [`get`] to retrieve a custom value from some metadata, or [`validate`] to check that a
//! static custom value address lines up with the value seen in the metadata.
//!
//! # Example
//!
//! ```rust
//! use pezkuwi_subxt_macro::subxt;
//! use pezkuwi_subxt_core::custom_values;
//! use pezkuwi_subxt_core::Metadata;
//!
//! // If we generate types without `subxt`, we need to point to `::pezkuwi_subxt_core`:
//! #[subxt(
//! crate = "::pezkuwi_subxt_core",
//! runtime_metadata_path = "../artifacts/pezkuwi_metadata_small.scale",
//! )]
//! pub mod pezkuwi {}
//!
//! // Some metadata we'd like to access custom values in:
//! let metadata_bytes = include_bytes!("../../../artifacts/pezkuwi_metadata_small.scale");
//! let metadata = Metadata::decode_from(&metadata_bytes[..]).unwrap();
//!
//! // At the moment, we don't expect to see any custom values in the metadata
//! // for Pezkuwi, so this will return an error:
//! let err = custom_values::get("Foo", &metadata);
//! ```
pub mod address;
use crate::{Metadata, error::CustomValueError, utils::Maybe};
use address::Address;
use alloc::vec::Vec;
use frame_decode::custom_values::CustomValueTypeInfo;
use scale_decode::IntoVisitor;
/// Run the validation logic against some custom value address you'd like to access. Returns
/// `Ok(())` if the address is valid (or if it's not possible to check since the address has no
/// validation hash). Returns an error if the address was not valid (wrong name, type or raw bytes)
pub fn validate<Addr: Address>(address: Addr, metadata: &Metadata) -> Result<(), CustomValueError> {
    // No hash attached (dynamic address) means there is nothing we can check.
    let Some(actual_hash) = address.validation_hash() else {
        return Ok(());
    };
    let custom_value = metadata
        .custom()
        .get(address.name())
        .ok_or_else(|| CustomValueError::NotFound(address.name().into()))?;
    if custom_value.hash() != actual_hash {
        return Err(CustomValueError::IncompatibleCodegen);
    }
    Ok(())
}
/// Access a custom value by the address it is registered under. This can be just a [str] to get
/// back a dynamic value, or a static address from the generated static interface to get a value of
/// a static type returned.
pub fn get<Addr: Address<IsDecodable = Maybe>>(
    address: Addr,
    metadata: &Metadata,
) -> Result<Addr::Target, CustomValueError> {
    // Check the hash first, if one was provided (a no-op for dynamic addresses).
    validate(&address, metadata)?;
    // Then decode the custom value bytes straight into the target type.
    frame_decode::custom_values::decode_custom_value(
        address.name(),
        metadata,
        metadata.types(),
        Addr::Target::into_visitor(),
    )
    .map_err(CustomValueError::CouldNotDecodeCustomValue)
}
/// Access the bytes of a custom value by the address it is registered under.
pub fn get_bytes<Addr: Address>(
    address: Addr,
    metadata: &Metadata,
) -> Result<Vec<u8>, CustomValueError> {
    // Validate the custom value shape if a hash was given.
    validate(&address, metadata)?;
    // Return the raw, still-encoded bytes.
    let info = metadata
        .custom_value_info(address.name())
        .map_err(|e| CustomValueError::NotFound(e.not_found))?;
    Ok(info.bytes.to_vec())
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::collections::BTreeMap;
    use codec::Encode;
    use scale_decode::DecodeAsType;
    use scale_info::{TypeInfo, form::PortableForm};
    use alloc::{borrow::ToOwned, string::String, vec};
    use crate::custom_values;
    // A simple struct we can register, encode and decode as a custom value.
    #[derive(Debug, Clone, PartialEq, Eq, Encode, TypeInfo, DecodeAsType)]
    pub struct Person {
        age: u16,
        name: String,
    }
    // Build a minimal V15 metadata containing a single custom value
    // ("Mr. Robot" -> an encoded `Person`) and otherwise empty/unit types.
    fn mock_metadata() -> Metadata {
        let person_ty = scale_info::MetaType::new::<Person>();
        let unit = scale_info::MetaType::new::<()>();
        let mut types = scale_info::Registry::new();
        let person_ty_id = types.register_type(&person_ty);
        let unit_id = types.register_type(&unit);
        let types: scale_info::PortableRegistry = types.into();
        let person = Person { age: 42, name: "Neo".into() };
        let person_value_metadata: frame_metadata::v15::CustomValueMetadata<PortableForm> =
            frame_metadata::v15::CustomValueMetadata { ty: person_ty_id, value: person.encode() };
        let frame_metadata = frame_metadata::v15::RuntimeMetadataV15 {
            types,
            pallets: vec![],
            // The extrinsic/outer-enum type ids are irrelevant for these
            // tests, so everything points at the unit type.
            extrinsic: frame_metadata::v15::ExtrinsicMetadata {
                version: 0,
                address_ty: unit_id,
                call_ty: unit_id,
                signature_ty: unit_id,
                extra_ty: unit_id,
                signed_extensions: vec![],
            },
            ty: unit_id,
            apis: vec![],
            outer_enums: frame_metadata::v15::OuterEnums {
                call_enum_ty: unit_id,
                event_enum_ty: unit_id,
                error_enum_ty: unit_id,
            },
            custom: frame_metadata::v15::CustomMetadata {
                map: BTreeMap::from_iter([("Mr. Robot".to_owned(), person_value_metadata)]),
            },
        };
        let metadata: pezkuwi_subxt_metadata::Metadata = frame_metadata.try_into().unwrap();
        metadata
    }
    // A missing key should error; a registered key should decode back to the
    // exact value that was encoded into the metadata.
    #[test]
    fn test_decoding() {
        let metadata = mock_metadata();
        assert!(custom_values::get("Invalid Address", &metadata).is_err());
        let person_addr = custom_values::address::dynamic::<Person>("Mr. Robot");
        let person = custom_values::get(&person_addr, &metadata).unwrap();
        assert_eq!(person, Person { age: 42, name: "Neo".into() })
    }
}
+26
View File
@@ -0,0 +1,26 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! This module provides the entry points to create dynamic
//! transactions, storage and constant lookups.
pub use scale_value::{At, Value};
/// Submit dynamic transactions.
pub use crate::tx::payload::dynamic as tx;
/// Lookup constants dynamically.
pub use crate::constants::address::dynamic as constant;
/// Lookup storage values dynamically.
pub use crate::storage::address::dynamic as storage;
/// Execute runtime API function call dynamically.
pub use crate::runtime_api::payload::dynamic as runtime_api_call;
/// Execute View Function API function call dynamically.
pub use crate::view_functions::payload::dynamic as view_function_call;
/// Obtain a custom value from the metadata.
pub use crate::custom_values::address::dynamic as custom_value;
+286
View File
@@ -0,0 +1,286 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! The errors that can be emitted in this crate.
use alloc::{boxed::Box, string::String, vec::Vec};
use thiserror::Error as DeriveError;
/// The error emitted when something goes wrong.
///
/// This is the top-level error type: each variant transparently wraps the
/// more specific error emitted by one area of this crate.
#[derive(Debug, DeriveError)]
#[allow(missing_docs)]
pub enum Error {
    /// An error working with storage entries.
    #[error(transparent)]
    StorageError(#[from] StorageError),
    /// An error constructing or decoding an extrinsic.
    #[error(transparent)]
    Extrinsic(#[from] ExtrinsicError),
    /// An error working with constants.
    #[error(transparent)]
    Constant(#[from] ConstantError),
    /// An error working with custom values.
    #[error(transparent)]
    CustomValue(#[from] CustomValueError),
    /// An error calling a Runtime API.
    #[error(transparent)]
    RuntimeApi(#[from] RuntimeApiError),
    /// An error calling a View Function.
    #[error(transparent)]
    ViewFunction(#[from] ViewFunctionError),
    /// An error decoding events.
    #[error(transparent)]
    Events(#[from] EventsError),
}
/// Something went wrong decoding events.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum EventsError {
    /// The `Phase` at the start of an event record failed to decode.
    #[error("Can't decode event: can't decode phase: {0}")]
    CannotDecodePhase(codec::Error),
    /// The pallet index byte failed to decode.
    #[error("Can't decode event: can't decode pallet index: {0}")]
    CannotDecodePalletIndex(codec::Error),
    /// The event variant index byte failed to decode.
    #[error("Can't decode event: can't decode variant index: {0}")]
    CannotDecodeVariantIndex(codec::Error),
    /// No pallet with the decoded event index exists in the metadata.
    #[error("Can't decode event: can't find pallet with index {0}")]
    CannotFindPalletWithIndex(u8),
    /// The pallet exists but has no event variant with the decoded index.
    #[error(
        "Can't decode event: can't find variant with index {variant_index} in pallet {pallet_name}"
    )]
    CannotFindVariantWithIndex { pallet_name: String, variant_index: u8 },
    /// A single field of the event failed to decode.
    #[error("Can't decode field {field_name:?} in event {pallet_name}.{event_name}: {reason}")]
    CannotDecodeFieldInEvent {
        pallet_name: String,
        event_name: String,
        field_name: String,
        reason: scale_decode::visitor::DecodeError,
    },
    /// The topics trailing the event data failed to decode.
    #[error("Can't decode event topics: {0}")]
    CannotDecodeEventTopics(codec::Error),
    /// The event fields could not be decoded into the requested type.
    #[error("Can't decode the fields of event {pallet_name}.{event_name}: {reason}")]
    CannotDecodeEventFields { pallet_name: String, event_name: String, reason: scale_decode::Error },
    /// The event could not be decoded into a root `Event` enum type.
    #[error("Can't decode event {pallet_name}.{event_name} to Event enum: {reason}")]
    CannotDecodeEventEnum { pallet_name: String, event_name: String, reason: scale_decode::Error },
}
/// Something went wrong calling a View Function.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum ViewFunctionError {
    /// A statically generated address no longer matches the live chain's metadata.
    #[error("The static View Function address used is not compatible with the live chain")]
    IncompatibleCodegen,
    /// No pallet with the given name exists in the metadata.
    #[error("Can't find View Function: pallet {0} not found")]
    PalletNotFound(String),
    /// The pallet exists but has no View Function with the given name.
    #[error("Can't find View Function {function_name} in pallet {pallet_name}")]
    ViewFunctionNotFound { pallet_name: String, function_name: String },
    /// The provided inputs could not be encoded.
    #[error("Failed to encode View Function inputs: {0}")]
    CouldNotEncodeInputs(frame_decode::view_functions::ViewFunctionInputsEncodeError),
    /// The response bytes could not be decoded.
    #[error("Failed to decode View Function: {0}")]
    CouldNotDecodeResponse(frame_decode::view_functions::ViewFunctionDecodeError<u32>),
}
/// Something went wrong calling a Runtime API.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum RuntimeApiError {
    /// A statically generated address no longer matches the live chain's metadata.
    #[error("The static Runtime API address used is not compatible with the live chain")]
    IncompatibleCodegen,
    /// No Runtime API trait with the given name exists in the metadata.
    #[error("Runtime API trait not found: {0}")]
    TraitNotFound(String),
    /// The trait exists but has no method with the given name.
    #[error("Runtime API method {method_name} not found in trait {trait_name}")]
    MethodNotFound { trait_name: String, method_name: String },
    /// The provided inputs could not be encoded.
    #[error("Failed to encode Runtime API inputs: {0}")]
    CouldNotEncodeInputs(frame_decode::runtime_apis::RuntimeApiInputsEncodeError),
    /// The response bytes could not be decoded.
    #[error("Failed to decode Runtime API: {0}")]
    CouldNotDecodeResponse(frame_decode::runtime_apis::RuntimeApiDecodeError<u32>),
}
/// Something went wrong working with a custom value.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum CustomValueError {
    /// A statically generated address no longer matches the live chain's metadata.
    #[error("The static custom value address used is not compatible with the live chain")]
    IncompatibleCodegen,
    /// No custom value with the given name exists in the metadata.
    #[error("The custom value '{0}' was not found")]
    NotFound(String),
    /// The custom value bytes could not be decoded.
    #[error("Failed to decode custom value: {0}")]
    CouldNotDecodeCustomValue(frame_decode::custom_values::CustomValueDecodeError<u32>),
}
/// Something went wrong working with a constant.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum ConstantError {
    /// A statically generated address no longer matches the live chain's metadata.
    #[error("The static constant address used is not compatible with the live chain")]
    IncompatibleCodegen,
    /// No pallet with the given name exists in the metadata.
    #[error("Can't find constant: pallet with name {0} not found")]
    PalletNameNotFound(String),
    /// The pallet exists but has no constant with the given name.
    #[error(
        "Constant '{constant_name}' not found in pallet {pallet_name} in the live chain metadata"
    )]
    ConstantNameNotFound { pallet_name: String, constant_name: String },
    /// The constant bytes could not be decoded.
    #[error("Failed to decode constant: {0}")]
    CouldNotDecodeConstant(frame_decode::constants::ConstantDecodeError<u32>),
    /// `frame_decode` could not extract constant info from the metadata.
    #[error("Cannot obtain constant information from metadata: {0}")]
    ConstantInfoError(frame_decode::constants::ConstantInfoError<'static>),
}
/// Something went wrong trying to encode or decode a storage address.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum StorageError {
    /// A statically generated address no longer matches the live chain's metadata.
    #[error("The static storage address used is not compatible with the live chain")]
    IncompatibleCodegen,
    /// No pallet with the given name exists in the metadata.
    #[error("Can't find storage value: pallet with name {0} not found")]
    PalletNameNotFound(String),
    /// The pallet exists but has no storage entry with the given name.
    #[error(
        "Storage entry '{entry_name}' not found in pallet {pallet_name} in the live chain metadata"
    )]
    StorageEntryNotFound { pallet_name: String, entry_name: String },
    /// `frame_decode` could not extract storage info from the metadata.
    #[error("Cannot obtain storage information from metadata: {0}")]
    StorageInfoError(frame_decode::storage::StorageInfoError<'static>),
    /// The storage key could not be encoded.
    #[error("Cannot encode storage key: {0}")]
    StorageKeyEncodeError(frame_decode::storage::StorageKeyEncodeError),
    /// Iteration was requested over a plain (non-map) storage entry.
    // Fix: the pallet/entry fields were captured but never displayed, so the
    // message could not identify which entry was at fault; include them,
    // matching the style of `StorageEntryNotFound` above.
    #[error(
        "Cannot create a key to iterate over the plain storage entry {pallet_name}.{entry_name}"
    )]
    CannotIterPlainEntry { pallet_name: String, entry_name: String },
    /// Too many key parts were provided when iterating a storage entry.
    #[error(
        "Wrong number of key parts provided to iterate a storage address. We expected at most {max_expected} key parts but got {got} key parts"
    )]
    WrongNumberOfKeyPartsProvidedForIterating { max_expected: usize, got: usize },
    /// The wrong number of key parts was provided when fetching a storage value.
    #[error(
        "Wrong number of key parts provided to fetch a storage address. We expected {expected} key parts but got {got} key parts"
    )]
    WrongNumberOfKeyPartsProvidedForFetching { expected: usize, got: usize },
}
/// Something went wrong decoding a storage key.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum StorageKeyError {
    /// The key bytes could not be decoded at all.
    // `bytes` carries the offending key for programmatic inspection; it is
    // not included in the Display output.
    #[error("Can't decode the storage key: {error}")]
    StorageKeyDecodeError {
        bytes: Vec<u8>,
        error: frame_decode::storage::StorageKeyDecodeError<u32>,
    },
    /// The values embedded in the key could not be decoded.
    #[error("Can't decode the values from the storage key: {0}")]
    CannotDecodeValuesInKey(frame_decode::storage::StorageKeyValueDecodeError),
    /// Decoding consumed fewer bytes than the key contained.
    // `bytes` holds the unconsumed remainder; not shown in the message.
    #[error(
        "Cannot decode storage key: there were leftover bytes, indicating that the decoding failed"
    )]
    LeftoverBytes { bytes: Vec<u8> },
    /// One value within the key failed to decode.
    #[error("Can't decode a single value from the storage key part at index {index}: {error}")]
    CannotDecodeValueInKey { index: usize, error: scale_decode::Error },
}
/// Something went wrong decoding a storage value.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum StorageValueError {
    /// The value bytes could not be decoded.
    #[error("Cannot decode storage value: {0}")]
    CannotDecode(frame_decode::storage::StorageValueDecodeError<u32>),
    /// Decoding consumed fewer bytes than the value contained.
    // `bytes` holds the unconsumed remainder; not shown in the message.
    #[error(
        "Cannot decode storage value: there were leftover bytes, indicating that the decoding failed"
    )]
    LeftoverBytes { bytes: Vec<u8> },
}
/// An error that can be encountered when constructing a transaction.
#[derive(Debug, DeriveError)]
#[allow(missing_docs)]
pub enum ExtrinsicError {
    /// A statically generated payload no longer matches the live chain's metadata.
    #[error("The extrinsic payload is not compatible with the live chain")]
    IncompatibleCodegen,
    /// No pallet with the given name exists in the metadata.
    #[error("Can't find extrinsic: pallet with name {0} not found")]
    PalletNameNotFound(String),
    /// The pallet exists but has no call with the given name.
    #[error("Can't find extrinsic: call name {call_name} doesn't exist in pallet {pallet_name}")]
    CallNameNotFound { pallet_name: String, call_name: String },
    /// Encoding the call data failed.
    #[error("Can't encode the extrinsic call data: {0}")]
    CannotEncodeCallData(scale_encode::Error),
    /// The chain expects extrinsic versions this library does not implement.
    #[error("Subxt does not support the extrinsic versions expected by the chain")]
    UnsupportedVersion,
    /// Building the transaction extensions failed; see [`ExtrinsicParamsError`].
    #[error("Cannot construct the required transaction extensions: {0}")]
    Params(#[from] ExtrinsicParamsError),
    /// A transaction extension failed to decode.
    #[error("Cannot decode transaction extension '{name}': {error}")]
    CouldNotDecodeTransactionExtension {
        /// The extension name.
        name: String,
        /// The decode error.
        error: scale_decode::Error,
    },
    /// Decoding an extrinsic consumed fewer bytes than it contained.
    #[error(
        "After decoding the extrinsic at index {extrinsic_index}, {num_leftover_bytes} bytes were left, suggesting that decoding may have failed"
    )]
    LeftoverBytes {
        /// Index of the extrinsic that failed to decode.
        extrinsic_index: usize,
        /// Number of bytes leftover after decoding the extrinsic.
        num_leftover_bytes: usize,
    },
    /// An extrinsic failed to decode; see [`ExtrinsicDecodeErrorAt`].
    #[error("{0}")]
    ExtrinsicDecodeErrorAt(#[from] ExtrinsicDecodeErrorAt),
    /// The fields of an extrinsic could not be decoded into the requested type.
    #[error("Failed to decode the fields of an extrinsic at index {extrinsic_index}: {error}")]
    CannotDecodeFields {
        /// Index of the extrinsic whose fields we could not decode
        extrinsic_index: usize,
        /// The decode error.
        error: scale_decode::Error,
    },
    /// The extrinsic could not be decoded into a root enum type.
    #[error("Failed to decode the extrinsic at index {extrinsic_index} to a root enum: {error}")]
    CannotDecodeIntoRootExtrinsic {
        /// Index of the extrinsic that we failed to decode
        extrinsic_index: usize,
        /// The decode error.
        error: scale_decode::Error,
    },
}
#[derive(Debug, thiserror::Error)]
#[non_exhaustive]
#[allow(missing_docs)]
#[error("Cannot decode extrinsic at index {extrinsic_index}: {error}")]
pub struct ExtrinsicDecodeErrorAt {
pub extrinsic_index: usize,
pub error: ExtrinsicDecodeErrorAtReason,
}
#[derive(Debug, thiserror::Error)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum ExtrinsicDecodeErrorAtReason {
#[error("{0}")]
DecodeError(frame_decode::extrinsics::ExtrinsicDecodeError),
#[error("Leftover bytes")]
LeftoverBytes(Vec<u8>),
}
/// An error that can be emitted when trying to construct an instance of
/// [`crate::config::ExtrinsicParams`], encode data from the instance, or match on signed
/// extensions.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
#[allow(missing_docs)]
pub enum ExtrinsicParamsError {
#[error("Cannot find type id '{type_id} in the metadata (context: {context})")]
MissingTypeId {
/// Type ID.
type_id: u32,
/// Some arbitrary context to help narrow the source of the error.
context: &'static str,
},
#[error("The chain expects a signed extension with the name {0}, but we did not provide one")]
UnknownTransactionExtension(String),
#[error("Error constructing extrinsic parameters: {0}")]
Custom(Box<dyn core::error::Error + Send + Sync + 'static>),
}
impl ExtrinsicParamsError {
/// Create a custom [`ExtrinsicParamsError`] from a string.
pub fn custom<S: Into<String>>(error: S) -> Self {
let error: String = error.into();
let error: Box<dyn core::error::Error + Send + Sync + 'static> = Box::from(error);
ExtrinsicParamsError::Custom(error)
}
}
// `Infallible` has no values, so this impl exists purely to let `?` and
// `From`-based conversions compile where the source error type is `Infallible`.
impl From<core::convert::Infallible> for ExtrinsicParamsError {
    fn from(value: core::convert::Infallible) -> Self {
        // An empty match on an uninhabited type: proves to the compiler that
        // this body can never actually run.
        match value {}
    }
}
+996
View File
@@ -0,0 +1,996 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Decode and work with events.
//!
//! # Example
//!
//! ```rust
//! use pezkuwi_subxt_macro::subxt;
//! use pezkuwi_subxt_core::config::PezkuwiConfig;
//! use pezkuwi_subxt_core::events;
//! use pezkuwi_subxt_core::Metadata;
//! use pezkuwi_subxt_core::dynamic::Value;
//!
//! // If we generate types without `subxt`, we need to point to `::pezkuwi_subxt_core`:
//! #[subxt(
//! crate = "::pezkuwi_subxt_core",
//! runtime_metadata_path = "../artifacts/pezkuwi_metadata_full.scale",
//! )]
//! pub mod pezkuwi {}
//!
//! // Some metadata we'll use to work with storage entries:
//! let metadata_bytes = include_bytes!("../../artifacts/pezkuwi_metadata_full.scale");
//! let metadata = Metadata::decode_from(&metadata_bytes[..]).unwrap();
//!
//! // Some bytes representing events (located in System.Events storage):
//! let event_bytes = hex::decode("1c00000000000000a2e9b53d5517020000000100000000000310c96d901d0102000000020000000408d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27dbeea5a030000000000000000000000000000020000000402d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d8eaf04151687736326c9fea17e25fc5287613693c912909cb226aa4794f26a48102700000000000000000000000000000000020000000407be5ddb1579b72e84524fc29e78609e3caf42e85aa118ebfe0b0ad404b5bdd25fbeea5a030000000000000000000000000000020000002100d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27dbeea5a03000000000000000000000000000000000000000000000000000000000000020000000000426df03e00000000").unwrap();
//!
//! // We can decode these bytes like so:
//! let evs = events::decode_from::<PezkuwiConfig>(event_bytes, metadata);
//!
//! // And then do things like iterate over them and inspect details:
//! for ev in evs.iter() {
//! let ev = ev.unwrap();
//! println!("Index: {}", ev.index());
//! println!("Name: {}.{}", ev.pallet_name(), ev.variant_name());
//! println!("Fields: {:?}", ev.decode_as_fields::<Value>().unwrap());
//! }
//! ```
use alloc::{string::ToString, sync::Arc, vec::Vec};
use codec::{Compact, Decode, Encode};
use derive_where::derive_where;
use pezkuwi_subxt_metadata::PalletMetadata;
use scale_decode::{DecodeAsFields, DecodeAsType};
use crate::{
Metadata,
config::{Config, HashFor},
error::EventsError,
};
/// Create a new [`Events`] instance from the given bytes.
///
/// This is a shortcut for [`Events::decode_from`].
pub fn decode_from<T: Config>(event_bytes: Vec<u8>, metadata: Metadata) -> Events<T> {
    Events::<T>::decode_from(event_bytes, metadata)
}
/// Trait to uniquely identify the events's identity from the runtime metadata.
///
/// Generated API structures that represent an event implement this trait.
///
/// The trait is utilized to decode emitted events from a block, via obtaining the
/// form of the `Event` from the metadata.
pub trait StaticEvent: DecodeAsFields {
    /// Pallet name.
    const PALLET: &'static str;
    /// Event name.
    const EVENT: &'static str;

    /// Returns true if the given pallet and event names match this event.
    ///
    /// Both names must match exactly (case-sensitive comparison).
    fn is_event(pallet: &str, event: &str) -> bool {
        Self::PALLET == pallet && Self::EVENT == event
    }
}
/// A collection of events obtained from a block, bundled with the necessary
/// information needed to decode and iterate over them.
#[derive_where(Clone)]
pub struct Events<T> {
    // Metadata used to decode each event lazily as we iterate.
    metadata: Metadata,
    // Note; raw event bytes are prefixed with a Compact<u32> containing
    // the number of events to be decoded. The start_idx reflects that, so
    // that we can skip over those bytes when decoding them
    event_bytes: Arc<[u8]>,
    // Offset into `event_bytes` at which the first event starts (just past
    // the compact-encoded length prefix).
    start_idx: usize,
    // Number of events, as read from the compact-encoded prefix.
    num_events: u32,
    // Ties the `Config` type parameter to this struct without storing a `T`.
    marker: core::marker::PhantomData<T>,
}
// Ignore the Metadata when debug-logging events; it's big and distracting.
impl<T> core::fmt::Debug for Events<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut out = f.debug_struct("Events");
        out.field("event_bytes", &self.event_bytes);
        out.field("start_idx", &self.start_idx);
        out.field("num_events", &self.num_events);
        out.finish()
    }
}
impl<T: Config> Events<T> {
    /// Create a new [`Events`] instance from the given bytes.
    ///
    /// `event_bytes` is expected to be the raw SCALE-encoded `Vec` of event
    /// records (e.g. as found in `System.Events` storage).
    pub fn decode_from(event_bytes: Vec<u8>, metadata: Metadata) -> Self {
        // event_bytes is a SCALE encoded vector of events. So, pluck the
        // compact encoded length from the front, leaving the remaining bytes
        // for our iterating to decode.
        //
        // Note: if we get no bytes back, avoid an error reading vec length
        // and default to 0 events.
        let cursor = &mut &*event_bytes;
        let num_events = <Compact<u32>>::decode(cursor).unwrap_or(Compact(0)).0;
        // Start decoding after the compact encoded bytes.
        let start_idx = event_bytes.len() - cursor.len();
        Self {
            metadata,
            event_bytes: event_bytes.into(),
            start_idx,
            num_events,
            marker: core::marker::PhantomData,
        }
    }

    /// The number of events.
    pub fn len(&self) -> u32 {
        self.num_events
    }

    /// Are there no events in this block?
    // Note: mainly here to satisfy clippy.
    pub fn is_empty(&self) -> bool {
        self.num_events == 0
    }

    /// Return the bytes representing all of the events.
    ///
    /// Note: this includes the compact-encoded event count at the front.
    pub fn bytes(&self) -> &[u8] {
        &self.event_bytes
    }

    /// Iterate over all of the events, using metadata to dynamically
    /// decode them as we go, and returning the raw bytes and other associated
    /// details. If an error occurs, all subsequent iterations return `None`.
    // Dev note: The returned iterator is 'static + Send so that we can box it up and make
    // use of it with our `FilterEvents` stuff.
    pub fn iter(
        &self,
    ) -> impl Iterator<Item = Result<EventDetails<T>, EventsError>> + Send + Sync + 'static {
        // Clone the (cheaply refcounted) bytes and metadata so the iterator
        // owns them and can outlive `&self`:
        let event_bytes = self.event_bytes.clone();
        let metadata = self.metadata.clone();
        let num_events = self.num_events;

        // Iterator state: byte offset of the next event, and how many events
        // have been yielded so far.
        let mut pos = self.start_idx;
        let mut index = 0;
        core::iter::from_fn(move || {
            // Stop when we run out of bytes or have yielded every event.
            if event_bytes.len() <= pos || num_events == index {
                None
            } else {
                match EventDetails::decode_from(metadata.clone(), event_bytes.clone(), pos, index) {
                    Ok(event_details) => {
                        // Skip over decoded bytes in next iteration:
                        pos += event_details.bytes().len();
                        // Increment the index:
                        index += 1;
                        // Return the event details:
                        Some(Ok(event_details))
                    },
                    Err(e) => {
                        // By setting the position to the "end" of the event bytes,
                        // the cursor len will become 0 and the iterator will return `None`
                        // from now on:
                        pos = event_bytes.len();
                        Some(Err(e))
                    },
                }
            }
        })
    }

    /// Iterate through the events using metadata to dynamically decode and skip
    /// them, and return only those which should decode to the provided `Ev` type.
    /// If an error occurs, all subsequent iterations return `None`.
    pub fn find<Ev: StaticEvent>(&self) -> impl Iterator<Item = Result<Ev, EventsError>> {
        self.iter().filter_map(|ev| ev.and_then(|ev| ev.as_event::<Ev>()).transpose())
    }

    /// Iterate through the events using metadata to dynamically decode and skip
    /// them, and return the first event found which decodes to the provided `Ev` type.
    pub fn find_first<Ev: StaticEvent>(&self) -> Result<Option<Ev>, EventsError> {
        self.find::<Ev>().next().transpose()
    }

    /// Iterate through the events using metadata to dynamically decode and skip
    /// them, and return the last event found which decodes to the provided `Ev` type.
    pub fn find_last<Ev: StaticEvent>(&self) -> Result<Option<Ev>, EventsError> {
        self.find::<Ev>().last().transpose()
    }

    /// Find an event that decodes to the type provided. Returns true if it was found.
    pub fn has<Ev: StaticEvent>(&self) -> Result<bool, EventsError> {
        Ok(self.find::<Ev>().next().transpose()?.is_some())
    }
}
/// A phase of a block's execution.
// NOTE(review): this type is SCALE-decoded directly from node-provided event
// bytes (see `EventDetails::decode_from`), so its variant order must mirror
// the runtime's `Phase` encoding — confirm before reordering.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Decode, Encode)]
pub enum Phase {
    /// Applying an extrinsic.
    ApplyExtrinsic(u32),
    /// Finalizing the block.
    Finalization,
    /// Initializing the block.
    Initialization,
}
/// The event details.
///
/// Holds shared references to the full block event bytes plus the offsets
/// delimiting this one event, so that raw bytes and decoded views can be
/// produced on demand without copying.
#[derive(Debug, Clone)]
pub struct EventDetails<T: Config> {
    // The phase of block execution this event was emitted in.
    phase: Phase,
    /// The index of the event in the list of events in a given block.
    index: u32,
    // All event bytes for the block (shared with `Events`).
    all_bytes: Arc<[u8]>,
    // start of the bytes (phase, pallet/variant index and then fields and then topic to follow).
    start_idx: usize,
    // start of the event (ie pallet/variant index and then the fields and topic after).
    event_start_idx: usize,
    // start of the fields (ie after phase and pallet/variant index).
    event_fields_start_idx: usize,
    // end of the fields.
    event_fields_end_idx: usize,
    // end of everything (fields + topics)
    end_idx: usize,
    // Metadata used to decode this event's fields on demand.
    metadata: Metadata,
    // Topics attached to this event in its event record.
    topics: Vec<HashFor<T>>,
}
impl<T: Config> EventDetails<T> {
    /// Attempt to dynamically decode a single event from our events input.
    ///
    /// `start_idx` is the offset into `all_bytes` at which this event's record
    /// (phase, pallet/variant indexes, fields, topics) begins; `index` is its
    /// position in the block's event list. On success, only offsets are stored;
    /// fields are decoded lazily by the accessor methods below.
    fn decode_from(
        metadata: Metadata,
        all_bytes: Arc<[u8]>,
        start_idx: usize,
        index: u32,
    ) -> Result<EventDetails<T>, EventsError> {
        let input = &mut &all_bytes[start_idx..];

        // The phase comes first in an event record.
        let phase = Phase::decode(input).map_err(EventsError::CannotDecodePhase)?;
        let event_start_idx = all_bytes.len() - input.len();

        // Then the pallet and variant indexes, which identify the event type.
        let pallet_index = u8::decode(input).map_err(EventsError::CannotDecodePalletIndex)?;
        let variant_index = u8::decode(input).map_err(EventsError::CannotDecodeVariantIndex)?;
        let event_fields_start_idx = all_bytes.len() - input.len();

        // Get metadata for the event:
        let event_pallet = metadata
            .pallet_by_event_index(pallet_index)
            .ok_or_else(|| EventsError::CannotFindPalletWithIndex(pallet_index))?;
        let event_variant =
            event_pallet.event_variant_by_index(variant_index).ok_or_else(|| {
                EventsError::CannotFindVariantWithIndex {
                    pallet_name: event_pallet.name().to_string(),
                    variant_index,
                }
            })?;
        tracing::debug!("Decoding Event '{}::{}'", event_pallet.name(), &event_variant.name);

        // Skip over the bytes belonging to this event.
        for field_metadata in &event_variant.fields {
            // Skip over the bytes for this field:
            scale_decode::visitor::decode_with_visitor(
                input,
                field_metadata.ty.id,
                metadata.types(),
                scale_decode::visitor::IgnoreVisitor::new(),
            )
            .map_err(|e| EventsError::CannotDecodeFieldInEvent {
                pallet_name: event_pallet.name().to_string(),
                event_name: event_variant.name.clone(),
                field_name: field_metadata.name.clone().unwrap_or("<unknown>".to_string()),
                reason: e,
            })?;
        }

        // the end of the field bytes.
        let event_fields_end_idx = all_bytes.len() - input.len();

        // topics come after the event data in EventRecord.
        let topics =
            Vec::<HashFor<T>>::decode(input).map_err(EventsError::CannotDecodeEventTopics)?;

        // what bytes did we skip over in total, including topics.
        let end_idx = all_bytes.len() - input.len();

        Ok(EventDetails {
            phase,
            index,
            start_idx,
            event_start_idx,
            event_fields_start_idx,
            event_fields_end_idx,
            end_idx,
            all_bytes,
            metadata,
            topics,
        })
    }

    /// When was the event produced?
    pub fn phase(&self) -> Phase {
        self.phase
    }

    /// What index is this event in the stored events for this block.
    pub fn index(&self) -> u32 {
        self.index
    }

    /// The index of the pallet that the event originated from.
    pub fn pallet_index(&self) -> u8 {
        // Note: never panics; we expect these bytes to exist
        // in order that the EventDetails could be created.
        self.all_bytes[self.event_fields_start_idx - 2]
    }

    /// The index of the event variant that the event originated from.
    pub fn variant_index(&self) -> u8 {
        // Note: never panics; we expect these bytes to exist
        // in order that the EventDetails could be created.
        self.all_bytes[self.event_fields_start_idx - 1]
    }

    /// The name of the pallet from whence the Event originated.
    pub fn pallet_name(&self) -> &str {
        self.event_metadata().pallet.name()
    }

    /// Alias for pallet_name() - rebranded terminology (pezpallet)
    pub fn pezpallet_name(&self) -> &str {
        self.pallet_name()
    }

    /// The name of the event (ie the name of the variant that it corresponds to).
    pub fn variant_name(&self) -> &str {
        &self.event_metadata().variant.name
    }

    /// Fetch details from the metadata for this event.
    ///
    /// # Panics
    ///
    /// Panics if the pallet or variant cannot be found in the metadata; both
    /// lookups succeeded when this `EventDetails` was decoded, so this only
    /// fires if the stored metadata has somehow changed since then.
    pub fn event_metadata(&self) -> EventMetadataDetails<'_> {
        let pallet = self
            .metadata
            .pallet_by_event_index(self.pallet_index())
            .expect("event pallet to be found; we did this already during decoding");
        let variant = pallet
            .event_variant_by_index(self.variant_index())
            .expect("event variant to be found; we did this already during decoding");
        EventMetadataDetails { pallet, variant }
    }

    /// Return _all_ of the bytes representing this event, which include, in order:
    /// - The phase.
    /// - Pallet and event index.
    /// - Event fields.
    /// - Event Topics.
    pub fn bytes(&self) -> &[u8] {
        &self.all_bytes[self.start_idx..self.end_idx]
    }

    /// Return the bytes representing the fields stored in this event.
    pub fn field_bytes(&self) -> &[u8] {
        &self.all_bytes[self.event_fields_start_idx..self.event_fields_end_idx]
    }

    /// Decode and provide the event fields back in the form of a [`scale_value::Composite`]
    /// type which represents the named or unnamed fields that were present in the event.
    pub fn decode_as_fields<E: DecodeAsFields>(&self) -> Result<E, EventsError> {
        let bytes = &mut self.field_bytes();
        let event_metadata = self.event_metadata();

        // Build the field descriptions (type id + optional name) that the
        // decoder needs, straight from the event's variant metadata.
        let mut fields = event_metadata
            .variant
            .fields
            .iter()
            .map(|f| scale_decode::Field::new(f.ty.id, f.name.as_deref()));
        let decoded =
            E::decode_as_fields(bytes, &mut fields, self.metadata.types()).map_err(|e| {
                EventsError::CannotDecodeEventFields {
                    pallet_name: event_metadata.pallet.name().to_string(),
                    event_name: event_metadata.variant.name.clone(),
                    reason: e,
                }
            })?;
        Ok(decoded)
    }

    /// Attempt to decode these [`EventDetails`] into a type representing the event fields.
    /// Such types are exposed in the codegen as `pallet_name::events::EventName` types.
    ///
    /// Returns `Ok(None)` if this event's pallet/variant names don't match `E`.
    pub fn as_event<E: StaticEvent>(&self) -> Result<Option<E>, EventsError> {
        let ev_metadata = self.event_metadata();
        if ev_metadata.pallet.name() == E::PALLET && ev_metadata.variant.name == E::EVENT {
            let mut fields = ev_metadata
                .variant
                .fields
                .iter()
                .map(|f| scale_decode::Field::new(f.ty.id, f.name.as_deref()));
            let decoded =
                E::decode_as_fields(&mut self.field_bytes(), &mut fields, self.metadata.types())
                    .map_err(|e| EventsError::CannotDecodeEventFields {
                        pallet_name: E::PALLET.to_string(),
                        event_name: E::EVENT.to_string(),
                        reason: e,
                    })?;
            Ok(Some(decoded))
        } else {
            Ok(None)
        }
    }

    /// Attempt to decode these [`EventDetails`] into a root event type (which includes
    /// the pallet and event enum variants as well as the event fields). A compatible
    /// type for this is exposed via static codegen as a root level `Event` type.
    pub fn as_root_event<E: DecodeAsType>(&self) -> Result<E, EventsError> {
        // Decode from the pallet/variant index onward (phase excluded),
        // using the metadata's outer event enum type.
        let bytes = &self.all_bytes[self.event_start_idx..self.event_fields_end_idx];
        let decoded = E::decode_as_type(
            &mut &bytes[..],
            self.metadata.outer_enums().event_enum_ty(),
            self.metadata.types(),
        )
        .map_err(|e| {
            let md = self.event_metadata();
            EventsError::CannotDecodeEventEnum {
                pallet_name: md.pallet.name().to_string(),
                event_name: md.variant.name.clone(),
                reason: e,
            }
        })?;
        Ok(decoded)
    }

    /// Return the topics associated with this event.
    pub fn topics(&self) -> &[HashFor<T>] {
        &self.topics
    }
}
/// Details for the given event plucked from the metadata.
///
/// Borrowed from the [`crate::Metadata`] that decoded the event; see
/// [`EventDetails::event_metadata`].
pub struct EventMetadataDetails<'a> {
    /// Metadata for the pallet that the event belongs to.
    pub pallet: PalletMetadata<'a>,
    /// Metadata for the variant which describes the pallet events.
    pub variant: &'a scale_info::Variant<scale_info::form::PortableForm>,
}
/// Event related test utilities used outside this module.
#[cfg(test)]
pub(crate) mod test_utils {
    use super::*;
    use crate::config::{HashFor, BizinikiwConfig};
    use codec::Encode;
    use frame_metadata::{
        RuntimeMetadataPrefixed,
        v15::{
            CustomMetadata, ExtrinsicMetadata, OuterEnums, PalletEventMetadata, PalletMetadata,
            RuntimeMetadataV15,
        },
    };
    use scale_info::{TypeInfo, meta_type};

    /// An "outer" events enum containing exactly one event.
    #[derive(
        Encode,
        Decode,
        TypeInfo,
        Clone,
        Debug,
        PartialEq,
        Eq,
        scale_encode::EncodeAsType,
        scale_decode::DecodeAsType,
    )]
    pub enum AllEvents<Ev> {
        Test(Ev),
    }

    /// This encodes to the same format an event is expected to encode to
    /// in node System.Events storage.
    #[derive(Encode)]
    pub struct EventRecord<E: Encode> {
        // Phase the event was emitted in; encoded first.
        phase: Phase,
        // The event itself, wrapped in the single-variant outer enum.
        event: AllEvents<E>,
        // Topics; encoded after the event data.
        topics: Vec<HashFor<BizinikiwConfig>>,
    }

    impl<E: Encode> EventRecord<E> {
        /// Create a new event record with the given phase, event, and topics.
        pub fn new(phase: Phase, event: E, topics: Vec<HashFor<BizinikiwConfig>>) -> Self {
            Self { phase, event: AllEvents::Test(event), topics }
        }
    }

    /// Build an EventRecord, which encoded events in the format expected
    /// to be handed back from storage queries to System.Events.
    pub fn event_record<E: Encode>(phase: Phase, event: E) -> EventRecord<E> {
        EventRecord::new(phase, event, vec![])
    }

    /// Build fake metadata consisting of a single pallet that knows
    /// about the event type provided.
    pub fn metadata<E: TypeInfo + 'static>() -> Metadata {
        // Extrinsic needs to contain at least the generic type parameter "Call"
        // for the metadata to be valid.
        // The "Call" type from the metadata is used to decode extrinsics.
        // In reality, the extrinsic type has "Call", "Address", "Extra", "Signature" generic types.
        #[allow(unused)]
        #[derive(TypeInfo)]
        struct ExtrinsicType<Call> {
            call: Call,
        }
        // Because this type is used to decode extrinsics, we expect this to be a TypeDefVariant.
        // Each pallet must contain one single variant.
        #[allow(unused)]
        #[derive(TypeInfo)]
        enum RuntimeCall {
            PalletName(Pallet),
        }
        // The calls of the pallet.
        #[allow(unused)]
        #[derive(TypeInfo)]
        enum Pallet {
            #[allow(unused)]
            SomeCall,
        }

        // A single pallet named "Test" at index 0, exposing only events of type E.
        let pallets = vec![PalletMetadata {
            name: "Test",
            storage: None,
            calls: None,
            event: Some(PalletEventMetadata { ty: meta_type::<E>() }),
            constants: vec![],
            error: None,
            index: 0,
            docs: vec![],
        }];
        // Minimal extrinsic metadata; unit types everywhere except the Call type.
        let extrinsic = ExtrinsicMetadata {
            version: 0,
            signed_extensions: vec![],
            address_ty: meta_type::<()>(),
            call_ty: meta_type::<RuntimeCall>(),
            signature_ty: meta_type::<()>(),
            extra_ty: meta_type::<()>(),
        };
        let meta = RuntimeMetadataV15::new(
            pallets,
            extrinsic,
            meta_type::<()>(),
            vec![],
            OuterEnums {
                call_enum_ty: meta_type::<()>(),
                // The outer event enum must be the wrapper used by EventRecord.
                event_enum_ty: meta_type::<AllEvents<E>>(),
                error_enum_ty: meta_type::<()>(),
            },
            CustomMetadata { map: Default::default() },
        );
        let runtime_metadata: RuntimeMetadataPrefixed = meta.into();
        let metadata: pezkuwi_subxt_metadata::Metadata = runtime_metadata.try_into().unwrap();
        metadata
    }

    /// Build an `Events` object for test purposes, based on the details provided,
    /// and with a default block hash.
    pub fn events<E: Decode + Encode>(
        metadata: Metadata,
        event_records: Vec<EventRecord<E>>,
    ) -> Events<BizinikiwConfig> {
        let num_events = event_records.len() as u32;
        let mut event_bytes = Vec::new();
        for ev in event_records {
            ev.encode_to(&mut event_bytes);
        }
        events_raw(metadata, event_bytes, num_events)
    }

    /// Much like [`events`], but takes pre-encoded events and event count, so that we can
    /// mess with the bytes in tests if we need to.
    pub fn events_raw(
        metadata: Metadata,
        event_bytes: Vec<u8>,
        num_events: u32,
    ) -> Events<BizinikiwConfig> {
        // Prepend compact encoded length to event bytes:
        let mut all_event_bytes = Compact(num_events).encode();
        all_event_bytes.extend(event_bytes);
        Events::decode_from(all_event_bytes, metadata)
    }
}
#[cfg(test)]
mod tests {
use super::{
test_utils::{AllEvents, EventRecord, event_record, events, events_raw},
*,
};
use crate::{config::BizinikiwConfig, events::Phase};
use codec::Encode;
use primitive_types::H256;
use scale_info::TypeInfo;
use scale_value::Value;
/// Build a fake wrapped metadata.
fn metadata<E: TypeInfo + 'static>() -> Metadata {
test_utils::metadata::<E>()
}
/// [`RawEventDetails`] can be annoying to test, because it contains
/// type info in the decoded field Values. Strip that here so that
/// we can compare fields more easily.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TestRawEventDetails {
    // Expected phase the event was emitted in.
    pub phase: Phase,
    // Expected position of the event in the block's event list.
    pub index: u32,
    // Expected pallet name and index.
    pub pallet: String,
    pub pallet_index: u8,
    // Expected event variant name and index.
    pub variant: String,
    pub variant_index: u8,
    // Expected decoded field values, with type context stripped.
    pub fields: Vec<scale_value::Value>,
}
/// Compare some actual [`RawEventDetails`] with a hand-constructed
/// (probably) [`TestRawEventDetails`].
pub fn assert_raw_events_match(
    actual: EventDetails<BizinikiwConfig>,
    expected: TestRawEventDetails,
) {
    // Decode the fields dynamically and strip the type context so they can
    // be compared against plain `Value`s.
    let decoded_fields = actual
        .decode_as_fields::<scale_value::Composite<()>>()
        .expect("can decode field values (2)");
    let mut plain_fields = Vec::new();
    for value in decoded_fields.into_values() {
        plain_fields.push(value.remove_context());
    }

    // Check each of the other fields:
    assert_eq!(actual.phase(), expected.phase);
    assert_eq!(actual.index(), expected.index);
    assert_eq!(actual.pallet_name(), expected.pallet);
    assert_eq!(actual.pallet_index(), expected.pallet_index);
    assert_eq!(actual.variant_name(), expected.variant);
    assert_eq!(actual.variant_index(), expected.variant_index);
    assert_eq!(plain_fields, expected.fields);
}
#[test]
fn statically_decode_single_root_event() {
    // A single-variant event type for this test.
    #[derive(Clone, Debug, PartialEq, Decode, Encode, TypeInfo, scale_decode::DecodeAsType)]
    enum Event {
        A(u8, bool, Vec<String>),
    }

    // Fake metadata that knows about our single event type, above.
    let meta = metadata::<Event>();

    // Encode one event record the way a node would store it, and build an
    // `Events` object over those bytes.
    let original = Event::A(1, true, vec!["Hi".into()]);
    let record = event_record(Phase::ApplyExtrinsic(123), original.clone());
    let all_events = events::<Event>(meta, vec![record]);

    let details = all_events
        .iter()
        .next()
        .expect("one event expected")
        .expect("event should be extracted OK");

    // This is the line we're testing: decode back into the root enum.
    let decoded = details
        .as_root_event::<AllEvents<Event>>()
        .expect("can decode event into root enum again");

    // It should equal the event we put in:
    assert_eq!(decoded, AllEvents::Test(original));
}
#[test]
fn dynamically_decode_single_event() {
    // A single-variant event type for this test.
    #[derive(Clone, Debug, PartialEq, Decode, Encode, TypeInfo)]
    enum Event {
        A(u8, bool, Vec<String>),
    }

    // Fake metadata that knows about our single event type, above.
    let meta = metadata::<Event>();

    // Encode one event record and build an `Events` object over those bytes.
    let ev = Event::A(1, true, vec!["Hi".into()]);
    let all_events = events::<Event>(meta, vec![event_record(Phase::ApplyExtrinsic(123), ev)]);

    let mut details_iter = all_events.iter();

    // The one event decodes to the expected dynamic details.
    let expected = TestRawEventDetails {
        index: 0,
        phase: Phase::ApplyExtrinsic(123),
        pallet: "Test".to_string(),
        pallet_index: 0,
        variant: "A".to_string(),
        variant_index: 0,
        fields: vec![
            Value::u128(1),
            Value::bool(true),
            Value::unnamed_composite(vec![Value::string("Hi")]),
        ],
    };
    assert_raw_events_match(details_iter.next().unwrap().unwrap(), expected);

    // And no further events follow it.
    assert!(details_iter.next().is_none());
}
#[test]
fn dynamically_decode_multiple_events() {
    #[derive(Clone, Copy, Debug, PartialEq, Decode, Encode, TypeInfo)]
    enum Event {
        A(u8),
        B(bool),
    }
    // Fake metadata describing the event variants above:
    let metadata = metadata::<Event>();
    // Encode three events across different phases:
    let event1 = Event::A(1);
    let event2 = Event::B(true);
    let event3 = Event::A(234);
    let records = vec![
        event_record(Phase::Initialization, event1),
        event_record(Phase::ApplyExtrinsic(123), event2),
        event_record(Phase::Finalization, event3),
    ];
    let events = events::<Event>(metadata, records);
    // Each decoded event must match the corresponding expectation, in order:
    let expected = vec![
        TestRawEventDetails {
            index: 0,
            phase: Phase::Initialization,
            pallet: "Test".to_string(),
            pallet_index: 0,
            variant: "A".to_string(),
            variant_index: 0,
            fields: vec![Value::u128(1)],
        },
        TestRawEventDetails {
            index: 1,
            phase: Phase::ApplyExtrinsic(123),
            pallet: "Test".to_string(),
            pallet_index: 0,
            variant: "B".to_string(),
            variant_index: 1,
            fields: vec![Value::bool(true)],
        },
        TestRawEventDetails {
            index: 2,
            phase: Phase::Finalization,
            pallet: "Test".to_string(),
            pallet_index: 0,
            variant: "A".to_string(),
            variant_index: 0,
            fields: vec![Value::u128(234)],
        },
    ];
    let mut event_details = events.iter();
    for expectation in expected {
        assert_raw_events_match(event_details.next().unwrap().unwrap(), expectation);
    }
    // No further events should be produced.
    assert!(event_details.next().is_none());
}
#[test]
fn dynamically_decode_multiple_events_until_error() {
    #[derive(Clone, Debug, PartialEq, Decode, Encode, TypeInfo)]
    enum Event {
        A(u8),
        B(bool),
    }
    // Fake metadata describing the event variants above:
    let metadata = metadata::<Event>();
    // SCALE-encode two valid events back to back:
    let mut event_bytes = vec![];
    event_record(Phase::Initialization, Event::A(1)).encode_to(&mut event_bytes);
    event_record(Phase::ApplyExtrinsic(123), Event::B(true)).encode_to(&mut event_bytes);
    // Append garbage bytes that cannot decode as a third event:
    event_bytes.extend_from_slice(&[3, 127, 45, 0, 2]);
    // Claim 3 events, so iteration will attempt to decode the broken third one:
    let events = events_raw(metadata, event_bytes, 3);
    let mut events_iter = events.iter();
    // The first two events decode fine:
    assert_raw_events_match(
        events_iter.next().unwrap().unwrap(),
        TestRawEventDetails {
            index: 0,
            phase: Phase::Initialization,
            pallet: "Test".to_string(),
            pallet_index: 0,
            variant: "A".to_string(),
            variant_index: 0,
            fields: vec![Value::u128(1)],
        },
    );
    assert_raw_events_match(
        events_iter.next().unwrap().unwrap(),
        TestRawEventDetails {
            index: 1,
            phase: Phase::ApplyExtrinsic(123),
            pallet: "Test".to_string(),
            pallet_index: 0,
            variant: "B".to_string(),
            variant_index: 1,
            fields: vec![Value::bool(true)],
        },
    );
    // The third yields a decode error ...
    assert!(events_iter.next().unwrap().is_err());
    // ... and the iterator is fused from then on.
    assert!(events_iter.next().is_none());
    assert!(events_iter.next().is_none());
}
#[test]
fn compact_event_field() {
    #[derive(Clone, Debug, PartialEq, Encode, Decode, TypeInfo)]
    enum Event {
        A(#[codec(compact)] u32),
    }
    // Fake metadata describing the single event variant above:
    let metadata = metadata::<Event>();
    // Encode one event carrying a compact-encoded field:
    let records = vec![event_record(Phase::Finalization, Event::A(1))];
    let events = events::<Event>(metadata, records);
    // The compact field should dynamically decode to a plain number:
    let mut event_details = events.iter();
    assert_raw_events_match(
        event_details.next().unwrap().unwrap(),
        TestRawEventDetails {
            index: 0,
            phase: Phase::Finalization,
            pallet: "Test".to_string(),
            pallet_index: 0,
            variant: "A".to_string(),
            variant_index: 0,
            fields: vec![Value::u128(1)],
        },
    );
    assert!(event_details.next().is_none());
}
#[test]
fn compact_wrapper_struct_field() {
    #[derive(Clone, Decode, Debug, PartialEq, Encode, TypeInfo)]
    enum Event {
        A(#[codec(compact)] CompactWrapper),
    }
    #[derive(Clone, Decode, Debug, PartialEq, codec::CompactAs, Encode, TypeInfo)]
    struct CompactWrapper(u64);
    // Fake metadata describing the single event variant above:
    let metadata = metadata::<Event>();
    // Encode one event holding a compact-encoded wrapper struct:
    let records = vec![event_record(Phase::Finalization, Event::A(CompactWrapper(1)))];
    let events = events::<Event>(metadata, records);
    // The wrapper should decode as a one-element composite around the number:
    let mut event_details = events.iter();
    assert_raw_events_match(
        event_details.next().unwrap().unwrap(),
        TestRawEventDetails {
            index: 0,
            phase: Phase::Finalization,
            pallet: "Test".to_string(),
            pallet_index: 0,
            variant: "A".to_string(),
            variant_index: 0,
            fields: vec![Value::unnamed_composite(vec![Value::u128(1)])],
        },
    );
    assert!(event_details.next().is_none());
}
#[test]
fn event_containing_explicit_index() {
    #[derive(Clone, Debug, PartialEq, Eq, Decode, Encode, TypeInfo)]
    #[repr(u8)]
    #[allow(trivial_numeric_casts, clippy::unnecessary_cast)] // required because the Encode derive produces a warning otherwise
    pub enum MyType {
        B = 10u8,
    }
    #[derive(Clone, Debug, PartialEq, Decode, Encode, TypeInfo)]
    enum Event {
        A(MyType),
    }
    // Fake metadata describing the single event variant above:
    let metadata = metadata::<Event>();
    // Encode one event wrapping the explicitly-indexed enum:
    let records = vec![event_record(Phase::Finalization, Event::A(MyType::B))];
    let events = events::<Event>(metadata, records);
    // The explicit discriminant should not upset dynamic decoding; the field
    // decodes as the variant name with no values:
    let mut event_details = events.iter();
    assert_raw_events_match(
        event_details.next().unwrap().unwrap(),
        TestRawEventDetails {
            index: 0,
            phase: Phase::Finalization,
            pallet: "Test".to_string(),
            pallet_index: 0,
            variant: "A".to_string(),
            variant_index: 0,
            fields: vec![Value::unnamed_variant("B", vec![])],
        },
    );
    assert!(event_details.next().is_none());
}
#[test]
fn topics() {
    #[derive(Clone, Debug, PartialEq, Decode, Encode, TypeInfo, scale_decode::DecodeAsType)]
    enum Event {
        A(u8, bool, Vec<String>),
    }
    // Fake metadata describing the single event variant above:
    let metadata = metadata::<Event>();
    // Build a record carrying two topic hashes alongside the event:
    let event = Event::A(1, true, vec!["Hi".into()]);
    let topics = vec![H256::from_low_u64_le(123), H256::from_low_u64_le(456)];
    let records = vec![EventRecord::new(Phase::ApplyExtrinsic(123), event, topics.clone())];
    let events = events::<Event>(metadata, records);
    let ev = events
        .iter()
        .next()
        .expect("one event expected")
        .expect("event should be extracted OK");
    // The topics should round-trip unchanged:
    assert_eq!(topics, ev.topics());
}
}
+48
View File
@@ -0,0 +1,48 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! # subxt-core
//!
//! A `#[no_std]` compatible subset of the functionality provided in the `subxt` crate. This
//! contains the core logic for encoding and decoding things, but nothing related to networking.
//!
//! Here's an overview of the main things exposed here:
//!
//! - [`blocks`]: decode and explore block bodies.
//! - [`constants`]: access and validate the constant addresses in some metadata.
//! - [`custom_values`]: access and validate the custom value addresses in some metadata.
//! - [`storage`]: construct storage request payloads and decode the results you'd get back.
//! - [`tx`]: construct and sign transactions (extrinsics).
//! - [`runtime_api`]: construct runtime API request payloads and decode the results you'd get back.
//! - [`events`]: decode and explore events.
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
pub extern crate alloc;
pub mod blocks;
pub mod client;
pub mod config;
pub mod constants;
pub mod custom_values;
pub mod dynamic;
pub mod error;
pub mod events;
pub mod runtime_api;
pub mod storage;
pub mod tx;
pub mod utils;
pub mod view_functions;
pub use config::Config;
pub use error::Error;
pub use pezkuwi_subxt_metadata::Metadata;
/// Re-exports of some of the key external crates.
pub mod ext {
    // Re-exported so that downstream code (including macro-generated code)
    // can use the exact same crate versions as this crate without declaring
    // them as direct dependencies.
    pub use codec;
    pub use scale_decode;
    pub use scale_encode;
    pub use scale_value;
}
+117
View File
@@ -0,0 +1,117 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Encode runtime API payloads, decode the associated values returned from them, and validate
//! static runtime API payloads.
//!
//! # Example
//!
//! ```rust
//! use pezkuwi_subxt_macro::subxt;
//! use pezkuwi_subxt_core::runtime_api;
//! use pezkuwi_subxt_core::Metadata;
//!
//! // If we generate types without `subxt`, we need to point to `::pezkuwi_subxt_core`:
//! #[subxt(
//! crate = "::pezkuwi_subxt_core",
//! runtime_metadata_path = "../artifacts/pezkuwi_metadata_small.scale",
//! )]
//! pub mod pezkuwi {}
//!
//! // Some metadata we'll use to work with storage entries:
//! let metadata_bytes = include_bytes!("../../../artifacts/pezkuwi_metadata_small.scale");
//! let metadata = Metadata::decode_from(&metadata_bytes[..]).unwrap();
//!
//! // Build a runtime API payload to query the available metadata versions.
//! let payload = pezkuwi::apis().metadata().metadata_versions();
//!
//! // We can validate that the payload is compatible with the given metadata.
//! runtime_api::validate(&payload, &metadata).unwrap();
//!
//! // Encode the payload name and arguments to hand to a node:
//! let _call_name = runtime_api::call_name(&payload);
//! let _call_args = runtime_api::call_args(&payload, &metadata).unwrap();
//!
//! // If we were to obtain a value back from the node, we could
//! // then decode it using the same payload and metadata like so:
//! let value_bytes = hex::decode("080e0000000f000000").unwrap();
//! let value = runtime_api::decode_value(&mut &*value_bytes, &payload, &metadata).unwrap();
//!
//! println!("Available metadata versions: {value:?}");
//! ```
pub mod payload;
use crate::{Metadata, error::RuntimeApiError};
use alloc::{
format,
string::{String, ToString},
vec::Vec,
};
use payload::Payload;
use scale_decode::IntoVisitor;
/// Run the validation logic against some runtime API payload you'd like to use. Returns `Ok(())`
/// if the payload is valid (or if it's not possible to check since the payload has no validation
/// hash). Returns an error if the payload was not valid or something went wrong trying to validate
/// it (e.g. the runtime API in question does not exist at all).
pub fn validate<P: Payload>(payload: P, metadata: &Metadata) -> Result<(), RuntimeApiError> {
    // Dynamic payloads carry no validation hash; there is nothing to check.
    let Some(expected_hash) = payload.validation_hash() else {
        return Ok(());
    };
    let trait_name = payload.trait_name();
    let method_name = payload.method_name();
    // Look up the runtime API trait, and then the method within it:
    let api_trait = metadata
        .runtime_api_trait_by_name(trait_name)
        .ok_or_else(|| RuntimeApiError::TraitNotFound(trait_name.to_string()))?;
    let api_method = api_trait.method_by_name(method_name).ok_or_else(|| {
        RuntimeApiError::MethodNotFound {
            trait_name: trait_name.to_string(),
            method_name: method_name.to_string(),
        }
    })?;
    // The statically generated hash must match the one derived from metadata:
    if expected_hash == api_method.hash() {
        Ok(())
    } else {
        Err(RuntimeApiError::IncompatibleCodegen)
    }
}
/// Return the name of the runtime API call from the payload.
///
/// This is the trait name and method name joined by an underscore.
pub fn call_name<P: Payload>(payload: P) -> String {
    let trait_name = payload.trait_name();
    let method_name = payload.method_name();
    format!("{trait_name}_{method_name}")
}
/// Return the encoded call args given a runtime API payload.
pub fn call_args<P: Payload>(payload: P, metadata: &Metadata) -> Result<Vec<u8>, RuntimeApiError> {
    // Delegate the SCALE encoding of the arguments to `frame_decode`, which
    // looks the argument types up in the provided metadata.
    frame_decode::runtime_apis::encode_runtime_api_inputs(
        payload.trait_name(),
        payload.method_name(),
        payload.args(),
        metadata,
        metadata.types(),
    )
    .map_err(RuntimeApiError::CouldNotEncodeInputs)
}
/// Decode the value bytes at the location given by the provided runtime API payload.
pub fn decode_value<P: Payload>(
    bytes: &mut &[u8],
    payload: P,
    metadata: &Metadata,
) -> Result<P::ReturnType, RuntimeApiError> {
    // Delegate decoding to `frame_decode`, handing it a visitor for the
    // return type declared on the payload.
    frame_decode::runtime_apis::decode_runtime_api_response(
        payload.trait_name(),
        payload.method_name(),
        bytes,
        metadata,
        metadata.types(),
        P::ReturnType::into_visitor(),
    )
    .map_err(RuntimeApiError::CouldNotDecodeResponse)
}
+158
View File
@@ -0,0 +1,158 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! This module contains the trait and types used to represent
//! runtime API calls that can be made.
use alloc::{borrow::Cow, string::String};
use core::marker::PhantomData;
use derive_where::derive_where;
use frame_decode::runtime_apis::IntoEncodableValues;
use scale_decode::DecodeAsType;
/// This represents a runtime API payload that can be used to call a Runtime API on
/// a chain and decode the response.
pub trait Payload {
    /// Type of the arguments.
    type ArgsType: IntoEncodableValues;
    /// The return type of the function call.
    type ReturnType: DecodeAsType;
    /// The runtime API trait name.
    fn trait_name(&self) -> &str;
    /// The runtime API method name.
    fn method_name(&self) -> &str;
    /// The input arguments.
    fn args(&self) -> &Self::ArgsType;
    /// Returns the statically generated validation hash.
    ///
    /// Defaults to `None`, in which case no validation against metadata
    /// is performed for this payload.
    fn validation_hash(&self) -> Option<[u8; 32]> {
        None
    }
}
// Any reference to a payload is a valid payload.
// Every method simply delegates to the referenced payload.
impl<P: Payload + ?Sized> Payload for &'_ P {
    type ArgsType = P::ArgsType;
    type ReturnType = P::ReturnType;
    fn trait_name(&self) -> &str {
        P::trait_name(*self)
    }
    fn method_name(&self) -> &str {
        P::method_name(*self)
    }
    fn args(&self) -> &Self::ArgsType {
        P::args(*self)
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        P::validation_hash(*self)
    }
}
/// A runtime API payload containing the generic argument data
/// and interpreting the result of the call as `ReturnTy`.
///
/// This can be created from static values (ie those generated
/// via the `subxt` macro) or dynamic values via [`dynamic`].
#[derive_where(Clone, Debug, Eq, Ord, PartialEq, PartialOrd; ArgsType)]
pub struct StaticPayload<ArgsType, ReturnType> {
    // Cow so that codegen can use `&'static str` with no allocation, while
    // runtime construction can pass owned Strings.
    trait_name: Cow<'static, str>,
    method_name: Cow<'static, str>,
    args: ArgsType,
    // Only `new_static` sets this to `Some`; `None` disables validation
    // against metadata.
    validation_hash: Option<[u8; 32]>,
    // `ReturnType` is only used at the type level.
    _marker: PhantomData<ReturnType>,
}
/// A dynamic runtime API payload.
///
/// This is just a [`StaticPayload`]; the alias exists for readability when
/// payloads are constructed at runtime via [`dynamic`].
pub type DynamicPayload<ArgsType, ReturnType> = StaticPayload<ArgsType, ReturnType>;
// The `Payload` accessors just expose the fields stored on `StaticPayload`.
impl<ArgsType: IntoEncodableValues, ReturnType: DecodeAsType> Payload
    for StaticPayload<ArgsType, ReturnType>
{
    type ArgsType = ArgsType;
    type ReturnType = ReturnType;
    fn trait_name(&self) -> &str {
        &self.trait_name
    }
    fn method_name(&self) -> &str {
        &self.method_name
    }
    fn args(&self) -> &Self::ArgsType {
        &self.args
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        self.validation_hash
    }
}
impl<ArgsType, ReturnTy> StaticPayload<ArgsType, ReturnTy> {
/// Create a new [`StaticPayload`].
pub fn new(
trait_name: impl Into<String>,
method_name: impl Into<String>,
args: ArgsType,
) -> Self {
StaticPayload {
trait_name: trait_name.into().into(),
method_name: method_name.into().into(),
args,
validation_hash: None,
_marker: PhantomData,
}
}
/// Create a new static [`StaticPayload`] using static function name
/// and scale-encoded argument data.
///
/// This is only expected to be used from codegen.
#[doc(hidden)]
pub fn new_static(
trait_name: &'static str,
method_name: &'static str,
args: ArgsType,
hash: [u8; 32],
) -> StaticPayload<ArgsType, ReturnTy> {
StaticPayload {
trait_name: Cow::Borrowed(trait_name),
method_name: Cow::Borrowed(method_name),
args,
validation_hash: Some(hash),
_marker: core::marker::PhantomData,
}
}
/// Do not validate this call prior to submitting it.
pub fn unvalidated(self) -> Self {
Self { validation_hash: None, ..self }
}
/// Returns the trait name.
pub fn trait_name(&self) -> &str {
&self.trait_name
}
/// Returns the method name.
pub fn method_name(&self) -> &str {
&self.method_name
}
}
/// Create a new [`DynamicPayload`].
pub fn dynamic<ArgsType, ReturnType>(
trait_name: impl Into<String>,
method_name: impl Into<String>,
args_data: ArgsType,
) -> DynamicPayload<ArgsType, ReturnType> {
DynamicPayload::new(trait_name, method_name, args_data)
}
+169
View File
@@ -0,0 +1,169 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Construct addresses to access storage entries with.
use crate::utils::{Maybe, YesMaybe};
use alloc::{borrow::Cow, string::String, vec::Vec};
use frame_decode::storage::{IntoDecodableValues, IntoEncodableValues};
use scale_decode::DecodeAsType;
/// A storage address. This allows access to a given storage entry, which can then
/// be iterated over or fetched from by providing the relevant set of keys, or
/// otherwise inspected.
pub trait Address {
    /// All of the keys required to get to an individual value at this address.
    /// Keys must always impl [`IntoEncodableValues`], and for iteration must
    /// also impl [`frame_decode::storage::IntoDecodableValues`].
    type KeyParts: IntoEncodableValues + IntoDecodableValues;
    /// Type of the storage value at this location.
    type Value: DecodeAsType;
    /// Does the address point to a plain value (as opposed to a map)?
    /// Set to [`crate::utils::Yes`] to enable APIs which require a map,
    /// or [`crate::utils::Maybe`] to enable APIs which allow a map.
    type IsPlain: YesMaybe;
    /// The pallet containing this storage entry.
    fn pallet_name(&self) -> &str;
    /// The name of the storage entry.
    fn entry_name(&self) -> &str;
    /// Return a unique hash for this address which can be used to validate it against metadata.
    ///
    /// Returning `None` means the address carries no static expectation, and
    /// validation will be skipped for it.
    fn validation_hash(&self) -> Option<[u8; 32]>;
}
// Any reference to an address is a valid address.
// Every method simply delegates to the referenced address.
impl<A: Address + ?Sized> Address for &'_ A {
    type KeyParts = A::KeyParts;
    type Value = A::Value;
    type IsPlain = A::IsPlain;
    fn pallet_name(&self) -> &str {
        A::pallet_name(*self)
    }
    fn entry_name(&self) -> &str {
        A::entry_name(*self)
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        A::validation_hash(*self)
    }
}
/// An address which is generated by the static APIs.
pub struct StaticAddress<KeyParts, Value, IsPlain> {
    // Cow so that codegen can use `&'static str` with no allocation, while
    // runtime construction can pass owned Strings.
    pallet_name: Cow<'static, str>,
    entry_name: Cow<'static, str>,
    // Only `new_static` sets this to `Some`; `None` disables validation
    // against metadata.
    validation_hash: Option<[u8; 32]>,
    // The type parameters are only used at the type level.
    marker: core::marker::PhantomData<(KeyParts, Value, IsPlain)>,
}
impl<KeyParts, Value, IsPlain> Clone for StaticAddress<KeyParts, Value, IsPlain> {
fn clone(&self) -> Self {
Self {
pallet_name: self.pallet_name.clone(),
entry_name: self.entry_name.clone(),
validation_hash: self.validation_hash,
marker: self.marker,
}
}
}
// Implemented by hand so that `Debug` doesn't require the phantom type
// parameters to be `Debug`; the `marker` field is deliberately omitted
// from the output.
impl<KeyParts, Value, IsPlain> core::fmt::Debug for StaticAddress<KeyParts, Value, IsPlain> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("StaticAddress")
            .field("pallet_name", &self.pallet_name)
            .field("entry_name", &self.entry_name)
            .field("validation_hash", &self.validation_hash)
            .finish()
    }
}
impl<KeyParts, Value, IsPlain> StaticAddress<KeyParts, Value, IsPlain> {
    /// Create a new [`StaticAddress`] using static strings for the pallet and call name.
    /// This is only expected to be used from codegen.
    #[doc(hidden)]
    pub fn new_static(pallet_name: &'static str, entry_name: &'static str, hash: [u8; 32]) -> Self {
        Self {
            pallet_name: Cow::Borrowed(pallet_name),
            entry_name: Cow::Borrowed(entry_name),
            validation_hash: Some(hash),
            marker: core::marker::PhantomData,
        }
    }
    /// Create a new address.
    ///
    /// No validation hash is attached, so the address will not be checked
    /// against metadata before use.
    pub fn new(pallet_name: impl Into<String>, entry_name: impl Into<String>) -> Self {
        Self {
            pallet_name: Cow::Owned(pallet_name.into()),
            entry_name: Cow::Owned(entry_name.into()),
            validation_hash: None,
            marker: core::marker::PhantomData,
        }
    }
    /// Do not validate this storage entry prior to accessing it.
    pub fn unvalidated(self) -> Self {
        Self { validation_hash: None, ..self }
    }
}
// The `Address` accessors expose the stored names/hash; the generic
// parameters supply the associated types directly.
impl<KeyParts, Value, IsPlain> Address for StaticAddress<KeyParts, Value, IsPlain>
where
    KeyParts: IntoEncodableValues + IntoDecodableValues,
    Value: DecodeAsType,
    IsPlain: YesMaybe,
{
    type KeyParts = KeyParts;
    type Value = Value;
    type IsPlain = IsPlain;
    fn pallet_name(&self) -> &str {
        &self.pallet_name
    }
    fn entry_name(&self) -> &str {
        &self.entry_name
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        self.validation_hash
    }
}
// Convenience: a `(pallet_name, entry_name)` pair of string-likes is usable
// as a fully dynamic address. Keys and values are dynamic
// `scale_value::Value`s and there is no validation hash.
impl<A: AsRef<str>, B: AsRef<str>> Address for (A, B) {
    type KeyParts = Vec<scale_value::Value>;
    type Value = scale_value::Value;
    type IsPlain = Maybe;
    fn pallet_name(&self) -> &str {
        self.0.as_ref()
    }
    fn entry_name(&self) -> &str {
        self.1.as_ref()
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        None
    }
}
/// A dynamic address is simply a [`StaticAddress`] which asserts that the
/// entry *might* be a map and *might* have a default value.
///
/// Key parts and the value default to dynamic [`scale_value::Value`]s.
pub type DynamicAddress<KeyParts = Vec<scale_value::Value>, Value = scale_value::Value> =
    StaticAddress<KeyParts, Value, Maybe>;
/// Construct a new dynamic storage address. You can define the type of the
/// storage keys and value yourself here, but have no guarantee that they will
/// be correct.
pub fn dynamic<KeyParts: IntoEncodableValues, Value: DecodeAsType>(
    pallet_name: impl Into<String>,
    entry_name: impl Into<String>,
) -> DynamicAddress<KeyParts, Value> {
    // `StaticAddress::new` already accepts `impl Into<String>`, so the names
    // can be forwarded as-is.
    DynamicAddress::<KeyParts, Value>::new(pallet_name, entry_name)
}
+90
View File
@@ -0,0 +1,90 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Encode storage keys, decode storage values, and validate static storage addresses.
//!
//! # Example
//!
//! ```rust
//! use pezkuwi_subxt_signer::sr25519::dev;
//! use pezkuwi_subxt_macro::subxt;
//! use pezkuwi_subxt_core::storage;
//! use pezkuwi_subxt_core::Metadata;
//!
//! // If we generate types without `subxt`, we need to point to `::pezkuwi_subxt_core`:
//! #[subxt(
//! crate = "::pezkuwi_subxt_core",
//! runtime_metadata_path = "../artifacts/pezkuwi_metadata_small.scale",
//! )]
//! pub mod pezkuwi {}
//!
//! // Some metadata we'll use to work with storage entries:
//! let metadata_bytes = include_bytes!("../../../artifacts/pezkuwi_metadata_small.scale");
//! let metadata = Metadata::decode_from(&metadata_bytes[..]).unwrap();
//!
//! // Build a storage query to access account information.
//! let address = pezkuwi::storage().system().account();
//!
//! // We can validate that the address is compatible with the given metadata.
//! storage::validate(&address, &metadata).unwrap();
//!
//! // We can fetch details about the storage entry associated with this address:
//! let entry = storage::entry(address, &metadata).unwrap();
//!
//! // .. including generating a key to fetch the entry with:
//! let fetch_key = entry.fetch_key((dev::alice().public_key().into(),)).unwrap();
//!
//! // .. or generating a key to iterate over entries with at a given depth:
//! let iter_key = entry.iter_key(()).unwrap();
//!
//! // Given a value, we can decode it:
//! let value_bytes = hex::decode("00000000000000000100000000000000000064a7b3b6e00d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080").unwrap();
//! let value = entry.value(value_bytes).decode().unwrap();
//!
//! println!("Alice's account info: {value:?}");
//! ```
mod prefix_of;
mod storage_entry;
mod storage_key;
mod storage_key_value;
mod storage_value;
pub mod address;
use crate::{Metadata, error::StorageError};
use address::Address;
use alloc::string::ToString;
pub use prefix_of::{EqualOrPrefixOf, PrefixOf};
pub use storage_entry::{StorageEntry, entry};
pub use storage_key::{StorageHasher, StorageKey, StorageKeyPart};
pub use storage_key_value::StorageKeyValue;
pub use storage_value::StorageValue;
/// When the provided `address` is statically generated via the `#[subxt]` macro, this validates
/// that the shape of the storage value is the same as the shape expected by the static address.
///
/// When the provided `address` is dynamic (and thus does not come with any expectation of the
/// shape of the storage value), this just returns `Ok(())`.
pub fn validate<Addr: Address>(address: Addr, metadata: &Metadata) -> Result<(), StorageError> {
    // Dynamic addresses carry no validation hash; there is nothing to check.
    let Some(expected_hash) = address.validation_hash() else {
        return Ok(());
    };
    let pallet_name = address.pallet_name();
    let entry_name = address.entry_name();
    // Look up the pallet, and then the storage entry's hash within it:
    let pallet_metadata = metadata
        .pallet_by_name(pallet_name)
        .ok_or_else(|| StorageError::PalletNameNotFound(pallet_name.to_string()))?;
    let storage_hash = pallet_metadata.storage_hash(entry_name).ok_or_else(|| {
        StorageError::StorageEntryNotFound {
            pallet_name: pallet_name.to_string(),
            entry_name: entry_name.to_string(),
        }
    })?;
    // The statically generated hash must match the one derived from metadata:
    if storage_hash == expected_hash {
        Ok(())
    } else {
        Err(StorageError::IncompatibleCodegen)
    }
}
+195
View File
@@ -0,0 +1,195 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use alloc::vec::Vec;
use frame_decode::helpers::IntoEncodableValues;
use scale_encode::EncodeAsType;
/// For a given set of values that can be used as keys for a storage entry,
/// this is implemented for any prefixes of that set. ie if the keys `(A,B,C)`
/// would access a storage value, then `PrefixOf<(A,B,C)>` is implemented for
/// `(A,B)`, `(A,)` and `()`.
pub trait PrefixOf<Keys>: IntoEncodableValues {}
// If T impls PrefixOf<K>, &T impls PrefixOf<K>.
impl<K, T: PrefixOf<K>> PrefixOf<K> for &T {}
// Impls for tuples up to length 6 (storage maps rarely require more than 2 entries
// so it's very unlikely we'll ever need to go this deep).
// Note: only *strict* prefixes are implemented here; see `EqualOrPrefixOf`
// below for the version which also accepts the full key set itself.
impl<A> PrefixOf<(A,)> for () {}
impl<A, B> PrefixOf<(A, B)> for () {}
impl<A, B> PrefixOf<(A, B)> for (A,) where (A,): IntoEncodableValues {}
impl<A, B, C> PrefixOf<(A, B, C)> for () {}
impl<A, B, C> PrefixOf<(A, B, C)> for (A,) where (A,): IntoEncodableValues {}
impl<A, B, C> PrefixOf<(A, B, C)> for (A, B) where (A, B): IntoEncodableValues {}
impl<A, B, C, D> PrefixOf<(A, B, C, D)> for () {}
impl<A, B, C, D> PrefixOf<(A, B, C, D)> for (A,) where (A,): IntoEncodableValues {}
impl<A, B, C, D> PrefixOf<(A, B, C, D)> for (A, B) where (A, B): IntoEncodableValues {}
impl<A, B, C, D> PrefixOf<(A, B, C, D)> for (A, B, C) where (A, B, C): IntoEncodableValues {}
impl<A, B, C, D, E> PrefixOf<(A, B, C, D, E)> for () {}
impl<A, B, C, D, E> PrefixOf<(A, B, C, D, E)> for (A,) where (A,): IntoEncodableValues {}
impl<A, B, C, D, E> PrefixOf<(A, B, C, D, E)> for (A, B) where (A, B): IntoEncodableValues {}
impl<A, B, C, D, E> PrefixOf<(A, B, C, D, E)> for (A, B, C) where (A, B, C): IntoEncodableValues {}
impl<A, B, C, D, E> PrefixOf<(A, B, C, D, E)> for (A, B, C, D) where
    (A, B, C, D): IntoEncodableValues
{
}
impl<A, B, C, D, E, F> PrefixOf<(A, B, C, D, E, F)> for () {}
impl<A, B, C, D, E, F> PrefixOf<(A, B, C, D, E, F)> for (A,) where (A,): IntoEncodableValues {}
impl<A, B, C, D, E, F> PrefixOf<(A, B, C, D, E, F)> for (A, B) where (A, B): IntoEncodableValues {}
impl<A, B, C, D, E, F> PrefixOf<(A, B, C, D, E, F)> for (A, B, C) where
    (A, B, C): IntoEncodableValues
{
}
impl<A, B, C, D, E, F> PrefixOf<(A, B, C, D, E, F)> for (A, B, C, D) where
    (A, B, C, D): IntoEncodableValues
{
}
impl<A, B, C, D, E, F> PrefixOf<(A, B, C, D, E, F)> for (A, B, C, D, E) where
    (A, B, C, D, E): IntoEncodableValues
{
}
// Vecs are prefixes of vecs. The length is not statically known and so
// these would be given dynamically only, leaving the correct length to the user.
impl<T: EncodeAsType> PrefixOf<Vec<T>> for Vec<T> {}
// We don't use arrays in Subxt for storage entry access, but `IntoEncodableValues`
// supports them so let's allow impls which do use them to benefit too.
// `array_impl!(N: P...)` implements `PrefixOf<[T; N]>` for each strictly
// shorter array length `P` listed (down to the empty array).
macro_rules! array_impl {
    ($n:literal: $($p:literal)+) => {
        $(
            impl <T: EncodeAsType> PrefixOf<[T; $n]> for [T; $p] {}
        )+
    }
}
array_impl!(1: 0);
array_impl!(2: 1 0);
array_impl!(3: 2 1 0);
array_impl!(4: 3 2 1 0);
array_impl!(5: 4 3 2 1 0);
array_impl!(6: 5 4 3 2 1 0);
/// This is much like [`PrefixOf`] except that it also includes `Self` as an allowed type,
/// where `Self` must impl [`IntoEncodableValues`] just as every [`PrefixOf<Self>`] does.
pub trait EqualOrPrefixOf<K>: IntoEncodableValues {}
// Tuples
// For each tuple arity: strict prefixes come in via the blanket impl over
// `PrefixOf`, and the full tuple itself (by value or by reference) is added
// explicitly.
macro_rules! tuple_impl_eq {
    ($($t:ident)+) => {
        // Any T that is a PrefixOf<Keys> impls EqualOrPrefixOf<keys> too
        impl <$($t,)+ T: PrefixOf<($($t,)+)>> EqualOrPrefixOf<($($t,)+)> for T {}
        // Keys impls EqualOrPrefixOf<Keys>
        impl <$($t),+> EqualOrPrefixOf<($($t,)+)> for ($($t,)+) where ($($t,)+): IntoEncodableValues {}
        // &'a Keys impls EqualOrPrefixOf<Keys>
        impl <'a, $($t),+> EqualOrPrefixOf<($($t,)+)> for &'a ($($t,)+) where ($($t,)+): IntoEncodableValues {}
    }
}
tuple_impl_eq!(A);
tuple_impl_eq!(A B);
tuple_impl_eq!(A B C);
tuple_impl_eq!(A B C D);
tuple_impl_eq!(A B C D E);
tuple_impl_eq!(A B C D E F);
// Vec
impl<T: EncodeAsType> EqualOrPrefixOf<Vec<T>> for Vec<T> {}
impl<T: EncodeAsType> EqualOrPrefixOf<Vec<T>> for &Vec<T> {}
// Arrays
// `array_impl_eq!` adds the equal-length array (by value and by reference);
// strictly shorter arrays come in via the blanket impl over `PrefixOf` below.
macro_rules! array_impl_eq {
    ($($n:literal)+) => {
        $(
            impl <A: EncodeAsType> EqualOrPrefixOf<[A; $n]> for [A; $n] {}
            impl <'a, A: EncodeAsType> EqualOrPrefixOf<[A; $n]> for &'a [A; $n] {}
        )+
    }
}
// Any strict array prefix (via `PrefixOf`) also counts as `EqualOrPrefixOf`.
impl<const N: usize, A, T> EqualOrPrefixOf<[A; N]> for T where T: PrefixOf<[A; N]> {}
array_impl_eq!(1 2 3 4 5 6);
#[cfg(test)]
mod test {
    use super::*;
    // A compile-time probe: its methods only compile when the argument
    // satisfies the respective trait bound, so these tests mostly verify
    // that the impl lattice above accepts/rejects the right types.
    struct Test<Keys: IntoEncodableValues>(core::marker::PhantomData<Keys>);
    impl<Keys: IntoEncodableValues> Test<Keys> {
        fn new() -> Self {
            Test(core::marker::PhantomData)
        }
        // Compiles only for strict prefixes of `Keys`.
        fn accepts_prefix_of<P: PrefixOf<Keys>>(&self, keys: P) {
            let _encoder = keys.into_encodable_values();
        }
        // Compiles for `Keys` itself or any strict prefix of it.
        fn accepts_eq_or_prefix_of<P: EqualOrPrefixOf<Keys>>(&self, keys: P) {
            let _encoder = keys.into_encodable_values();
        }
    }
    #[test]
    fn test_prefix_of() {
        // In real life we'd have a struct a bit like this:
        let t = Test::<(bool, String, u64)>::new();
        // And we'd want to be able to call some method like this:
        //// This shouldn't work:
        // t.accepts_prefix_of((true, String::from("hi"), 0));
        t.accepts_prefix_of(&(true, String::from("hi")));
        t.accepts_prefix_of((true, String::from("hi")));
        t.accepts_prefix_of((true,));
        t.accepts_prefix_of(());
        let t = Test::<[u64; 5]>::new();
        //// This shouldn't work:
        // t.accepts_prefix_of([0,1,2,3,4]);
        // NOTE(review): the next call is duplicated; presumably one of the
        // two was meant to pass `&[0, 1, 2, 3]` to exercise the reference
        // impl (as the tuple case above does) — confirm.
        t.accepts_prefix_of([0, 1, 2, 3]);
        t.accepts_prefix_of([0, 1, 2, 3]);
        t.accepts_prefix_of([0, 1, 2]);
        t.accepts_prefix_of([0, 1]);
        t.accepts_prefix_of([0]);
        t.accepts_prefix_of([]);
    }
    #[test]
    fn test_eq_or_prefix_of() {
        // In real life we'd have a struct a bit like this:
        let t = Test::<(bool, String, u64)>::new();
        // And we'd want to be able to call some method like this:
        t.accepts_eq_or_prefix_of(&(true, String::from("hi"), 0));
        t.accepts_eq_or_prefix_of(&(true, String::from("hi")));
        t.accepts_eq_or_prefix_of((true,));
        t.accepts_eq_or_prefix_of(());
        t.accepts_eq_or_prefix_of((true, String::from("hi"), 0));
        t.accepts_eq_or_prefix_of((true, String::from("hi")));
        t.accepts_eq_or_prefix_of((true,));
        t.accepts_eq_or_prefix_of(());
        let t = Test::<[u64; 5]>::new();
        // NOTE(review): the two identical groups below look like one was
        // meant to use `&`-references (mirroring the tuple case) — confirm.
        t.accepts_eq_or_prefix_of([0, 1, 2, 3, 4]);
        t.accepts_eq_or_prefix_of([0, 1, 2, 3]);
        t.accepts_eq_or_prefix_of([0, 1, 2]);
        t.accepts_eq_or_prefix_of([0, 1]);
        t.accepts_eq_or_prefix_of([0]);
        t.accepts_eq_or_prefix_of([]);
        t.accepts_eq_or_prefix_of([0, 1, 2, 3, 4]);
        t.accepts_eq_or_prefix_of([0, 1, 2, 3]);
        t.accepts_eq_or_prefix_of([0, 1, 2]);
        t.accepts_eq_or_prefix_of([0, 1]);
        t.accepts_eq_or_prefix_of([0]);
        t.accepts_eq_or_prefix_of([]);
    }
}
+144
View File
@@ -0,0 +1,144 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::{PrefixOf, StorageKeyValue, StorageValue, address::Address};
use crate::{error::StorageError, utils::YesMaybe};
use alloc::{sync::Arc, vec::Vec};
use frame_decode::storage::{IntoEncodableValues, StorageInfo};
use pezkuwi_subxt_metadata::Metadata;
use scale_info::PortableRegistry;
/// Create a [`StorageEntry`] to work with a given storage entry.
///
/// This validates the address against the metadata and looks up the storage
/// information for the pallet/entry it points at, erroring if either fails.
pub fn entry<'info, Addr: Address>(
    address: Addr,
    metadata: &'info Metadata,
) -> Result<StorageEntry<'info, Addr>, StorageError> {
    use frame_decode::storage::StorageTypeInfo;

    // Make sure the address lines up with the metadata before anything else.
    super::validate(&address, metadata)?;

    let storage_info = metadata
        .storage_info(address.pallet_name(), address.entry_name())
        .map_err(|e| StorageError::StorageInfoError(e.into_owned()))?;

    let inner =
        StorageEntryInner { address, info: Arc::new(storage_info), types: metadata.types() };
    Ok(StorageEntry(Arc::new(inner)))
}
/// This represents a single storage entry (be it a plain value or map).
// A cheap handle: all state lives behind a single `Arc`, which is what lets
// the manual `Clone` impl below avoid requiring `Addr: Clone`.
pub struct StorageEntry<'info, Addr>(Arc<StorageEntryInner<'info, Addr>>);
impl<'info, Addr> Clone for StorageEntry<'info, Addr> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
// The shared state behind the `Arc` in `StorageEntry`.
struct StorageEntryInner<'info, Addr> {
    // The address this entry was built from; provides the pallet/entry names.
    address: Addr,
    // Storage metadata for this entry (key info and default value are read below).
    info: Arc<StorageInfo<'info, u32>>,
    // Type registry used to encode/decode keys and values.
    types: &'info PortableRegistry,
}
impl<'info, Addr: Address> StorageEntry<'info, Addr> {
    /// Name of the pallet containing this storage entry.
    pub fn pallet_name(&self) -> &str {
        self.0.address.pallet_name()
    }
    /// Name of the storage entry.
    pub fn entry_name(&self) -> &str {
        self.0.address.entry_name()
    }
    /// Is the storage entry a plain value?
    ///
    /// A plain value has no hashed key parts, hence the empty-keys check.
    pub fn is_plain(&self) -> bool {
        self.0.info.keys.is_empty()
    }
    /// Is the storage entry a map?
    pub fn is_map(&self) -> bool {
        !self.is_plain()
    }
    /// Instantiate a [`StorageKeyValue`] for this entry.
    ///
    /// It is expected that the bytes are obtained by iterating key/value pairs at this address.
    pub fn key_value(
        &self,
        key_bytes: impl Into<Arc<[u8]>>,
        value_bytes: Vec<u8>,
    ) -> StorageKeyValue<'info, Addr> {
        StorageKeyValue::new(self.0.info.clone(), self.0.types, key_bytes.into(), value_bytes)
    }
    /// Instantiate a [`StorageValue`] for this entry.
    ///
    /// It is expected that the bytes are obtained by fetching a value at this address.
    pub fn value(&self, bytes: Vec<u8>) -> StorageValue<'info, Addr::Value> {
        StorageValue::new(self.0.info.clone(), self.0.types, bytes)
    }
    /// Return the default [`StorageValue`] for this storage entry, if there is one.
    pub fn default_value(&self) -> Option<StorageValue<'info, Addr::Value>> {
        self.0.info.default_value.as_deref().map(|default_bytes| {
            StorageValue::new(self.0.info.clone(), self.0.types, default_bytes.to_vec())
        })
    }
    /// The keys for plain storage values are always 32 byte hashes.
    // This prefix is derived only from the pallet and entry names; for map
    // entries the per-key hashed parts are appended after it (see `key`).
    pub fn key_prefix(&self) -> [u8; 32] {
        frame_decode::storage::encode_storage_key_prefix(
            self.0.address.pallet_name(),
            self.0.address.entry_name(),
        )
    }
    // This has a less "strict" type signature and so is just used under the hood.
    // It encodes the prefix plus however many key parts are handed to it.
    fn key<Keys: IntoEncodableValues>(&self, key_parts: Keys) -> Result<Vec<u8>, StorageError> {
        let key = frame_decode::storage::encode_storage_key_with_info(
            self.0.address.pallet_name(),
            self.0.address.entry_name(),
            key_parts,
            &self.0.info,
            self.0.types,
        )
        .map_err(StorageError::StorageKeyEncodeError)?;
        Ok(key)
    }
    /// This constructs a key suitable for fetching a value at the given map storage address. This
    /// will error if we can see that the wrong number of key parts are provided.
    pub fn fetch_key(&self, key_parts: Addr::KeyParts) -> Result<Vec<u8>, StorageError> {
        // Fetching requires the full key, so every key part must be present.
        if key_parts.num_encodable_values() != self.0.info.keys.len() {
            Err(StorageError::WrongNumberOfKeyPartsProvidedForFetching {
                expected: self.0.info.keys.len(),
                got: key_parts.num_encodable_values(),
            })
        } else {
            self.key(key_parts)
        }
    }
    /// This constructs a key suitable for iterating at the given storage address. This will error
    /// if we can see that too many key parts are provided.
    pub fn iter_key<Keys: PrefixOf<Addr::KeyParts>>(
        &self,
        key_parts: Keys,
    ) -> Result<Vec<u8>, StorageError> {
        // Plain entries hold a single value; there is nothing to iterate.
        if Addr::IsPlain::is_yes() {
            Err(StorageError::CannotIterPlainEntry {
                pallet_name: self.0.address.pallet_name().into(),
                entry_name: self.0.address.entry_name().into(),
            })
        } else if key_parts.num_encodable_values() >= self.0.info.keys.len() {
            // Iteration needs a strict prefix: at most keys.len() - 1 parts.
            Err(StorageError::WrongNumberOfKeyPartsProvidedForIterating {
                max_expected: self.0.info.keys.len() - 1,
                got: key_parts.num_encodable_values(),
            })
        } else {
            self.key(key_parts)
        }
    }
}
+123
View File
@@ -0,0 +1,123 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::error::StorageKeyError;
use alloc::sync::Arc;
use core::marker::PhantomData;
use frame_decode::storage::{IntoDecodableValues, StorageInfo, StorageKey as StorageKeyPartInfo};
use scale_info::PortableRegistry;
pub use frame_decode::storage::StorageHasher;
/// This represents the different parts of a storage key.
pub struct StorageKey<'info, KeyParts> {
    // Decoded layout of each hashed part within `bytes`.
    info: Arc<StorageKeyPartInfo<u32>>,
    // Type registry used to decode key values.
    types: &'info PortableRegistry,
    // The raw key bytes; shared so parts can be handed out cheaply.
    bytes: Arc<[u8]>,
    // Ties this key to the address's expected key-parts type.
    marker: PhantomData<KeyParts>,
}
impl<'info, KeyParts: IntoDecodableValues> StorageKey<'info, KeyParts> {
    /// Decode the raw key bytes into their constituent parts, checking that
    /// every byte is accounted for.
    pub(crate) fn new(
        info: &StorageInfo<'info, u32>,
        types: &'info PortableRegistry,
        bytes: Arc<[u8]>,
    ) -> Result<Self, StorageKeyError> {
        let cursor = &mut &*bytes;
        let storage_key_info = frame_decode::storage::decode_storage_key_with_info(
            cursor, info, types,
        )
        .map_err(|e| StorageKeyError::StorageKeyDecodeError { bytes: bytes.to_vec(), error: e })?;
        // Trailing bytes mean the key doesn't match this entry's shape.
        if !cursor.is_empty() {
            return Err(StorageKeyError::LeftoverBytes { bytes: cursor.to_vec() });
        }
        Ok(StorageKey { info: Arc::new(storage_key_info), types, bytes, marker: PhantomData })
    }
    /// Attempt to decode the values contained within this storage key. The target type is
    /// given by the storage address used to access this entry. To decode into a custom type,
    /// use [`Self::parts()`] or [`Self::part()`] and decode each part.
    pub fn decode(&self) -> Result<KeyParts, StorageKeyError> {
        let values =
            frame_decode::storage::decode_storage_key_values(&self.bytes, &self.info, self.types)
                .map_err(StorageKeyError::CannotDecodeValuesInKey)?;
        Ok(values)
    }
    /// Iterate over the parts of this storage key. Each part of a storage key corresponds to a
    /// single value that has been hashed.
    pub fn parts(&self) -> impl ExactSizeIterator<Item = StorageKeyPart<'info>> {
        let parts_len = self.info.parts().len();
        (0..parts_len).map(move |index| StorageKeyPart {
            index,
            info: self.info.clone(),
            types: self.types,
            bytes: self.bytes.clone(),
        })
    }
    /// Return the part of the storage key at the provided index, or `None` if the index is out of
    /// bounds.
    pub fn part(&self, index: usize) -> Option<StorageKeyPart<'info>> {
        // Check against the part count directly rather than constructing a
        // throwaway iterator just to read its length (as `self.parts().len()`
        // previously did).
        (index < self.info.parts().len()).then(|| StorageKeyPart {
            index,
            info: self.info.clone(),
            types: self.types,
            bytes: self.bytes.clone(),
        })
    }
}
/// This represents a part of a storage key.
pub struct StorageKeyPart<'info> {
    // Which part of the key this handle points at.
    index: usize,
    // Layout information for all parts of the key (indexed by `index`).
    info: Arc<StorageKeyPartInfo<u32>>,
    // Type registry used to decode the part's value, where available.
    types: &'info PortableRegistry,
    // The full raw key bytes; this part is a sub-range of them.
    bytes: Arc<[u8]>,
}
impl<'info> StorageKeyPart<'info> {
    /// Get the raw bytes for this part of the storage key.
    ///
    /// The returned slice spans the hash bytes and, for hashers which preserve
    /// the value, the trailing value bytes.
    pub fn bytes(&self) -> &[u8] {
        let part = &self.info[self.index];
        let hash_range = part.hash_range();
        // If no value is embedded in the key, the slice ends where the hash does.
        let end = part.value().map(|v| v.range().end).unwrap_or(hash_range.end);
        &self.bytes[hash_range.start..end]
    }
    /// Get the hasher that was used to construct this part of the storage key.
    pub fn hasher(&self) -> StorageHasher {
        self.info[self.index].hasher()
    }
    /// For keys that were produced using "concat" or "identity" hashers, the value
    /// is available as a part of the key hash, allowing us to decode it into anything
    /// implementing [`scale_decode::DecodeAsType`]. If the key was produced using a
    /// different hasher, this will return `None`.
    pub fn decode_as<T: scale_decode::DecodeAsType>(&self) -> Result<Option<T>, StorageKeyError> {
        let part_info = &self.info[self.index];
        match part_info.value() {
            // Non-transparent hashers don't preserve the value; nothing to decode.
            None => Ok(None),
            Some(value_info) => {
                let value_bytes = &self.bytes[value_info.range()];
                T::decode_as_type(&mut &*value_bytes, *value_info.ty(), self.types)
                    .map(Some)
                    .map_err(|e| StorageKeyError::CannotDecodeValueInKey {
                        index: self.index,
                        error: e,
                    })
            }
        }
    }
}
@@ -0,0 +1,45 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::{Address, StorageKey, StorageValue};
use crate::error::StorageKeyError;
use alloc::{sync::Arc, vec::Vec};
use frame_decode::storage::StorageInfo;
use scale_info::PortableRegistry;
/// This represents a storage key/value pair, which is typically returned from
/// iterating over values in some storage map.
#[derive(Debug)]
pub struct StorageKeyValue<'info, Addr: Address> {
    // Raw bytes of the storage key; decoded lazily via `Self::key`.
    key: Arc<[u8]>,
    // This contains the storage information already:
    value: StorageValue<'info, Addr::Value>,
}
impl<'info, Addr: Address> StorageKeyValue<'info, Addr> {
    /// Build a key/value pair, wrapping the value bytes up with the storage info
    /// so that both key and value can be decoded on demand.
    pub(crate) fn new(
        info: Arc<StorageInfo<'info, u32>>,
        types: &'info PortableRegistry,
        key_bytes: Arc<[u8]>,
        value_bytes: Vec<u8>,
    ) -> Self {
        let value = StorageValue::new(info, types, value_bytes);
        Self { key: key_bytes, value }
    }
    /// Get the raw bytes for this storage entry's key.
    pub fn key_bytes(&self) -> &[u8] {
        self.key.as_ref()
    }
    /// Decode the key for this storage entry. This gives back a type from which we can
    /// decode specific parts of the key hash (where applicable).
    pub fn key(&'_ self) -> Result<StorageKey<'info, Addr::KeyParts>, StorageKeyError> {
        // The storage info lives on the value; reuse it to decode the key.
        StorageKey::new(&self.value.info, self.value.types, Arc::clone(&self.key))
    }
    /// Return the storage value.
    pub fn value(&self) -> &StorageValue<'info, Addr::Value> {
        &self.value
    }
}
+63
View File
@@ -0,0 +1,63 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::error::StorageValueError;
use alloc::{sync::Arc, vec::Vec};
use core::marker::PhantomData;
use frame_decode::storage::StorageInfo;
use scale_decode::DecodeAsType;
use scale_info::PortableRegistry;
/// This represents a storage value.
#[derive(Debug)]
pub struct StorageValue<'info, Value> {
    // Storage metadata; also borrowed by `StorageKeyValue::key` to decode keys.
    pub(crate) info: Arc<StorageInfo<'info, u32>>,
    // Type registry used to decode the value bytes.
    pub(crate) types: &'info PortableRegistry,
    // The raw SCALE-encoded value bytes.
    bytes: Vec<u8>,
    // The static type this value decodes to by default (see `Self::decode`).
    marker: PhantomData<Value>,
}
impl<'info, Value: DecodeAsType> StorageValue<'info, Value> {
    /// Wrap some raw value bytes together with the storage info needed to
    /// decode them later.
    pub(crate) fn new(
        info: Arc<StorageInfo<'info, u32>>,
        types: &'info PortableRegistry,
        bytes: Vec<u8>,
    ) -> StorageValue<'info, Value> {
        StorageValue { info, types, bytes, marker: PhantomData }
    }
    /// Get the raw bytes for this storage value.
    pub fn bytes(&self) -> &[u8] {
        &self.bytes
    }
    /// Consume this storage value and return the raw bytes.
    pub fn into_bytes(self) -> Vec<u8> {
        // We already own the bytes; return them directly rather than the
        // redundant copy that `.to_vec()` previously made.
        self.bytes
    }
    /// Decode this storage value into the provided response type.
    pub fn decode(&self) -> Result<Value, StorageValueError> {
        self.decode_as::<Value>()
    }
    /// Decode this storage value into an arbitrary type.
    ///
    /// Errors if decoding fails or if any bytes are left over afterwards
    /// (which would indicate the type doesn't match the stored value).
    pub fn decode_as<T: DecodeAsType>(&self) -> Result<T, StorageValueError> {
        let cursor = &mut &*self.bytes;
        let value = frame_decode::storage::decode_storage_value_with_info(
            cursor,
            &self.info,
            self.types,
            T::into_visitor(),
        )
        .map_err(StorageValueError::CannotDecode)?;
        if !cursor.is_empty() {
            return Err(StorageValueError::LeftoverBytes { bytes: cursor.to_vec() });
        }
        Ok(value)
    }
}
+437
View File
@@ -0,0 +1,437 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Construct and sign transactions.
//!
//! # Example
//!
//! ```rust
//! use pezkuwi_subxt_signer::sr25519::dev;
//! use pezkuwi_subxt_macro::subxt;
//! use pezkuwi_subxt_core::config::{PezkuwiConfig, HashFor};
//! use pezkuwi_subxt_core::config::DefaultExtrinsicParamsBuilder as Params;
//! use pezkuwi_subxt_core::tx;
//! use pezkuwi_subxt_core::utils::H256;
//! use pezkuwi_subxt_core::Metadata;
//!
//! // If we generate types without `subxt`, we need to point to `::pezkuwi_subxt_core`:
//! #[subxt(
//! crate = "::pezkuwi_subxt_core",
//! runtime_metadata_path = "../artifacts/pezkuwi_metadata_small.scale",
//! )]
//! pub mod pezkuwi {}
//!
//! // Gather some other information about the chain that we'll need to construct valid extrinsics:
//! let state = tx::ClientState::<PezkuwiConfig> {
//! metadata: {
//! let metadata_bytes = include_bytes!("../../../artifacts/pezkuwi_metadata_small.scale");
//! Metadata::decode_from(&metadata_bytes[..]).unwrap()
//! },
//! genesis_hash: {
//! let h = "91b171bb158e2d3848fa23a9f1c25182fb8e20313b2c1eb49219da7a70ce90c3";
//! let bytes = hex::decode(h).unwrap();
//! H256::from_slice(&bytes)
//! },
//! runtime_version: tx::RuntimeVersion {
//! spec_version: 9370,
//! transaction_version: 20,
//! }
//! };
//!
//! // Now we can build a balance transfer extrinsic.
//! let dest = dev::bob().public_key().into();
//! let call = pezkuwi::tx().balances().transfer_allow_death(dest, 10_000);
//! let params = Params::new().tip(1_000).nonce(0).build();
//!
//! // We can validate that this lines up with the given metadata:
//! tx::validate(&call, &state.metadata).unwrap();
//!
//! // We can build a signed transaction:
//! let signed_call = tx::create_v4_signed(&call, &state, params)
//! .unwrap()
//! .sign(&dev::alice());
//!
//! // And log it:
//! println!("Tx: 0x{}", hex::encode(signed_call.encoded()));
//! ```
pub mod payload;
pub mod signer;
use crate::{
Metadata,
config::{Config, ExtrinsicParams, ExtrinsicParamsEncoder, HashFor, Hasher},
error::ExtrinsicError,
utils::Encoded,
};
use alloc::{borrow::Cow, string::ToString, vec::Vec};
use codec::{Compact, Encode};
use payload::Payload;
use pezsp_crypto_hashing::blake2_256;
use signer::Signer as SignerT;
// Expose these here since we expect them in some calls below.
pub use crate::client::{ClientState, RuntimeVersion};
/// Run the validation logic against some extrinsic you'd like to submit. Returns `Ok(())`
/// if the call is valid (or if it's not possible to check since the call has no validation hash).
/// Return an error if the call was not valid or something went wrong trying to validate it (ie
/// the pallet or call in question do not exist at all).
pub fn validate<Call: Payload>(call: &Call, metadata: &Metadata) -> Result<(), ExtrinsicError> {
    // Dynamically built calls carry no hash; there is nothing to check.
    let Some(details) = call.validation_details() else {
        return Ok(());
    };
    let pallet = metadata
        .pallet_by_name(details.pallet_name)
        .ok_or_else(|| ExtrinsicError::PalletNameNotFound(details.pallet_name.to_string()))?;
    let expected_hash =
        pallet.call_hash(details.call_name).ok_or_else(|| ExtrinsicError::CallNameNotFound {
            pallet_name: details.pallet_name.to_string(),
            call_name: details.call_name.to_string(),
        })?;
    // The statically generated hash must match the one in the metadata.
    if details.hash == expected_hash { Ok(()) } else { Err(ExtrinsicError::IncompatibleCodegen) }
}
/// Returns the suggested transaction versions to build for a given chain, or an error
/// if Subxt doesn't support any version expected by the chain.
///
/// If the result is [`TransactionVersion::V4`], use the `v4` methods in this module. If it's
/// [`TransactionVersion::V5`], use the `v5` ones.
pub fn suggested_version(metadata: &Metadata) -> Result<TransactionVersion, ExtrinsicError> {
    let supported = metadata.extrinsic().supported_versions();
    // Prefer v4 where available, falling back to v5.
    let version = if supported.contains(&4) {
        TransactionVersion::V4
    } else if supported.contains(&5) {
        TransactionVersion::V5
    } else {
        return Err(ExtrinsicError::UnsupportedVersion);
    };
    Ok(version)
}
/// The transaction versions supported by Subxt.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum TransactionVersion {
    /// v4 transactions (signed and unsigned transactions).
    /// Built via the `create_v4_*` functions in this module.
    V4,
    /// v5 transactions (bare and general transactions).
    /// Built via the `create_v5_*` functions in this module.
    V5,
}
/// Return the SCALE encoded bytes representing the call data of the transaction.
pub fn call_data<Call: Payload>(
    call: &Call,
    metadata: &Metadata,
) -> Result<Vec<u8>, ExtrinsicError> {
    let mut out = Vec::new();
    call.encode_call_data_to(metadata, &mut out)?;
    Ok(out)
}
/// Creates a V4 "unsigned" transaction without submitting it.
// Thin wrapper: a v4 unsigned tx is just the shared unsigned encoding with
// transaction version byte 4.
pub fn create_v4_unsigned<T: Config, Call: Payload>(
    call: &Call,
    metadata: &Metadata,
) -> Result<Transaction<T>, ExtrinsicError> {
    create_unsigned_at_version(call, 4, metadata)
}
/// Creates a V5 "bare" transaction without submitting it.
// Thin wrapper: a v5 bare tx is just the shared unsigned encoding with
// transaction version byte 5.
pub fn create_v5_bare<T: Config, Call: Payload>(
    call: &Call,
    metadata: &Metadata,
) -> Result<Transaction<T>, ExtrinsicError> {
    create_unsigned_at_version(call, 5, metadata)
}
// Create a V4 "unsigned" transaction or V5 "bare" transaction.
//
// Layout: compact(len) ++ version_byte ++ call_data.
fn create_unsigned_at_version<T: Config, Call: Payload>(
    call: &Call,
    tx_version: u8,
    metadata: &Metadata,
) -> Result<Transaction<T>, ExtrinsicError> {
    // 1. Validate this call against the current node metadata if the call comes
    // with a hash allowing us to do so.
    validate(call, metadata)?;

    // 2. Encode the inner payload: version byte, then call data.
    let mut inner = Vec::new();
    tx_version.encode_to(&mut inner);
    call.encode_call_data_to(metadata, &mut inner)?;

    // 3. Prefix the compact-encoded byte length.
    let len = u32::try_from(inner.len()).expect("extrinsic size expected to be <4GB");
    let mut extrinsic = Compact(len).encode();
    extrinsic.extend(inner);

    // Wrap in Encoded to ensure that any more "encode" calls leave it in the right state.
    Ok(Transaction::from_bytes(extrinsic))
}
/// Construct a v4 extrinsic, ready to be signed.
// Note: the steps below are all fallible and their order determines which
// error a caller sees first; keep validation before encoding.
pub fn create_v4_signed<T: Config, Call: Payload>(
    call: &Call,
    client_state: &ClientState<T>,
    params: <T::ExtrinsicParams as ExtrinsicParams<T>>::Params,
) -> Result<PartialTransactionV4<T>, ExtrinsicError> {
    // 1. Validate this call against the current node metadata if the call comes
    // with a hash allowing us to do so.
    validate(call, &client_state.metadata)?;
    // 2. SCALE encode call data to bytes (pallet u8, call u8, call params).
    let call_data = call_data(call, &client_state.metadata)?;
    // 3. Construct our custom additional/extra params.
    let additional_and_extra_params =
        <T::ExtrinsicParams as ExtrinsicParams<T>>::new(client_state, params)?;
    // Return these details, ready to construct a signed extrinsic from.
    Ok(PartialTransactionV4 { call_data, additional_and_extra_params })
}
/// Construct a v5 "general" extrinsic, ready to be signed or emitted as is.
pub fn create_v5_general<T: Config, Call: Payload>(
    call: &Call,
    client_state: &ClientState<T>,
    params: <T::ExtrinsicParams as ExtrinsicParams<T>>::Params,
) -> Result<PartialTransactionV5<T>, ExtrinsicError> {
    // 1. Validate this call against the current node metadata if the call comes
    // with a hash allowing us to do so.
    validate(call, &client_state.metadata)?;
    // 2. Work out which TX extension version to target based on metadata.
    let tx_extensions_version = client_state
        .metadata
        .extrinsic()
        .transaction_extension_version_to_use_for_encoding();
    // 3. SCALE encode call data to bytes (pallet u8, call u8, call params).
    let call_data = call_data(call, &client_state.metadata)?;
    // 4. Construct our custom additional/extra params.
    let additional_and_extra_params =
        <T::ExtrinsicParams as ExtrinsicParams<T>>::new(client_state, params)?;
    // Return these details, ready to construct a signed extrinsic from.
    Ok(PartialTransactionV5 { call_data, additional_and_extra_params, tx_extensions_version })
}
/// A partially constructed V4 extrinsic, ready to be signed.
pub struct PartialTransactionV4<T: Config> {
    // SCALE-encoded call data (pallet index, call index, args).
    call_data: Vec<u8>,
    // The signed-extension data to encode into the signer payload and extrinsic.
    additional_and_extra_params: T::ExtrinsicParams,
}
impl<T: Config> PartialTransactionV4<T> {
    /// Return the bytes representing the call data for this partially constructed
    /// extrinsic.
    pub fn call_data(&self) -> &[u8] {
        &self.call_data
    }
    // Obtain bytes representing the signer payload and run call some function
    // with them. This can avoid an allocation in some cases.
    //
    // Payload layout: call_data ++ extra params ++ implicit params. Payloads
    // longer than 256 bytes are blake2_256-hashed before being handed to `f`.
    fn with_signer_payload<F, R>(&self, f: F) -> R
    where
        F: for<'a> FnOnce(Cow<'a, [u8]>) -> R,
    {
        let mut bytes = self.call_data.clone();
        self.additional_and_extra_params.encode_signer_payload_value_to(&mut bytes);
        self.additional_and_extra_params.encode_implicit_to(&mut bytes);
        if bytes.len() > 256 { f(Cow::Borrowed(&blake2_256(&bytes))) } else { f(Cow::Owned(bytes)) }
    }
    /// Return the V4 signer payload for this extrinsic. These are the bytes that must
    /// be signed in order to produce a valid signature for the extrinsic.
    pub fn signer_payload(&self) -> Vec<u8> {
        self.with_signer_payload(|bytes| bytes.to_vec())
    }
    /// Convert this [`PartialTransactionV4`] into a V4 signed [`Transaction`], ready to submit.
    /// The provided `signer` is responsible for providing the "from" address for the transaction,
    /// as well as providing a signature to attach to it.
    pub fn sign<Signer>(&self, signer: &Signer) -> Transaction<T>
    where
        Signer: SignerT<T>,
    {
        // Given our signer, we can sign the payload representing this extrinsic.
        let signature = self.with_signer_payload(|bytes| signer.sign(&bytes));
        // Now, use the signature and "from" address to build the extrinsic.
        self.sign_with_account_and_signature(signer.account_id(), &signature)
    }
    /// Convert this [`PartialTransactionV4`] into a V4 signed [`Transaction`], ready to submit.
    /// The provided `address` and `signature` will be used.
    //
    // Extrinsic layout: compact(len) ++ (0x80 | 4) ++ address ++ signature
    // ++ extra params ++ call_data. The order of these writes is part of the
    // wire format; do not reorder.
    pub fn sign_with_account_and_signature(
        &self,
        account_id: T::AccountId,
        signature: &T::Signature,
    ) -> Transaction<T> {
        let extrinsic = {
            let mut encoded_inner = Vec::new();
            // "is signed" + transaction protocol version (4)
            (0b10000000 + 4u8).encode_to(&mut encoded_inner);
            // from address for signature
            let address: T::Address = account_id.into();
            address.encode_to(&mut encoded_inner);
            // the signature
            signature.encode_to(&mut encoded_inner);
            // attach custom extra params
            self.additional_and_extra_params.encode_value_to(&mut encoded_inner);
            // and now, call data (remembering that it's been encoded already and just needs
            // appending)
            encoded_inner.extend(&self.call_data);
            // now, prefix byte length:
            let len = Compact(
                u32::try_from(encoded_inner.len()).expect("extrinsic size expected to be <4GB"),
            );
            let mut encoded = Vec::new();
            len.encode_to(&mut encoded);
            encoded.extend(encoded_inner);
            encoded
        };
        // Return an extrinsic ready to be submitted.
        Transaction::from_bytes(extrinsic)
    }
}
/// A partially constructed V5 general extrinsic, ready to be signed or emitted as-is.
pub struct PartialTransactionV5<T: Config> {
    // SCALE-encoded call data (pallet index, call index, args).
    call_data: Vec<u8>,
    // Transaction-extension data; a signature is injected into this by `sign`.
    additional_and_extra_params: T::ExtrinsicParams,
    // Which transaction-extension version byte to encode (taken from metadata).
    tx_extensions_version: u8,
}
impl<T: Config> PartialTransactionV5<T> {
    /// Return the bytes representing the call data for this partially constructed
    /// extrinsic.
    pub fn call_data(&self) -> &[u8] {
        &self.call_data
    }
    /// Return the V5 signer payload for this extrinsic. These are the bytes that must
    /// be signed in order to produce a valid signature for the extrinsic.
    //
    // Unlike v4, the v5 signer payload is always the blake2_256 hash of
    // call_data ++ extra params ++ implicit params.
    pub fn signer_payload(&self) -> [u8; 32] {
        let mut bytes = self.call_data.clone();
        self.additional_and_extra_params.encode_signer_payload_value_to(&mut bytes);
        self.additional_and_extra_params.encode_implicit_to(&mut bytes);
        blake2_256(&bytes)
    }
    /// Convert this [`PartialTransactionV5`] into a V5 "general" [`Transaction`].
    ///
    /// This transaction has not been explicitly signed. Use [`Self::sign`]
    /// or [`Self::sign_with_account_and_signature`] if you wish to provide a
    /// signature (this is usually a necessary step).
    //
    // Extrinsic layout: compact(len) ++ (0x40 | 5) ++ extensions_version
    // ++ extension values ++ call_data. The write order is part of the wire
    // format; do not reorder.
    pub fn to_transaction(&self) -> Transaction<T> {
        let extrinsic = {
            let mut encoded_inner = Vec::new();
            // "is general" + transaction protocol version (5)
            (0b01000000 + 5u8).encode_to(&mut encoded_inner);
            // Encode versions for the transaction extensions
            self.tx_extensions_version.encode_to(&mut encoded_inner);
            // Encode the actual transaction extensions values
            self.additional_and_extra_params.encode_value_to(&mut encoded_inner);
            // and now, call data (remembering that it's been encoded already and just needs
            // appending)
            encoded_inner.extend(&self.call_data);
            // now, prefix byte length:
            let len = Compact(
                u32::try_from(encoded_inner.len()).expect("extrinsic size expected to be <4GB"),
            );
            let mut encoded = Vec::new();
            len.encode_to(&mut encoded);
            encoded.extend(encoded_inner);
            encoded
        };
        // Return an extrinsic ready to be submitted.
        Transaction::from_bytes(extrinsic)
    }
    /// Convert this [`PartialTransactionV5`] into a V5 "general" [`Transaction`] with a signature.
    ///
    /// Signing the transaction injects the signature into the transaction extension data, which is
    /// why this method borrows self mutably. Signing repeatedly will override the previous
    /// signature.
    pub fn sign<Signer>(&mut self, signer: &Signer) -> Transaction<T>
    where
        Signer: SignerT<T>,
    {
        // Given our signer, we can sign the payload representing this extrinsic.
        let signature = signer.sign(&self.signer_payload());
        // Now, use the signature and "from" account to build the extrinsic.
        self.sign_with_account_and_signature(&signer.account_id(), &signature)
    }
    /// Convert this [`PartialTransactionV5`] into a V5 "general" [`Transaction`] with a signature.
    /// Prefer [`Self::sign`] if you have a [`SignerT`] instance to use.
    ///
    /// Signing the transaction injects the signature into the transaction extension data, which is
    /// why this method borrows self mutably. Signing repeatedly will override the previous
    /// signature.
    pub fn sign_with_account_and_signature(
        &mut self,
        account_id: &T::AccountId,
        signature: &T::Signature,
    ) -> Transaction<T> {
        // Inject the signature into the transaction extensions
        // before constructing it.
        self.additional_and_extra_params.inject_signature(account_id, signature);
        self.to_transaction()
    }
}
/// This represents a signed transaction that's ready to be submitted.
/// Use [`Transaction::encoded()`] or [`Transaction::into_encoded()`] to
/// get the bytes for it, or [`Transaction::hash_with()`] to hash the transaction
/// given an instance of [`Config::Hasher`].
pub struct Transaction<T> {
    // The full, length-prefixed, SCALE-encoded extrinsic bytes.
    encoded: Encoded,
    // Ties this transaction to a chain `Config` without storing one.
    marker: core::marker::PhantomData<T>,
}
impl<T: Config> Transaction<T> {
    /// Create a [`Transaction`] from some already-signed and prepared
    /// extrinsic bytes.
    pub fn from_bytes(tx_bytes: Vec<u8>) -> Self {
        Self { encoded: Encoded(tx_bytes), marker: core::marker::PhantomData }
    }
    /// Calculate and return the hash of the extrinsic, based on the provided hasher.
    /// If you don't have a hasher to hand, you can construct one using the metadata
    /// with `T::Hasher::new(&metadata)`. This will create a hasher suitable for the
    /// current chain where possible.
    pub fn hash_with(&self, hasher: T::Hasher) -> HashFor<T> {
        hasher.hash_of(&self.encoded)
    }
    /// Returns the SCALE encoded extrinsic bytes.
    pub fn encoded(&self) -> &[u8] {
        &self.encoded.0
    }
    /// Consumes this [`Transaction`] and returns the SCALE encoded
    /// extrinsic bytes.
    pub fn into_encoded(self) -> Vec<u8> {
        self.encoded.0
    }
}
+267
View File
@@ -0,0 +1,267 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! This module contains the trait and types used to represent
//! transactions that can be submitted.
use crate::{Metadata, error::ExtrinsicError};
use alloc::{
borrow::Cow,
boxed::Box,
string::{String, ToString},
};
use alloc::vec::Vec;
use codec::Encode;
use scale_encode::EncodeAsFields;
use scale_value::{Composite, Value, ValueDef, Variant};
/// This represents a transaction payload that can be submitted
/// to a node.
pub trait Payload {
    /// Encode call data to the provided output.
    fn encode_call_data_to(
        &self,
        metadata: &Metadata,
        out: &mut Vec<u8>,
    ) -> Result<(), ExtrinsicError>;
    /// Encode call data and return the output. This is a convenience
    /// wrapper around [`Payload::encode_call_data_to`].
    fn encode_call_data(&self, metadata: &Metadata) -> Result<Vec<u8>, ExtrinsicError> {
        let mut v = Vec::new();
        self.encode_call_data_to(metadata, &mut v)?;
        Ok(v)
    }
    /// Returns the details needed to validate the call, which
    /// include a statically generated hash, the pallet name,
    /// and the call name.
    ///
    /// The default of `None` means "no validation possible" (e.g. for
    /// dynamically constructed payloads); see `validate` in the parent module.
    fn validation_details(&self) -> Option<ValidationDetails<'_>> {
        None
    }
}
// Implement `Payload` for smart-pointer wrappers (`Box`, `Arc`, `Rc`) by
// delegating every method to the pointed-to payload. `Arc`/`Rc` impls are
// std-only; `Box` comes from `alloc` and is always available.
macro_rules! boxed_payload {
    ($ty:path) => {
        impl<T: Payload + ?Sized> Payload for $ty {
            fn encode_call_data_to(
                &self,
                metadata: &Metadata,
                out: &mut Vec<u8>,
            ) -> Result<(), ExtrinsicError> {
                self.as_ref().encode_call_data_to(metadata, out)
            }
            fn encode_call_data(&self, metadata: &Metadata) -> Result<Vec<u8>, ExtrinsicError> {
                self.as_ref().encode_call_data(metadata)
            }
            fn validation_details(&self) -> Option<ValidationDetails<'_>> {
                self.as_ref().validation_details()
            }
        }
    };
}
boxed_payload!(Box<T>);
#[cfg(feature = "std")]
boxed_payload!(std::sync::Arc<T>);
#[cfg(feature = "std")]
boxed_payload!(std::rc::Rc<T>);
/// Details required to validate the shape of a transaction payload against some metadata.
pub struct ValidationDetails<'a> {
    /// The pallet name.
    pub pallet_name: &'a str,
    /// The call name.
    pub call_name: &'a str,
    /// A hash (this is generated at compile time in our codegen)
    /// to compare against the runtime code.
    pub hash: [u8; 32],
}
/// A transaction payload containing some generic `CallData`.
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct DefaultPayload<CallData> {
    // `Cow` so that codegen can hand in `&'static str` without allocating,
    // while runtime construction can pass owned `String`s.
    pallet_name: Cow<'static, str>,
    call_name: Cow<'static, str>,
    call_data: CallData,
    // `Some` only when created via `new_static`; cleared by `unvalidated`.
    validation_hash: Option<[u8; 32]>,
}
/// The payload type used by static codegen.
pub type StaticPayload<Calldata> = DefaultPayload<Calldata>;
/// The type of a payload typically used for dynamic transaction payloads.
pub type DynamicPayload = DefaultPayload<Composite<()>>;
impl<CallData> DefaultPayload<CallData> {
/// Create a new [`DefaultPayload`].
pub fn new(
pallet_name: impl Into<String>,
call_name: impl Into<String>,
call_data: CallData,
) -> Self {
DefaultPayload {
pallet_name: Cow::Owned(pallet_name.into()),
call_name: Cow::Owned(call_name.into()),
call_data,
validation_hash: None,
}
}
/// Create a new [`DefaultPayload`] using static strings for the pallet and call name.
/// This is only expected to be used from codegen.
#[doc(hidden)]
pub fn new_static(
pallet_name: &'static str,
call_name: &'static str,
call_data: CallData,
validation_hash: [u8; 32],
) -> Self {
DefaultPayload {
pallet_name: Cow::Borrowed(pallet_name),
call_name: Cow::Borrowed(call_name),
call_data,
validation_hash: Some(validation_hash),
}
}
/// Do not validate this call prior to submitting it.
pub fn unvalidated(self) -> Self {
Self { validation_hash: None, ..self }
}
/// Returns the call data.
pub fn call_data(&self) -> &CallData {
&self.call_data
}
/// Returns the pallet name.
pub fn pallet_name(&self) -> &str {
&self.pallet_name
}
/// Returns the call name.
pub fn call_name(&self) -> &str {
&self.call_name
}
}
impl DefaultPayload<Composite<()>> {
    /// Convert the dynamic `Composite` payload into a [`Value`].
    /// This is useful if you want to use this as an argument for a
    /// larger dynamic call that wants to use this as a nested call.
    pub fn into_value(self) -> Value<()> {
        // The call becomes a variant named after the call, wrapped in a
        // variant named after the pallet.
        let inner = Variant { name: self.call_name.into_owned(), values: self.call_data };
        let call = Value { context: (), value: ValueDef::Variant(inner) };
        Value::unnamed_variant(self.pallet_name, [call])
    }
}
impl<CallData: EncodeAsFields> Payload for DefaultPayload<CallData> {
    // Encodes as: pallet index byte ++ call index byte ++ encoded call fields.
    fn encode_call_data_to(
        &self,
        metadata: &Metadata,
        out: &mut Vec<u8>,
    ) -> Result<(), ExtrinsicError> {
        let pallet = metadata
            .pallet_by_name(&self.pallet_name)
            .ok_or_else(|| ExtrinsicError::PalletNameNotFound(self.pallet_name.to_string()))?;
        let call = pallet.call_variant_by_name(&self.call_name).ok_or_else(|| {
            ExtrinsicError::CallNameNotFound {
                pallet_name: pallet.name().to_string(),
                call_name: self.call_name.to_string(),
            }
        })?;
        let pallet_index = pallet.call_index();
        let call_index = call.index;
        pallet_index.encode_to(out);
        call_index.encode_to(out);
        // Encode the call data against the field types declared in metadata.
        let mut fields =
            call.fields.iter().map(|f| scale_encode::Field::new(f.ty.id, f.name.as_deref()));
        self.call_data
            .encode_as_fields_to(&mut fields, metadata.types(), out)
            .map_err(ExtrinsicError::CannotEncodeCallData)?;
        Ok(())
    }
    fn validation_details(&self) -> Option<ValidationDetails<'_>> {
        // Only payloads built via `new_static` carry a hash to validate.
        self.validation_hash.map(|hash| ValidationDetails {
            pallet_name: &self.pallet_name,
            call_name: &self.call_name,
            hash,
        })
    }
}
/// Construct a transaction at runtime; essentially an alias to [`DefaultPayload::new()`]
/// which provides a [`Composite`] value for the call data.
pub fn dynamic(
    pallet_name: impl Into<String>,
    call_name: impl Into<String>,
    call_data: impl Into<Composite<()>>,
) -> DynamicPayload {
    // Normalise the call data into a `Composite` up front, then delegate.
    let data: Composite<()> = call_data.into();
    DefaultPayload::new(pallet_name, call_name, data)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::Metadata;
    use codec::Decode;
    use scale_value::Composite;
    // Decode the bundled test metadata fixture once per test.
    fn test_metadata() -> Metadata {
        let metadata_bytes = include_bytes!("../../../artifacts/pezkuwi_metadata_small.scale");
        Metadata::decode(&mut &metadata_bytes[..]).expect("Valid metadata")
    }
    #[test]
    fn encode_call_with_incompatible_types_returns_error() {
        let metadata = test_metadata();
        // Field types that deliberately don't match the Balances::transfer_allow_death signature.
        let incompatible_data = Composite::named([
            ("dest", scale_value::Value::bool(true)), // Boolean instead of MultiAddress
            ("value", scale_value::Value::string("not_a_number")), // String instead of u128
        ]);
        let payload = DefaultPayload::new("Balances", "transfer_allow_death", incompatible_data);
        let mut out = Vec::new();
        let result = payload.encode_call_data_to(&metadata, &mut out);
        assert!(result.is_err(), "Expected error when encoding with incompatible types");
    }
    #[test]
    fn encode_call_with_valid_data_succeeds() {
        let metadata = test_metadata();
        // Create a valid payload to ensure our error handling doesn't break valid cases
        // For MultiAddress, we'll use the Id variant with a 32-byte account
        let valid_address =
            scale_value::Value::unnamed_variant("Id", [scale_value::Value::from_bytes([0u8; 32])]);
        let valid_data =
            Composite::named([("dest", valid_address), ("value", scale_value::Value::u128(1000))]);
        let payload = DefaultPayload::new("Balances", "transfer_allow_death", valid_data);
        // This should succeed
        let mut out = Vec::new();
        let result = payload.encode_call_data_to(&metadata, &mut out);
        assert!(result.is_ok(), "Expected success when encoding with valid data");
        assert!(!out.is_empty(), "Expected encoded output to be non-empty");
    }
}
+22
View File
@@ -0,0 +1,22 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! A library to **sub**mit e**xt**rinsics to a
//! [bizinikiwi](https://github.com/pezkuwichain/bizinikiwi) node via RPC.
use crate::Config;
/// Signing transactions requires a [`Signer`]. This is responsible for
/// providing the "from" account that the transaction is being signed by,
/// as well as actually signing a SCALE encoded payload.
pub trait Signer<T: Config> {
    /// Return the "from" account ID.
    fn account_id(&self) -> T::AccountId;
    /// Takes a signer payload for an extrinsic, and returns a signature based on it.
    ///
    /// Some signers may fail, for instance because the hardware on which the keys are located has
    /// refused the operation.
    // NOTE(review): despite the doc above, this returns `T::Signature` directly
    // (not a `Result`), so a failing signer has no way to report an error other
    // than panicking — confirm this is the intended contract.
    fn sign(&self, signer_payload: &[u8]) -> T::Signature;
}
+188
View File
@@ -0,0 +1,188 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! The "default" Bizinikiwi/Pezkuwi AccountId. This is used in codegen, as well as signing related
//! bits. This doesn't contain much functionality itself, but is easy to convert to/from an
//! `sp_core::AccountId32` for instance, to gain functionality without forcing a dependency on
//! Bizinikiwi crates here.
use alloc::{format, string::String, vec, vec::Vec};
use codec::{Decode, Encode};
use serde::{Deserialize, Serialize};
use thiserror::Error as DeriveError;
/// A 32-byte cryptographic identifier. This is a simplified version of Bizinikiwi's
/// `sp_core::crypto::AccountId32`. To obtain more functionality, convert this into
/// that type.
// The derives give this type SCALE codec, type-info and scale-encode/decode
// support so it can be used directly in generated interfaces; Serialize and
// Deserialize are implemented manually below (SS58 string form).
#[derive(
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Encode,
    Decode,
    Debug,
    scale_encode::EncodeAsType,
    scale_decode::DecodeAsType,
    scale_info::TypeInfo,
)]
pub struct AccountId32(pub [u8; 32]);
impl AsRef<[u8]> for AccountId32 {
fn as_ref(&self) -> &[u8] {
&self.0[..]
}
}
impl AsRef<[u8; 32]> for AccountId32 {
fn as_ref(&self) -> &[u8; 32] {
&self.0
}
}
impl From<[u8; 32]> for AccountId32 {
fn from(x: [u8; 32]) -> Self {
AccountId32(x)
}
}
impl AccountId32 {
    // Return the ss58-check string for this key. Adapted from `sp_core::crypto`. We need this to
    // serialize our account appropriately but otherwise don't care.
    fn to_ss58check(&self) -> String {
        // For serializing to a string to obtain the account nonce, we use the default bizinikiwi
        // prefix (since we have no way to otherwise pick one). It doesn't really matter, since when
        // it's deserialized back in system_accountNextIndex, we ignore this (so long as it's
        // valid).
        const SUBSTRATE_SS58_PREFIX: u8 = 42;
        // prefix <= 63 just take up one byte at the start:
        let mut v = vec![SUBSTRATE_SS58_PREFIX];
        // then push the account ID bytes.
        v.extend(self.0);
        // then push a 2 byte checksum of what we have so far.
        let r = ss58hash(&v);
        v.extend(&r[0..2]);
        // then encode to base58.
        use base58::ToBase58;
        v.to_base58()
    }
    // This isn't strictly needed, but to give our AccountId32 a little more usefulness, we also
    // implement the logic needed to decode an AccountId32 from an SS58 encoded string. This is
    // exposed via a `FromStr` impl.
    fn from_ss58check(s: &str) -> Result<Self, FromSs58Error> {
        const CHECKSUM_LEN: usize = 2;
        // Always a 32 byte account ID in the body here.
        let body_len = 32;
        use base58::FromBase58;
        let data = s.from_base58().map_err(|_| FromSs58Error::BadBase58)?;
        // Need at least one prefix byte before indexing `data[0]` below.
        if data.len() < 2 {
            return Err(FromSs58Error::BadLength);
        }
        // SS58 prefixes <= 63 occupy one byte; 64..=127 occupy two bytes.
        // Larger prefixes aren't supported here.
        let prefix_len = match data[0] {
            0..=63 => 1,
            64..=127 => 2,
            _ => return Err(FromSs58Error::InvalidPrefix),
        };
        // The decoded payload must be exactly prefix + body + checksum.
        if data.len() != prefix_len + body_len + CHECKSUM_LEN {
            return Err(FromSs58Error::BadLength);
        }
        // The checksum covers the prefix and body bytes; compare against the
        // trailing 2 bytes of the payload.
        let hash = ss58hash(&data[0..body_len + prefix_len]);
        let checksum = &hash[0..CHECKSUM_LEN];
        if data[body_len + prefix_len..body_len + prefix_len + CHECKSUM_LEN] != *checksum {
            // Invalid checksum.
            return Err(FromSs58Error::InvalidChecksum);
        }
        // Extract the 32-byte account body (the prefix itself is ignored).
        let result = data[prefix_len..body_len + prefix_len]
            .try_into()
            .map_err(|_| FromSs58Error::BadLength)?;
        Ok(AccountId32(result))
    }
}
/// An error obtained from trying to interpret an SS58 encoded string into an AccountId32
// Variant meanings are carried by the `#[error]` display strings; the enum is
// deliberately `Copy` since every variant is data-free.
#[derive(Clone, Copy, Eq, PartialEq, Debug, DeriveError)]
#[allow(missing_docs)]
pub enum FromSs58Error {
    #[error("Base 58 requirement is violated")]
    BadBase58,
    #[error("Length is bad")]
    BadLength,
    #[error("Invalid checksum")]
    InvalidChecksum,
    #[error("Invalid SS58 prefix byte.")]
    InvalidPrefix,
}
// We do this just to get a checksum to help verify the validity of the address in to_ss58check
fn ss58hash(data: &[u8]) -> Vec<u8> {
    use blake2::{Blake2b512, Digest};
    // Domain-separation prefix mandated by the SS58 format.
    const PREFIX: &[u8] = b"SS58PRE";
    Blake2b512::new().chain_update(PREFIX).chain_update(data).finalize().to_vec()
}
impl Serialize for AccountId32 {
    // Serialize as the SS58 string form (default prefix 42; see `to_ss58check`).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.to_ss58check())
    }
}
impl<'de> Deserialize<'de> for AccountId32 {
    // Deserialize from any valid SS58 string; the prefix byte is accepted but
    // otherwise ignored (see `from_ss58check`).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        AccountId32::from_ss58check(&String::deserialize(deserializer)?)
            .map_err(|e| serde::de::Error::custom(format!("{e:?}")))
    }
}
impl core::fmt::Display for AccountId32 {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
write!(f, "{}", self.to_ss58check())
}
}
impl core::str::FromStr for AccountId32 {
type Err = FromSs58Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
AccountId32::from_ss58check(s)
}
}
#[cfg(test)]
mod test {
    use super::*;
    use sp_core::{self, crypto::Ss58Codec};
    use sp_keyring::sr25519::Keyring;
    // Cross-check our minimal SS58 implementation against the canonical
    // `sp_core` one using well-known dev accounts.
    #[test]
    fn ss58_is_compatible_with_bizinikiwi_impl() {
        let keyrings = vec![Keyring::Alice, Keyring::Bob, Keyring::Charlie];
        for keyring in keyrings {
            let bizinikiwi_account = keyring.to_account_id();
            let local_account = AccountId32(bizinikiwi_account.clone().into());
            // Both should encode to ss58 the same way:
            let bizinikiwi_ss58 = bizinikiwi_account.to_ss58check();
            assert_eq!(bizinikiwi_ss58, local_account.to_ss58check());
            // Both should decode from ss58 back to the same:
            assert_eq!(
                sp_core::crypto::AccountId32::from_ss58check(&bizinikiwi_ss58).unwrap(),
                bizinikiwi_account
            );
            assert_eq!(AccountId32::from_ss58check(&bizinikiwi_ss58).unwrap(), local_account);
        }
    }
}
+151
View File
@@ -0,0 +1,151 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! `AccountId20` is a representation of Ethereum address derived from hashing the public key.
use alloc::{format, string::String};
use codec::{Decode, Encode};
use keccak_hash::keccak;
use serde::{Deserialize, Serialize};
use thiserror::Error as DeriveError;
// Derives mirror those on `AccountId32`: SCALE codec, type info and
// scale-encode/decode support. Serialize/Deserialize are manual (EIP-55 hex
// checksum form) and implemented below. `Copy` is fine for 20 bytes.
#[derive(
    Copy,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Encode,
    Decode,
    Debug,
    scale_encode::EncodeAsType,
    scale_decode::DecodeAsType,
    scale_info::TypeInfo,
)]
/// Ethereum-compatible `AccountId`.
pub struct AccountId20(pub [u8; 20]);
impl AsRef<[u8]> for AccountId20 {
fn as_ref(&self) -> &[u8] {
&self.0[..]
}
}
impl AsRef<[u8; 20]> for AccountId20 {
fn as_ref(&self) -> &[u8; 20] {
&self.0
}
}
impl From<[u8; 20]> for AccountId20 {
fn from(x: [u8; 20]) -> Self {
AccountId20(x)
}
}
impl AccountId20 {
    /// Convert to a public key hash
    // Produces the EIP-55 mixed-case checksum form ("0x" + 40 hex chars): a hex
    // digit is uppercased when the matching nibble of keccak(lowercase hex) is >= 8.
    pub fn checksum(&self) -> String {
        let hex_address = hex::encode(self.0);
        let hash = keccak(hex_address.as_bytes());
        let mut checksummed = String::with_capacity(42);
        checksummed.push_str("0x");
        for (i, ch) in hex_address.chars().enumerate() {
            // Each hash byte holds two nibbles: high nibble for even i, low for odd.
            let shift = if i % 2 == 0 { 4 } else { 0 };
            let nibble = (hash[i / 2] >> shift) & 0xf;
            checksummed.push(if nibble >= 8 { ch.to_ascii_uppercase() } else { ch });
        }
        checksummed
    }
}
/// An error obtained from trying to interpret a hex encoded string into an AccountId20
// NOTE(review): `InvalidChecksum` is also returned for non-hex characters (see
// the `FromStr` impl), not only for an actual checksum mismatch — confirm the
// naming is intentional before relying on the variant semantics.
#[derive(Clone, Copy, Eq, PartialEq, Debug, DeriveError)]
#[allow(missing_docs)]
pub enum FromChecksumError {
    #[error("Length is bad")]
    BadLength,
    #[error("Invalid checksum")]
    InvalidChecksum,
    #[error("Invalid checksum prefix byte.")]
    InvalidPrefix,
}
impl Serialize for AccountId20 {
    // Serialize as the EIP-55 checksummed hex string (see `checksum`).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.checksum())
    }
}
impl<'de> Deserialize<'de> for AccountId20 {
    // Deserialize from a "0x"-prefixed hex string via the `FromStr` impl below.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        String::deserialize(deserializer)?
            .parse::<AccountId20>()
            .map_err(|e| serde::de::Error::custom(format!("{e:?}")))
    }
}
impl core::fmt::Display for AccountId20 {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
write!(f, "{}", self.checksum())
}
}
impl core::str::FromStr for AccountId20 {
type Err = FromChecksumError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if s.len() != 42 {
return Err(FromChecksumError::BadLength);
}
if !s.starts_with("0x") {
return Err(FromChecksumError::InvalidPrefix);
}
hex::decode(&s.as_bytes()[2..])
.map_err(|_| FromChecksumError::InvalidChecksum)?
.try_into()
.map(AccountId20)
.map_err(|_| FromChecksumError::BadLength)
}
}
#[cfg(test)]
mod test {
    use super::*;
    // Known EIP-55 checksummed addresses must survive a parse -> checksum
    // round trip unchanged.
    #[test]
    fn deserialisation() {
        let cases = [
            "0xf24FF3a9CF04c71Dbc94D0b566f7A27B94566cac",
            "0x3Cd0A705a2DC65e5b1E1205896BaA2be8A07c6e0",
            "0x798d4Ba9baf0064Ec19eB4F0a1a45785ae9D6DFc",
            "0x773539d4Ac0e786233D90A233654ccEE26a613D9",
            "0xFf64d3F6efE2317EE2807d223a0Bdc4c0c49dfDB",
            "0xC0F0f4ab324C46e55D02D0033343B4Be8A55532d",
        ];
        for case in cases {
            let account: AccountId20 = case.parse().expect("Failed to parse");
            // Re-checksumming must reproduce the original string exactly.
            assert_eq!(account.checksum(), case);
        }
    }
}
+256
View File
@@ -0,0 +1,256 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Generic `scale_bits` over `bitvec`-like `BitOrder` and `BitFormat` types.
use alloc::{vec, vec::Vec};
use codec::{Compact, Input};
use core::marker::PhantomData;
use scale_bits::{
Bits,
scale::format::{Format, OrderFormat, StoreFormat},
};
use scale_decode::{IntoVisitor, TypeResolver};
/// Associates `bitvec::store::BitStore` trait with corresponding, type-erased
/// `scale_bits::StoreFormat` enum.
///
/// Used to decode bit sequences by providing `scale_bits::StoreFormat` using
/// `bitvec`-like type parameters.
pub trait BitStore {
    /// Corresponding `scale_bits::StoreFormat` value.
    const FORMAT: StoreFormat;
    /// Number of bits that the backing store type holds.
    const BITS: u32;
}
// Implements `BitStore` for an unsigned integer type, pairing it with the
// matching `StoreFormat` variant and that type's bit width.
macro_rules! impl_store {
    ($ty:ident, $wrapped:ty) => {
        impl BitStore for $wrapped {
            const FORMAT: StoreFormat = StoreFormat::$ty;
            const BITS: u32 = <$wrapped>::BITS;
        }
    };
}
impl_store!(U8, u8);
impl_store!(U16, u16);
impl_store!(U32, u32);
impl_store!(U64, u64);
/// Associates `bitvec::order::BitOrder` trait with corresponding, type-erased
/// `scale_bits::OrderFormat` enum.
///
/// Used to decode bit sequences in runtime by providing `scale_bits::OrderFormat` using
/// `bitvec`-like type parameters.
pub trait BitOrder {
    /// Corresponding `scale_bits::OrderFormat` value.
    const FORMAT: OrderFormat;
}
// Declares an uninhabited marker type (it's never constructed; it only carries
// the order choice at the type level) and implements `BitOrder` for it.
macro_rules! impl_order {
    ($ty:ident) => {
        #[doc = concat!("Type-level value that corresponds to `scale_bits::OrderFormat::", stringify!($ty), "` at run-time")]
        #[doc = concat!(" and `bitvec::order::BitOrder::", stringify!($ty), "` at the type level.")]
        #[derive(Clone, Debug, PartialEq, Eq)]
        pub enum $ty {}
        impl BitOrder for $ty {
            const FORMAT: OrderFormat = OrderFormat::$ty;
        }
    };
}
impl_order!(Lsb0);
impl_order!(Msb0);
/// Constructs a run-time format parameters based on the corresponding type-level parameters.
fn bit_format<Store: BitStore, Order: BitOrder>() -> Format {
    // Lift the type-level associated constants into a runtime descriptor.
    let order = Order::FORMAT;
    let store = Store::FORMAT;
    Format { order, store }
}
/// `scale_bits::Bits` generic over the bit store (`u8`/`u16`/`u32`/`u64`) and bit order (LSB, MSB)
/// used for SCALE encoding/decoding. Uses `scale_bits::Bits`-default `u8` and LSB format
/// underneath.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DecodedBits<Store, Order> {
    // The actual bit data, always held in the default <u8, Lsb0> representation;
    // `Store`/`Order` only influence how it's SCALE encoded/decoded.
    bits: Bits,
    // Zero-sized marker tying the store/order type parameters to this value.
    _marker: PhantomData<(Store, Order)>,
}
impl<Store, Order> DecodedBits<Store, Order> {
    /// Extracts the underlying `scale_bits::Bits` value.
    pub fn into_bits(self) -> Bits {
        self.bits
    }
    /// References the underlying `scale_bits::Bits` value.
    pub fn as_bits(&self) -> &Bits {
        &self.bits
    }
}
impl<Store, Order> core::iter::FromIterator<bool> for DecodedBits<Store, Order> {
fn from_iter<T: IntoIterator<Item = bool>>(iter: T) -> Self {
DecodedBits { bits: Bits::from_iter(iter), _marker: PhantomData }
}
}
impl<Store: BitStore, Order: BitOrder> codec::Decode for DecodedBits<Store, Order> {
    fn decode<I: Input>(input: &mut I) -> Result<Self, codec::Error> {
        /// Equivalent of `BitSlice::MAX_BITS` on 32bit machine.
        const ARCH32BIT_BITSLICE_MAX_BITS: u32 = 0x1fff_ffff;
        // The bit count comes first as a compact-encoded prefix.
        let Compact(bits) = <Compact<u32>>::decode(input)?;
        // Otherwise it is impossible to store it on 32bit machine.
        if bits > ARCH32BIT_BITSLICE_MAX_BITS {
            return Err("Attempt to decode a BitVec with too many bits".into());
        }
        // Number of whole `Store` elements needed to hold `bits` bits (ceiling division).
        // NOTE: Replace with `bits.div_ceil(Store::BITS)` if `int_roundings` is stabilised
        let elements = (bits / Store::BITS) + u32::from(bits % Store::BITS != 0);
        let bytes_in_elem = Store::BITS.saturating_div(u8::BITS);
        let bytes_needed = (elements * bytes_in_elem) as usize;
        // NOTE: We could reduce allocations if it would be possible to directly
        // decode from an `Input` type using a custom format (rather than default <u8, Lsb0>)
        // for the `Bits` type.
        // Rebuild a complete SCALE bit-sequence in memory: re-encode the compact
        // length prefix, then read the raw element bytes from `input` after it.
        let mut storage = codec::Encode::encode(&Compact(bits));
        let prefix_len = storage.len();
        storage.reserve_exact(bytes_needed);
        storage.extend(vec![0; bytes_needed]);
        input.read(&mut storage[prefix_len..])?;
        // Decode using the requested store/order format, then normalise into the
        // default `Bits` representation for in-memory storage.
        let decoder = scale_bits::decode_using_format_from(&storage, bit_format::<Store, Order>())?;
        let bits = decoder.collect::<Result<Vec<_>, _>>()?;
        let bits = Bits::from_iter(bits);
        Ok(DecodedBits { bits, _marker: PhantomData })
    }
}
impl<Store: BitStore, Order: BitOrder> codec::Encode for DecodedBits<Store, Order> {
    // Size hints delegate to the inner `Bits`.
    // NOTE(review): the hints come from the default <u8, Lsb0> representation;
    // for wider stores the actual encoding may differ in length — confirm the
    // hint is close enough for pre-allocation purposes.
    fn size_hint(&self) -> usize {
        self.bits.size_hint()
    }
    fn encoded_size(&self) -> usize {
        self.bits.encoded_size()
    }
    // Encode using the Store/Order format selected by the type parameters.
    fn encode(&self) -> Vec<u8> {
        scale_bits::encode_using_format(self.bits.iter(), bit_format::<Store, Order>())
    }
}
#[doc(hidden)]
// Visitor used to implement `DecodeAsType` for `DecodedBits`; it simply wraps
// the `Bits` visitor and re-tags the result with the Store/Order parameters.
pub struct DecodedBitsVisitor<S, O, R: TypeResolver>(core::marker::PhantomData<(S, O, R)>);
impl<Store, Order, R: TypeResolver> scale_decode::Visitor for DecodedBitsVisitor<Store, Order, R> {
    type Value<'scale, 'info> = DecodedBits<Store, Order>;
    type Error = scale_decode::Error;
    type TypeResolver = R;
    // Delegate all type-driven decoding to the `Bits` visitor, then wrap the
    // decoded value in `DecodedBits`.
    fn unchecked_decode_as_type<'scale, 'info>(
        self,
        input: &mut &'scale [u8],
        type_id: R::TypeId,
        types: &'info R,
    ) -> scale_decode::visitor::DecodeAsTypeResult<
        Self,
        Result<Self::Value<'scale, 'info>, Self::Error>,
    > {
        let res =
            scale_decode::visitor::decode_with_visitor(input, type_id, types, Bits::into_visitor())
                .map(|bits| DecodedBits { bits, _marker: PhantomData });
        scale_decode::visitor::DecodeAsTypeResult::Decoded(res)
    }
}
impl<Store, Order> scale_decode::IntoVisitor for DecodedBits<Store, Order> {
    type AnyVisitor<R: scale_decode::TypeResolver> = DecodedBitsVisitor<Store, Order, R>;
    fn into_visitor<R: TypeResolver>() -> DecodedBitsVisitor<Store, Order, R> {
        DecodedBitsVisitor(PhantomData)
    }
}
impl<Store, Order> scale_encode::EncodeAsType for DecodedBits<Store, Order> {
    fn encode_as_type_to<R: TypeResolver>(
        &self,
        type_id: R::TypeId,
        types: &R,
        out: &mut Vec<u8>,
    ) -> Result<(), scale_encode::Error> {
        // Defer entirely to `Bits`, which knows how to encode against type info.
        self.as_bits().encode_as_type_to(type_id, types, out)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use core::fmt::Debug;
    use bitvec::vec::BitVec;
    use codec::Decode as _;
    // NOTE: We don't use `bitvec::order` types in our implementation, since we
    // don't want to depend on `bitvec`. Rather than reimplementing the unsafe
    // trait on our types here for testing purposes, we simply convert and
    // delegate to `bitvec`'s own types.
    trait ToBitVec {
        type Order: bitvec::order::BitOrder;
    }
    impl ToBitVec for Lsb0 {
        type Order = bitvec::order::Lsb0;
    }
    impl ToBitVec for Msb0 {
        type Order = bitvec::order::Msb0;
    }
    // Asserts that for the given bit pattern, `DecodedBits<Store, Order>` and
    // the equivalent `BitVec` produce identical SCALE bytes, and that both
    // round-trip through encode/decode unchanged.
    fn scales_like_bitvec_and_roundtrips<
        'a,
        Store: BitStore + bitvec::store::BitStore + PartialEq,
        Order: BitOrder + ToBitVec + Debug + PartialEq,
    >(
        input: impl IntoIterator<Item = &'a bool>,
    ) where
        BitVec<Store, <Order as ToBitVec>::Order>: codec::Encode + codec::Decode,
    {
        let input: Vec<_> = input.into_iter().copied().collect();
        let decoded_bits = DecodedBits::<Store, Order>::from_iter(input.clone());
        let bitvec = BitVec::<Store, <Order as ToBitVec>::Order>::from_iter(input);
        // Encodings must match byte-for-byte.
        let decoded_bits_encoded = codec::Encode::encode(&decoded_bits);
        let bitvec_encoded = codec::Encode::encode(&bitvec);
        assert_eq!(decoded_bits_encoded, bitvec_encoded);
        // Both must round-trip back to their original values.
        let decoded_bits_decoded =
            DecodedBits::<Store, Order>::decode(&mut &decoded_bits_encoded[..])
                .expect("SCALE-encoding DecodedBits to roundtrip");
        let bitvec_decoded =
            BitVec::<Store, <Order as ToBitVec>::Order>::decode(&mut &bitvec_encoded[..])
                .expect("SCALE-encoding BitVec to roundtrip");
        assert_eq!(decoded_bits, decoded_bits_decoded);
        assert_eq!(bitvec, bitvec_decoded);
    }
    #[test]
    fn decoded_bitvec_scales_and_roundtrips() {
        // Patterns chosen to cross element boundaries for every store width.
        let test_cases = [
            vec![],
            vec![true],
            vec![false],
            vec![true, false, true],
            vec![true, false, true, false, false, false, false, false, true],
            [vec![true; 5], vec![false; 5], vec![true; 1], vec![false; 3]].concat(),
            [vec![true; 9], vec![false; 9], vec![true; 9], vec![false; 9]].concat(),
        ];
        for test_case in &test_cases {
            scales_like_bitvec_and_roundtrips::<u8, Lsb0>(test_case);
            scales_like_bitvec_and_roundtrips::<u16, Lsb0>(test_case);
            scales_like_bitvec_and_roundtrips::<u32, Lsb0>(test_case);
            scales_like_bitvec_and_roundtrips::<u64, Lsb0>(test_case);
            scales_like_bitvec_and_roundtrips::<u8, Msb0>(test_case);
            scales_like_bitvec_and_roundtrips::<u16, Msb0>(test_case);
            scales_like_bitvec_and_roundtrips::<u32, Msb0>(test_case);
            scales_like_bitvec_and_roundtrips::<u64, Msb0>(test_case);
        }
    }
}
+227
View File
@@ -0,0 +1,227 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use alloc::{format, vec::Vec};
use codec::{Decode, Encode};
use scale_decode::{
IntoVisitor, TypeResolver, Visitor,
ext::scale_type_resolver,
visitor::{
TypeIdFor,
types::{Composite, Variant},
},
};
use scale_encode::EncodeAsType;
// Dev note: This and related bits taken from `sp_runtime::generic::Era`
/// An era to describe the longevity of a transaction.
// Serde derives are for human-readable config/debug use; the on-wire SCALE
// form is the manual Encode/Decode impls below, NOT a derived encoding.
#[derive(
    PartialEq,
    Default,
    Eq,
    Clone,
    Copy,
    Debug,
    serde::Serialize,
    serde::Deserialize,
    scale_info::TypeInfo,
)]
pub enum Era {
    /// The transaction is valid forever. The genesis hash must be present in the signed content.
    #[default]
    Immortal,
    /// The transaction will expire. Use [`Era::mortal`] to construct this with correct values.
    ///
    /// When used on `FRAME`-based runtimes, `period` cannot exceed `BlockHashCount` parameter
    /// of `system` module.
    Mortal {
        /// The number of blocks that the tx will be valid for after the checkpoint block
        /// hash found in the signer payload.
        period: u64,
        /// The phase in the period that this transaction's lifetime begins (and, importantly,
        /// implies which block hash is included in the signature material). If the `period` is
        /// greater than 1 << 12, then it will be a factor of the times greater than 1<<12 that
        /// `period` is.
        phase: u64,
    },
}
// E.g. with period == 4:
// 0         10        20        30        40
// 0123456789012345678901234567890123456789012
//              |...|
//    authored -/   \- expiry
// phase = 1
// n = Q(current - phase, period) + phase
impl Era {
    /// Create a new era based on a period (which should be a power of two between 4 and 65536
    /// inclusive) and a block number on which it should start (or, for long periods, be shortly
    /// after the start).
    ///
    /// If using `Era` in the context of `FRAME` runtime, make sure that `period`
    /// does not exceed `BlockHashCount` parameter passed to `system` module, since that
    /// prunes old blocks and renders transactions immediately invalid.
    pub fn mortal(period: u64, current: u64) -> Self {
        // Round the period up to a power of two and clamp to the encodable
        // range [4, 65536]; overflow of next_power_of_two falls back to the max.
        let period = period.checked_next_power_of_two().unwrap_or(1 << 16).clamp(4, 1 << 16);
        let phase = current % period;
        // For periods > 4096 the phase is quantized so it fits the 12-bit
        // phase field of the 2-byte wire encoding (see the Encode impl).
        let quantize_factor = (period >> 12).max(1);
        let quantized_phase = phase / quantize_factor * quantize_factor;
        Self::Mortal { period, phase: quantized_phase }
    }
}
// Both copied from `sp_runtime::generic::Era`; this is the wire interface and so
// it's really the most important bit here.
impl codec::Encode for Era {
    fn encode_to<T: codec::Output + ?Sized>(&self, output: &mut T) {
        match self {
            // Immortal is a single zero byte.
            Self::Immortal => output.push_byte(0),
            // Mortal packs into two bytes: low 4 bits = log2(period) (clamped
            // to 1..=15), high 12 bits = quantized phase.
            Self::Mortal { period, phase } => {
                let quantize_factor = (*period >> 12).max(1);
                let encoded = (period.trailing_zeros() - 1).clamp(1, 15) as u16 |
                    ((phase / quantize_factor) << 4) as u16;
                encoded.encode_to(output);
            },
        }
    }
}
impl codec::Decode for Era {
    fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
        let first = input.read_byte()?;
        // A leading zero byte means Immortal; anything else starts a 2-byte Mortal.
        if first == 0 {
            Ok(Self::Immortal)
        } else {
            // Reassemble the little-endian u16: low 4 bits encode log2(period),
            // the remaining 12 bits the quantized phase (inverse of Encode above).
            let encoded = first as u64 + ((input.read_byte()? as u64) << 8);
            let period = 2 << (encoded % (1 << 4));
            let quantize_factor = (period >> 12).max(1);
            let phase = (encoded >> 4) * quantize_factor;
            // Reject values that could not have been produced by a valid encode.
            if period >= 4 && phase < period {
                Ok(Self::Mortal { period, phase })
            } else {
                Err("Invalid period and phase".into())
            }
        }
    }
}
/// Define manually how to encode an Era given some type information. Here we
/// basically check that the type we're targeting is called "Era" and then codec::Encode.
impl EncodeAsType for Era {
    fn encode_as_type_to<R: TypeResolver>(
        &self,
        type_id: R::TypeId,
        types: &R,
        out: &mut Vec<u8>,
    ) -> Result<(), scale_encode::Error> {
        // Visit the type to check that it is an Era. This is only a rough check:
        // any variant type whose path ends in "Era" is accepted.
        let visitor = scale_type_resolver::visitor::new((), |_, _| false)
            .visit_variant(|_, path, _variants| path.last() == Some("Era"));
        let is_era = types.resolve_type(type_id.clone(), visitor).unwrap_or_default();
        if !is_era {
            return Err(scale_encode::Error::custom_string(format!(
                "Type {type_id:?} is not a valid Era type; expecting either Immortal or MortalX variant"
            )));
        }
        // if the type looks valid then just scale encode our Era.
        self.encode_to(out);
        Ok(())
    }
}
/// Define manually how to decode an Era given some type information. Here we check that the
/// variant we're decoding is one of the expected Era variants, and that the field is correct if so,
/// ensuring that this will fail if trying to decode something that isn't an Era.
// Zero-sized visitor; the `R` parameter just pins the type resolver in use.
pub struct EraVisitor<R>(core::marker::PhantomData<R>);
impl IntoVisitor for Era {
    type AnyVisitor<R: TypeResolver> = EraVisitor<R>;
    fn into_visitor<R: TypeResolver>() -> Self::AnyVisitor<R> {
        EraVisitor(core::marker::PhantomData)
    }
}
impl<R: TypeResolver> Visitor for EraVisitor<R> {
    type Value<'scale, 'resolver> = Era;
    type Error = scale_decode::Error;
    type TypeResolver = R;
    // Unwrap any newtype wrappers around the era, eg the CheckMortality extension (which actually
    // has 2 fields, but scale_info seems to automatically ignore the PhantomData field). This
    // allows us to decode directly from CheckMortality into Era.
    fn visit_composite<'scale, 'resolver>(
        self,
        value: &mut Composite<'scale, 'resolver, Self::TypeResolver>,
        _type_id: TypeIdFor<Self>,
    ) -> Result<Self::Value<'scale, 'resolver>, Self::Error> {
        // Only single-field wrappers are transparent; anything else is not an Era.
        if value.remaining() != 1 {
            return Err(scale_decode::Error::custom_string(format!(
                "Expected any wrapper around Era to have exactly one field, but got {} fields",
                value.remaining()
            )));
        }
        // Recurse into the single field using this same visitor.
        value.decode_item(self).expect("1 field expected; checked above.")
    }
    fn visit_variant<'scale, 'resolver>(
        self,
        value: &mut Variant<'scale, 'resolver, Self::TypeResolver>,
        _type_id: TypeIdFor<Self>,
    ) -> Result<Self::Value<'scale, 'resolver>, Self::Error> {
        let variant = value.name();
        // If the variant is immortal, we know the outcome.
        if variant == "Immortal" {
            return Ok(Era::Immortal);
        }
        // Otherwise, we expect a variant Mortal1..Mortal255 where the number
        // here is the first byte, and the second byte is conceptually a field of this variant.
        // This weird encoding is because the Era is compressed to just 1 byte if immortal and
        // just 2 bytes if mortal.
        //
        // Note: We _could_ just assume we'll have 2 bytes to work with and decode the era directly,
        // but checking the variant names ensures that the thing we think is an Era actually _is_
        // one, based on the type info for it.
        let first_byte = variant
            .strip_prefix("Mortal")
            .and_then(|s| s.parse::<u8>().ok())
            .ok_or_else(|| {
                scale_decode::Error::custom_string(format!(
                    "Expected MortalX variant, but got {variant}"
                ))
            })?;
        // We need 1 field in the MortalN variant containing the second byte.
        let mortal_fields = value.fields();
        if mortal_fields.remaining() != 1 {
            return Err(scale_decode::Error::custom_string(format!(
                "Expected Mortal{} to have one u8 field, but got {} fields",
                first_byte,
                mortal_fields.remaining()
            )));
        }
        let second_byte = mortal_fields
            .decode_item(u8::into_visitor())
            .expect("At least one field should exist; checked above.")
            .map_err(|e| {
                scale_decode::Error::custom_string(format!(
                    "Expected mortal variant field to be u8, but: {e}"
                ))
            })?;
        // Now that we have both bytes we can decode them into the era using
        // the same logic as the codec::Decode impl does.
        Era::decode(&mut &[first_byte, second_byte][..]).map_err(|e| {
            scale_decode::Error::custom_string(format!(
                "Failed to codec::Decode Era from Mortal bytes: {e}"
            ))
        })
    }
}
+78
View File
@@ -0,0 +1,78 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Miscellaneous utility helpers.
mod account_id;
mod account_id20;
pub mod bits;
mod era;
mod multi_address;
mod multi_signature;
mod static_type;
mod unchecked_extrinsic;
mod wrapper_opaque;
mod yesnomaybe;
use alloc::{borrow::ToOwned, format, string::String, vec::Vec};
use codec::{Compact, Decode, Encode};
use derive_where::derive_where;
pub use account_id::AccountId32;
pub use account_id20::AccountId20;
pub use era::Era;
pub use multi_address::MultiAddress;
pub use multi_signature::MultiSignature;
pub use primitive_types::{H160, H256, H512};
pub use static_type::Static;
pub use unchecked_extrinsic::UncheckedExtrinsic;
pub use wrapper_opaque::WrapperKeepOpaque;
pub use yesnomaybe::{Maybe, No, NoMaybe, Yes, YesMaybe, YesNo};
/// Wraps an already encoded byte vector, prevents being encoded as a raw byte vector as part of
/// the transaction payload
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct Encoded(pub Vec<u8>);
impl codec::Encode for Encoded {
    fn encode(&self) -> Vec<u8> {
        // The bytes are already SCALE encoded, so emit them verbatim
        // (no length prefix, unlike `Vec<u8>`'s own Encode impl).
        self.0.clone()
    }
}
/// Decodes a compact encoded value from the beginning of the provided bytes,
/// returning the value and any remaining bytes.
pub fn strip_compact_prefix(bytes: &[u8]) -> Result<(u64, &[u8]), codec::Error> {
let cursor = &mut &*bytes;
let val = <Compact<u64>>::decode(cursor)?;
Ok((val.0, *cursor))
}
/// A version of [`core::marker::PhantomData`] that is also Send and Sync (which is fine
/// because regardless of the generic param, it is always possible to Send + Sync this
/// 0 size type).
#[derive(Encode, Decode, scale_info::TypeInfo)]
#[derive_where(Clone, PartialEq, Debug, Eq, Default, Hash)]
#[scale_info(skip_type_params(T))]
#[doc(hidden)]
pub struct PhantomDataSendSync<T>(core::marker::PhantomData<T>);
impl<T> PhantomDataSendSync<T> {
    // Construct the (zero-sized) marker value.
    pub fn new() -> Self {
        Self(core::marker::PhantomData)
    }
}
// SAFETY: this type is zero-sized and never actually holds a `T`, so sending
// or sharing it across threads cannot touch any `T` data.
unsafe impl<T> Send for PhantomDataSendSync<T> {}
// SAFETY: as above — no `T` is ever stored, so shared references are harmless.
unsafe impl<T> Sync for PhantomDataSendSync<T> {}
/// This represents a key-value collection and is SCALE compatible
/// with collections like BTreeMap. This has the same type params
/// as `BTreeMap` which allows us to easily swap the two during codegen.
// Note: unlike `BTreeMap`, no key ordering or uniqueness is enforced here.
pub type KeyedVec<K, V> = Vec<(K, V)>;
/// A quick helper to encode some bytes to hex.
pub fn to_hex(bytes: impl AsRef<[u8]>) -> String {
    // Build "0x" + lowercase hex with a single exact-sized allocation.
    let encoded = hex::encode(bytes.as_ref());
    let mut out = String::with_capacity(2 + encoded.len());
    out.push_str("0x");
    out.push_str(&encoded);
    out
}
+45
View File
@@ -0,0 +1,45 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! The "default" Bizinikiwi/Pezkuwi Address type. This is used in codegen, as well as signing
//! related bits. This doesn't contain much functionality itself, but is easy to convert to/from an
//! `sp_runtime::MultiAddress` for instance, to gain functionality without forcing a dependency on
//! Bizinikiwi crates here.
use alloc::vec::Vec;
use codec::{Decode, Encode};
/// A multi-format address wrapper for on-chain accounts. This is a simplified version of
/// Bizinikiwi's `sp_runtime::MultiAddress`.
// The variant order and the `#[codec(compact)]` on `Index` must match
// `sp_runtime::MultiAddress` exactly, since this is SCALE-encoded on the wire.
#[derive(
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Encode,
    Decode,
    Debug,
    scale_encode::EncodeAsType,
    scale_decode::DecodeAsType,
    scale_info::TypeInfo,
)]
pub enum MultiAddress<AccountId, AccountIndex> {
    /// It's an account ID (pubkey).
    Id(AccountId),
    /// It's an account index.
    Index(#[codec(compact)] AccountIndex),
    /// It's some arbitrary raw bytes.
    Raw(Vec<u8>),
    /// It's a 32 byte representation.
    Address32([u8; 32]),
    /// Its a 20 byte representation.
    Address20([u8; 20]),
}
impl<AccountId, AccountIndex> From<AccountId> for MultiAddress<AccountId, AccountIndex> {
fn from(a: AccountId) -> Self {
Self::Id(a)
}
}
+22
View File
@@ -0,0 +1,22 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! The "default" Bizinikiwi/Pezkuwi Signature type. This is used in codegen, as well as signing
//! related bits. This doesn't contain much functionality itself, but is easy to convert to/from an
//! `sp_runtime::MultiSignature` for instance, to gain functionality without forcing a dependency on
//! Bizinikiwi crates here.
use codec::{Decode, Encode};
/// Signature container that can store known signature types. This is a simplified version of
/// `sp_runtime::MultiSignature`. To obtain more functionality, convert this into that type.
// Each variant holds the raw fixed-size signature bytes; fixed-size arrays carry no
// length prefix when SCALE-encoded.
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, Debug, scale_info::TypeInfo)]
pub enum MultiSignature {
    /// An Ed25519 signature.
    Ed25519([u8; 64]),
    /// An Sr25519 signature.
    Sr25519([u8; 64]),
    /// An ECDSA/SECP256k1 signature (a 512-bit value, plus 8 bits for recovery ID).
    Ecdsa([u8; 65]),
}
+82
View File
@@ -0,0 +1,82 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use codec::{Decode, Encode};
use scale_decode::{IntoVisitor, TypeResolver, Visitor, visitor::DecodeAsTypeResult};
use scale_encode::EncodeAsType;
use alloc::vec::Vec;
/// If the type inside this implements [`Encode`], this will implement
/// [`scale_encode::EncodeAsType`]. If the type inside this implements [`Decode`], this will
/// implement [`scale_decode::DecodeAsType`].
///
/// In either direction, we ignore any type information and just attempt to encode/decode statically
/// via the [`Encode`] and [`Decode`] implementations. This can be useful as an adapter for types
/// which do not implement [`scale_encode::EncodeAsType`] and [`scale_decode::DecodeAsType`]
/// themselves, but it's best to avoid using it where possible as it will not take into account any
/// type information, and is thus more likely to encode or decode incorrectly.
// Transparent newtype: the single public field is the wrapped value.
#[derive(Debug, Encode, Decode, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)]
pub struct Static<T>(pub T);
// Encode statically via `Encode`, deliberately ignoring the resolved type
// information (hence the unused `_type_id`/`_types`) — see the type docs above.
impl<T: Encode> EncodeAsType for Static<T> {
    fn encode_as_type_to<R: TypeResolver>(
        &self,
        _type_id: R::TypeId,
        _types: &R,
        out: &mut Vec<u8>,
    ) -> Result<(), scale_encode::Error> {
        // Append the statically-encoded bytes; this never fails.
        self.0.encode_to(out);
        Ok(())
    }
}
/// Visitor which decodes a [`Static<T>`] purely via `T`'s [`Decode`] impl,
/// ignoring any resolved type information.
pub struct StaticDecodeAsTypeVisitor<T, R>(core::marker::PhantomData<(T, R)>);
impl<T: Decode, R: TypeResolver> Visitor for StaticDecodeAsTypeVisitor<T, R> {
    type Value<'scale, 'info> = Static<T>;
    type Error = scale_decode::Error;
    type TypeResolver = R;
    // `unchecked_decode_as_type` is called before any type resolution happens,
    // so we can bypass the type info entirely and decode statically here.
    fn unchecked_decode_as_type<'scale, 'info>(
        self,
        input: &mut &'scale [u8],
        _type_id: R::TypeId,
        _types: &'info R,
    ) -> DecodeAsTypeResult<Self, Result<Self::Value<'scale, 'info>, Self::Error>> {
        use scale_decode::{Error, visitor::DecodeError};
        // Map any codec error into a scale_decode error.
        let decoded = T::decode(input)
            .map(Static)
            .map_err(|e| Error::new(DecodeError::CodecError(e).into()));
        DecodeAsTypeResult::Decoded(decoded)
    }
}
impl<T: Decode> IntoVisitor for Static<T> {
    type AnyVisitor<R: TypeResolver> = StaticDecodeAsTypeVisitor<T, R>;
    fn into_visitor<R: TypeResolver>() -> StaticDecodeAsTypeVisitor<T, R> {
        StaticDecodeAsTypeVisitor(core::marker::PhantomData)
    }
}
// Make it easy to convert types into Static where required.
impl<T> From<T> for Static<T> {
fn from(value: T) -> Self {
Static(value)
}
}
// Static<T> is just a marker type and should be as transparent as possible:
impl<T> core::ops::Deref for Static<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T> core::ops::DerefMut for Static<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
@@ -0,0 +1,142 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! The "default" Bizinikiwi/Pezkuwi UncheckedExtrinsic.
//! This is used in codegen for runtime API calls.
//!
//! The inner bytes represent the encoded extrinsic expected by the
//! runtime APIs. Deriving `EncodeAsType` would lead to the inner
//! bytes to be re-encoded (length prefixed).
use core::marker::PhantomData;
use codec::{Decode, Encode};
use scale_decode::{DecodeAsType, IntoVisitor, TypeResolver, Visitor, visitor::DecodeAsTypeResult};
use super::{Encoded, Static};
use alloc::vec::Vec;
/// The unchecked extrinsic from bizinikiwi.
// Wraps the raw extrinsic bytes (`Encoded`) in `Static` so that `EncodeAsType`
// emits them verbatim; the `PhantomData` only carries the type parameters and
// is skipped by codec.
#[derive(Clone, Debug, Eq, PartialEq, Encode)]
pub struct UncheckedExtrinsic<Address, Call, Signature, Extra>(
    Static<Encoded>,
    #[codec(skip)] PhantomData<(Address, Call, Signature, Extra)>,
);
impl<Address, Call, Signature, Extra> UncheckedExtrinsic<Address, Call, Signature, Extra> {
/// Construct a new [`UncheckedExtrinsic`].
pub fn new(bytes: Vec<u8>) -> Self {
Self(Static(Encoded(bytes)), PhantomData)
}
/// Get the bytes of the encoded extrinsic.
pub fn bytes(&self) -> &[u8] {
self.0.0.0.as_slice()
}
}
impl<Address, Call, Signature, Extra> Decode
    for UncheckedExtrinsic<Address, Call, Signature, Extra>
{
    fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
        // The bytes for an UncheckedExtrinsic are first a compact
        // encoded length, and then the bytes following. This is the
        // same encoding as a Vec, so easiest ATM is just to decode
        // into that, and then encode the vec bytes to get our extrinsic
        // bytes, which we save into an `Encoded` to preserve as-is.
        // (The roundtrip behavior is pinned by the test module below.)
        let xt_vec: Vec<u8> = Decode::decode(input)?;
        Ok(UncheckedExtrinsic::new(xt_vec))
    }
}
// Delegate to `Static`'s impl, which writes the inner bytes verbatim and
// ignores type information — this avoids re-encoding (length-prefixing) the
// already-encoded extrinsic bytes (see module docs).
impl<Address, Call, Signature, Extra> scale_encode::EncodeAsType
    for UncheckedExtrinsic<Address, Call, Signature, Extra>
{
    fn encode_as_type_to<R: TypeResolver>(
        &self,
        type_id: R::TypeId,
        types: &R,
        out: &mut Vec<u8>,
    ) -> Result<(), scale_encode::Error> {
        self.0.encode_as_type_to(type_id, types, out)
    }
}
impl<Address, Call, Signature, Extra> From<Vec<u8>>
for UncheckedExtrinsic<Address, Call, Signature, Extra>
{
fn from(bytes: Vec<u8>) -> Self {
UncheckedExtrinsic::new(bytes)
}
}
impl<Address, Call, Signature, Extra> From<UncheckedExtrinsic<Address, Call, Signature, Extra>>
for Vec<u8>
{
fn from(bytes: UncheckedExtrinsic<Address, Call, Signature, Extra>) -> Self {
bytes.0.0.0
}
}
pub struct UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra, R: TypeResolver>(
PhantomData<(Address, Call, Signature, Extra, R)>,
);
impl<Address, Call, Signature, Extra, R: TypeResolver> Visitor
for UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra, R>
{
type Value<'scale, 'info> = UncheckedExtrinsic<Address, Call, Signature, Extra>;
type Error = scale_decode::Error;
type TypeResolver = R;
fn unchecked_decode_as_type<'scale, 'info>(
self,
input: &mut &'scale [u8],
type_id: R::TypeId,
types: &'info R,
) -> DecodeAsTypeResult<Self, Result<Self::Value<'scale, 'info>, Self::Error>> {
DecodeAsTypeResult::Decoded(Self::Value::decode_as_type(input, type_id, types))
}
}
// Hooking up `IntoVisitor` gives us `DecodeAsType` for free via the blanket impl.
impl<Address, Call, Signature, Extra> IntoVisitor
    for UncheckedExtrinsic<Address, Call, Signature, Extra>
{
    type AnyVisitor<R: TypeResolver> =
        UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra, R>;
    fn into_visitor<R: TypeResolver>()
    -> UncheckedExtrinsicDecodeAsTypeVisitor<Address, Call, Signature, Extra, R> {
        UncheckedExtrinsicDecodeAsTypeVisitor(PhantomData)
    }
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use alloc::vec;
    // Pins the invariant the `Decode`/`Encode` impls rely on: encoding a
    // decoded extrinsic reproduces the *inner* bytes (no length prefix added).
    #[test]
    fn unchecked_extrinsic_encoding() {
        // A tx is basically some bytes with a compact length prefix; ie an encoded vec:
        let tx_bytes = vec![1u8, 2, 3].encode();
        let unchecked_extrinsic = UncheckedExtrinsic::<(), (), (), ()>::new(tx_bytes.clone());
        let encoded_tx_bytes = unchecked_extrinsic.encode();
        // The encoded representation must not alter the provided bytes.
        assert_eq!(tx_bytes, encoded_tx_bytes);
        // However, for decoding we expect to be able to read the extrinsic from the wire
        // which would be length prefixed.
        let decoded_tx = UncheckedExtrinsic::<(), (), (), ()>::decode(&mut &tx_bytes[..]).unwrap();
        let decoded_tx_bytes = decoded_tx.bytes();
        let encoded_tx_bytes = decoded_tx.encode();
        assert_eq!(decoded_tx_bytes, encoded_tx_bytes);
        // Ensure we can decode the tx and fetch only the tx bytes.
        assert_eq!(vec![1, 2, 3], encoded_tx_bytes);
    }
}
+221
View File
@@ -0,0 +1,221 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::PhantomDataSendSync;
use codec::{Compact, Decode, DecodeAll, Encode};
use derive_where::derive_where;
use scale_decode::{IntoVisitor, TypeResolver, Visitor, ext::scale_type_resolver::visitor};
use scale_encode::EncodeAsType;
use alloc::{format, vec::Vec};
/// A wrapper for any type `T` which implement encode/decode in a way compatible with `Vec<u8>`.
/// [`WrapperKeepOpaque`] stores the type only in its opaque format, aka as a `Vec<u8>`. To
/// access the real type `T` [`Self::try_decode`] needs to be used.
// Dev notes:
//
// - This is adapted from [here](https://github.com/pezkuwichain/bizinikiwi/blob/master/frame/support/src/traits/misc.rs).
// - The encoded bytes will be a compact encoded length followed by that number of bytes.
// - However, the TypeInfo describes the type as a composite with first a compact encoded length and
// next the type itself.
// [`Encode`] and [`Decode`] impls will "just work" to take this into a `Vec<u8>`, but we need a
// custom [`EncodeAsType`] and [`Visitor`] implementation to encode and decode based on TypeInfo.
#[derive(Encode, Decode)]
#[derive_where(Debug, Clone, PartialEq, Eq, Default, Hash)]
pub struct WrapperKeepOpaque<T> {
    // SCALE-encoded bytes of a `T`, stored without a length prefix
    // (see `from_value`, which stores `value.encode()` directly).
    data: Vec<u8>,
    // Marker only; no `T` is stored.
    _phantom: PhantomDataSendSync<T>,
}
impl<T> WrapperKeepOpaque<T> {
/// Try to decode the wrapped type from the inner `data`.
///
/// Returns `None` if the decoding failed.
pub fn try_decode(&self) -> Option<T>
where
T: Decode,
{
T::decode_all(&mut &self.data[..]).ok()
}
/// Returns the length of the encoded `T`.
pub fn encoded_len(&self) -> usize {
self.data.len()
}
/// Returns the encoded data.
pub fn encoded(&self) -> &[u8] {
&self.data
}
/// Create from the given encoded `data`.
pub fn from_encoded(data: Vec<u8>) -> Self {
Self { data, _phantom: PhantomDataSendSync::new() }
}
/// Create from some raw value by encoding it.
pub fn from_value(value: T) -> Self
where
T: Encode,
{
Self { data: value.encode(), _phantom: PhantomDataSendSync::new() }
}
}
// Encode based on the resolved type info: only a composite target shape is
// accepted (matching the `TypeInfo` described in the dev notes above); the
// stored bytes are then written with their compact length prefix via
// `Vec<u8>::encode_to`.
impl<T> EncodeAsType for WrapperKeepOpaque<T> {
    fn encode_as_type_to<R: TypeResolver>(
        &self,
        type_id: R::TypeId,
        types: &R,
        out: &mut Vec<u8>,
    ) -> Result<(), scale_encode::Error> {
        use scale_encode::error::{Error, ErrorKind, Kind};
        let ctx = (type_id.clone(), out);
        let visitor = visitor::new(ctx, |(type_id, _out), _| {
            // Check that the target shape lines up: any other shape but composite is wrong.
            Err(Error::new(ErrorKind::WrongShape {
                actual: Kind::Struct,
                expected_id: format!("{type_id:?}"),
            }))
        })
        .visit_composite(|(_type_id, out), _path, _fields| {
            self.data.encode_to(out);
            Ok(())
        });
        // `resolve_type` returns `Result<visitor-result, resolve-error>`; the `?`
        // unwraps only the resolve error, leaving the visitor's own `Result` as
        // this function's return value.
        types
            .resolve_type(type_id.clone(), visitor)
            .map_err(|_| Error::new(ErrorKind::TypeNotFound(format!("{type_id:?}"))))?
    }
}
/// Visitor which decodes a `WrapperKeepOpaque<T>` from its composite TypeInfo
/// shape: a compact length field followed by the encoded `T` bytes.
pub struct WrapperKeepOpaqueVisitor<T, R>(core::marker::PhantomData<(T, R)>);
impl<T, R: TypeResolver> Visitor for WrapperKeepOpaqueVisitor<T, R> {
    type Value<'scale, 'info> = WrapperKeepOpaque<T>;
    type Error = scale_decode::Error;
    type TypeResolver = R;
    fn visit_composite<'scale, 'info>(
        self,
        value: &mut scale_decode::visitor::types::Composite<'scale, 'info, R>,
        _type_id: R::TypeId,
    ) -> Result<Self::Value<'scale, 'info>, Self::Error> {
        use scale_decode::{
            error::{Error, ErrorKind},
            visitor::DecodeError,
        };
        // Guard: the composite must actually be a `WrapperKeepOpaque` type...
        if value.name() != Some("WrapperKeepOpaque") {
            return Err(Error::new(ErrorKind::VisitorDecodeError(DecodeError::TypeResolvingError(
                format!("Expected a type named 'WrapperKeepOpaque', got: {:?}", value.name()),
            ))));
        }
        // ...with exactly the two fields described by its TypeInfo.
        if value.remaining() != 2 {
            return Err(Error::new(ErrorKind::WrongLength {
                actual_len: value.remaining(),
                expected_len: 2,
            }));
        }
        // The field to decode is a compact len followed by bytes. Decode the length, then grab the
        // bytes.
        let Compact(len) =
            value.decode_item(Compact::<u32>::into_visitor()).expect("length checked")?;
        let field = value.next().expect("length checked")?;
        // Sanity check that the compact length we decoded lines up with the number of bytes encoded
        // in the next field.
        if field.bytes().len() != len as usize {
            return Err(Error::custom_str(
                "WrapperTypeKeepOpaque compact encoded length doesn't line up with encoded byte len",
            ));
        }
        Ok(WrapperKeepOpaque { data: field.bytes().to_vec(), _phantom: PhantomDataSendSync::new() })
    }
}
// Hooking up `IntoVisitor` gives us `DecodeAsType` for free via the blanket impl.
impl<T> IntoVisitor for WrapperKeepOpaque<T> {
    type AnyVisitor<R: TypeResolver> = WrapperKeepOpaqueVisitor<T, R>;
    fn into_visitor<R: TypeResolver>() -> WrapperKeepOpaqueVisitor<T, R> {
        WrapperKeepOpaqueVisitor(core::marker::PhantomData)
    }
}
#[cfg(test)]
mod test {
    use scale_decode::DecodeAsType;
    use alloc::vec;
    use super::*;
    // Copied from https://github.com/pezkuwichain/bizinikiwi/blob/master/frame/support/src/traits/misc.rs
    // and used for tests to check that we can work with the expected TypeInfo without needing to
    // import the frame_support crate, which has quite a lot of dependencies.
    impl<T: scale_info::TypeInfo + 'static> scale_info::TypeInfo for WrapperKeepOpaque<T> {
        type Identity = Self;
        fn type_info() -> scale_info::Type {
            use scale_info::{Path, Type, TypeParameter, build::Fields, meta_type};
            Type::builder()
                .path(Path::new("WrapperKeepOpaque", module_path!()))
                .type_params(vec![TypeParameter::new("T", Some(meta_type::<T>()))])
                .composite(
                    Fields::unnamed()
                        .field(|f| f.compact::<u32>())
                        .field(|f| f.ty::<T>().type_name("T")),
                )
        }
    }
    /// Given a type definition, return type ID and registry representing it.
    fn make_type<T: scale_info::TypeInfo + 'static>() -> (u32, scale_info::PortableRegistry) {
        let m = scale_info::MetaType::new::<T>();
        let mut types = scale_info::Registry::new();
        let id = types.register_type(&m);
        let portable_registry: scale_info::PortableRegistry = types.into();
        (id.id, portable_registry)
    }
    // Asserts that the type-info-driven encode/decode paths produce identical
    // bytes/values to the static codec paths, and consume all input bytes.
    fn roundtrips_like_scale_codec<T>(t: T)
    where
        T: EncodeAsType
            + DecodeAsType
            + Encode
            + Decode
            + PartialEq
            + core::fmt::Debug
            + scale_info::TypeInfo
            + 'static,
    {
        let (type_id, types) = make_type::<T>();
        let scale_codec_encoded = t.encode();
        let encode_as_type_encoded = t.encode_as_type(type_id, &types).unwrap();
        assert_eq!(scale_codec_encoded, encode_as_type_encoded, "encoded bytes should match");
        let decode_as_type_bytes = &mut &*scale_codec_encoded;
        let decoded_as_type = T::decode_as_type(decode_as_type_bytes, type_id, &types)
            .expect("decode-as-type decodes");
        let decode_scale_codec_bytes = &mut &*scale_codec_encoded;
        let decoded_scale_codec = T::decode(decode_scale_codec_bytes).expect("scale-codec decodes");
        assert!(decode_as_type_bytes.is_empty(), "no bytes should remain in decode-as-type impl");
        assert!(decode_scale_codec_bytes.is_empty(), "no bytes should remain in codec-decode impl");
        assert_eq!(decoded_as_type, decoded_scale_codec, "decoded values should match");
    }
    #[test]
    fn wrapper_keep_opaque_roundtrips_ok() {
        roundtrips_like_scale_codec(WrapperKeepOpaque::from_value(123u64));
        roundtrips_like_scale_codec(WrapperKeepOpaque::from_value(true));
        roundtrips_like_scale_codec(WrapperKeepOpaque::from_value(vec![1u8, 2, 3, 4]));
    }
}
+82
View File
@@ -0,0 +1,82 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
/// A unit marker enum (uninhabited; used only at the type level).
pub enum Yes {}
/// A unit marker enum (uninhabited; used only at the type level).
pub enum Maybe {}
/// A unit marker enum (uninhabited; used only at the type level).
pub enum No {}
/// Implemented for [`Yes`] and [`No`], letting code discover at runtime
/// which of the two marker types a type parameter was set to.
pub trait YesNo {
    /// `true` only for [`Yes`].
    fn is_yes() -> bool {
        false
    }
    /// `true` only for [`No`].
    fn is_no() -> bool {
        false
    }
}
// Each marker overrides exactly its own predicate; the other stays `false`
// via the trait's default body.
impl YesNo for Yes {
    fn is_yes() -> bool {
        true
    }
}
impl YesNo for No {
    fn is_no() -> bool {
        true
    }
}
/// Implemented for [`Yes`] and [`Maybe`], letting code discover at runtime
/// which of the two marker types a type parameter was set to.
pub trait YesMaybe {
    /// `true` only for [`Yes`].
    fn is_yes() -> bool {
        false
    }
    /// `true` only for [`Maybe`].
    fn is_maybe() -> bool {
        false
    }
}
// Each marker overrides exactly its own predicate; the other stays `false`
// via the trait's default body.
impl YesMaybe for Yes {
    fn is_yes() -> bool {
        true
    }
}
impl YesMaybe for Maybe {
    fn is_maybe() -> bool {
        true
    }
}
/// Implemented for [`No`] and [`Maybe`], letting code discover at runtime
/// which of the two marker types a type parameter was set to.
pub trait NoMaybe {
    /// `true` only for [`No`].
    fn is_no() -> bool {
        false
    }
    /// `true` only for [`Maybe`].
    fn is_maybe() -> bool {
        false
    }
}
// Each marker overrides exactly its own predicate; the other stays `false`
// via the trait's default body.
impl NoMaybe for No {
    fn is_no() -> bool {
        true
    }
}
impl NoMaybe for Maybe {
    fn is_maybe() -> bool {
        true
    }
}
+77
View File
@@ -0,0 +1,77 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! Encode View Function payloads, decode the associated values returned from them, and validate
//! static View Function payloads.
pub mod payload;
use crate::{Metadata, error::ViewFunctionError};
use alloc::{string::ToString, vec::Vec};
use payload::Payload;
use scale_decode::IntoVisitor;
/// Run the validation logic against some View Function payload you'd like to use. Returns `Ok(())`
/// if the payload is valid (or if it's not possible to check since the payload has no validation
/// hash). Return an error if the payload was not valid or something went wrong trying to validate
/// it (ie the View Function in question do not exist at all)
pub fn validate<P: Payload>(payload: P, metadata: &Metadata) -> Result<(), ViewFunctionError> {
    // Without a statically generated hash there is nothing to validate against.
    let Some(expected_hash) = payload.validation_hash() else {
        return Ok(());
    };
    let pallet_name = payload.pallet_name();
    let function_name = payload.function_name();
    // Look the function up in the live metadata; missing pallet/function are errors.
    let pallet = metadata
        .pallet_by_name(pallet_name)
        .ok_or_else(|| ViewFunctionError::PalletNotFound(pallet_name.to_string()))?;
    let view_function = pallet.view_function_by_name(function_name).ok_or_else(|| {
        ViewFunctionError::ViewFunctionNotFound {
            pallet_name: pallet_name.to_string(),
            function_name: function_name.to_string(),
        }
    })?;
    if expected_hash == view_function.hash() {
        Ok(())
    } else {
        Err(ViewFunctionError::IncompatibleCodegen)
    }
}
/// The name of the Runtime API call which can execute View Functions.
pub const CALL_NAME: &str = "RuntimeViewFunction_execute_view_function";
/// Encode the bytes that will be passed to the "execute_view_function" Runtime API call,
/// to execute the View Function represented by the given payload.
pub fn call_args<P: Payload>(
    payload: P,
    metadata: &Metadata,
) -> Result<Vec<u8>, ViewFunctionError> {
    // Delegate the encoding to frame_decode, translating its error into ours.
    frame_decode::view_functions::encode_view_function_inputs(
        payload.pallet_name(),
        payload.function_name(),
        payload.args(),
        metadata,
        metadata.types(),
    )
    .map_err(ViewFunctionError::CouldNotEncodeInputs)
}
/// Decode the value bytes at the location given by the provided View Function payload.
pub fn decode_value<P: Payload>(
    bytes: &mut &[u8],
    payload: P,
    metadata: &Metadata,
) -> Result<P::ReturnType, ViewFunctionError> {
    // Delegate the decoding to frame_decode, translating its error into ours.
    frame_decode::view_functions::decode_view_function_response(
        payload.pallet_name(),
        payload.function_name(),
        bytes,
        metadata,
        metadata.types(),
        P::ReturnType::into_visitor(),
    )
    .map_err(ViewFunctionError::CouldNotDecodeResponse)
}
+161
View File
@@ -0,0 +1,161 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! This module contains the trait and types used to represent
//! View Function calls that can be made.
use alloc::{borrow::Cow, string::String};
use core::marker::PhantomData;
use derive_where::derive_where;
use frame_decode::view_functions::IntoEncodableValues;
use scale_decode::DecodeAsType;
/// This represents a View Function payload that can call into the runtime of node.
///
/// # Components
///
/// - associated return type
///
/// Resulting bytes of the call are interpreted into this type.
///
/// - query ID
///
/// The ID used to identify in the runtime which view function to call.
///
/// - encoded arguments
///
/// Each argument of the View Function must be scale-encoded.
pub trait Payload {
    /// Type of the arguments for this call.
    type ArgsType: IntoEncodableValues;
    /// The return type of the function call.
    type ReturnType: DecodeAsType;
    /// The View Function pallet name.
    fn pallet_name(&self) -> &str;
    /// The View Function function name.
    fn function_name(&self) -> &str;
    /// The arguments.
    fn args(&self) -> &Self::ArgsType;
    /// Returns the statically generated validation hash.
    ///
    /// `None` (the default) means the payload cannot be validated.
    fn validation_hash(&self) -> Option<[u8; 32]> {
        None
    }
}
// A reference to a payload is a valid payload; every method delegates to
// the referenced value.
impl<P: Payload + ?Sized> Payload for &'_ P {
    type ArgsType = P::ArgsType;
    type ReturnType = P::ReturnType;
    fn pallet_name(&self) -> &str {
        (**self).pallet_name()
    }
    fn function_name(&self) -> &str {
        (**self).function_name()
    }
    fn args(&self) -> &Self::ArgsType {
        (**self).args()
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        (**self).validation_hash()
    }
}
/// A View Function payload containing the generic argument data
/// and interpreting the result of the call as `ReturnType`.
///
/// This can be created from static values (ie those generated
/// via the `subxt` macro) or dynamic values via [`dynamic`].
#[derive_where(Clone, Debug, Eq, Ord, PartialEq, PartialOrd; ArgsType)]
pub struct StaticPayload<ArgsType, ReturnType> {
    // Cow allows both codegen'd `&'static str` names and runtime `String`s.
    pallet_name: Cow<'static, str>,
    function_name: Cow<'static, str>,
    args: ArgsType,
    // Set only by `new_static`; `None` disables validation.
    validation_hash: Option<[u8; 32]>,
    _marker: PhantomData<ReturnType>,
}
/// A dynamic View Function payload.
// Same concrete type as `StaticPayload`; the alias exists for readability at call sites.
pub type DynamicPayload<ArgsType, ReturnType> = StaticPayload<ArgsType, ReturnType>;
impl<ArgsType: IntoEncodableValues, ReturnType: DecodeAsType> Payload
    for StaticPayload<ArgsType, ReturnType>
{
    type ArgsType = ArgsType;
    type ReturnType = ReturnType;
    fn pallet_name(&self) -> &str {
        &*self.pallet_name
    }
    fn function_name(&self) -> &str {
        &*self.function_name
    }
    fn args(&self) -> &Self::ArgsType {
        &self.args
    }
    fn validation_hash(&self) -> Option<[u8; 32]> {
        self.validation_hash
    }
}
impl<ReturnTy, ArgsType> StaticPayload<ArgsType, ReturnTy> {
    /// Create a new [`StaticPayload`] for a View Function call.
    /// No validation hash is attached, so the payload cannot be validated.
    pub fn new(
        pallet_name: impl Into<String>,
        function_name: impl Into<String>,
        args: ArgsType,
    ) -> Self {
        Self {
            pallet_name: Cow::Owned(pallet_name.into()),
            function_name: Cow::Owned(function_name.into()),
            args,
            validation_hash: None,
            _marker: PhantomData,
        }
    }
    /// Create a new static [`StaticPayload`] for a View Function call
    /// using static function name and scale-encoded argument data.
    ///
    /// This is only expected to be used from codegen.
    #[doc(hidden)]
    pub fn new_static(
        pallet_name: &'static str,
        function_name: &'static str,
        args: ArgsType,
        hash: [u8; 32],
    ) -> StaticPayload<ArgsType, ReturnTy> {
        Self {
            pallet_name: Cow::Borrowed(pallet_name),
            function_name: Cow::Borrowed(function_name),
            args,
            validation_hash: Some(hash),
            _marker: PhantomData,
        }
    }
    /// Do not validate this call prior to submitting it.
    pub fn unvalidated(mut self) -> Self {
        self.validation_hash = None;
        self
    }
}
/// Create a new [`DynamicPayload`] to call a View Function.
pub fn dynamic<ArgsType, ReturnType>(
pallet_name: impl Into<String>,
function_name: impl Into<String>,
args: ArgsType,
) -> DynamicPayload<ArgsType, ReturnType> {
DynamicPayload::new(pallet_name, function_name, args)
}
+77
View File
@@ -0,0 +1,77 @@
[package]
name = "pezkuwi-subxt-lightclient"
version.workspace = true
authors.workspace = true
edition.workspace = true
rust-version.workspace = true
# Publishing is allowed for this crate.
publish = true
license.workspace = true
readme = "../README.md"
repository.workspace = true
documentation.workspace = true
homepage.workspace = true
description = "Light Client for chain interaction"
keywords = ["blockchain", "parity", "bizinikiwi"]

[lints]
workspace = true

[features]
default = ["native"]
# Enable this for native (ie non web/wasm builds).
# Exactly 1 of "web" and "native" is expected.
native = [
    "smoldot-light/std",
    "tokio/rt",
]
# Enable this for web/wasm builds.
# Exactly 1 of "web" and "native" is expected.
web = [
    "getrandom/js",
    "smoldot/std",
    # For the light-client platform.
    "futures-timer/wasm-bindgen",
    "pin-project",
    "wasm-bindgen-futures",
    "web-time",
    # For websocket.
    "js-sys",
    "send_wrapper",
    "wasm-bindgen",
    "web-sys",
]

[dependencies]
futures = { workspace = true, features = ["async-await"] }
futures-util = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["default", "raw_value"] }
smoldot-light = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["sync"] }
tokio-stream = { workspace = true }
tracing = { workspace = true }
# Only needed for web; all optional and enabled by the "web" feature above.
futures-timer = { workspace = true, optional = true }
getrandom = { workspace = true, optional = true }
js-sys = { workspace = true, optional = true }
pin-project = { workspace = true, optional = true }
send_wrapper = { workspace = true, optional = true }
smoldot = { workspace = true, optional = true }
wasm-bindgen = { workspace = true, optional = true }
wasm-bindgen-futures = { workspace = true, optional = true }
web-sys = { workspace = true, optional = true }
web-time = { workspace = true, optional = true }

[package.metadata.docs.rs]
default-features = true
rustdoc-args = ["--cfg", "docsrs"]

[package.metadata.playground]
default-features = true
+486
View File
@@ -0,0 +1,486 @@
// Copyright 2019-2024 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::{JsonRpcError, LightClientRpcError, rpc::RpcResponse, shared_client::SharedClient};
use futures::{FutureExt, stream::StreamExt};
use serde_json::value::RawValue;
use smoldot_light::platform::PlatformRef;
use std::{collections::HashMap, str::FromStr};
use tokio::sync::{mpsc, oneshot};
use tokio_stream::wrappers::UnboundedReceiverStream;
// Target string used by every `tracing` call in this module.
const LOG_TARGET: &str = "subxt-light-client-background-task";
/// Response from [`BackgroundTaskHandle::request()`].
pub type MethodResponse = Result<Box<RawValue>, LightClientRpcError>;
/// Response from [`BackgroundTaskHandle::subscribe()`].
/// On success: the subscription ID plus the stream of notifications.
pub type SubscriptionResponse = Result<
    (SubscriptionId, mpsc::UnboundedReceiver<Result<Box<RawValue>, JsonRpcError>>),
    LightClientRpcError,
>;
/// Type of subscription IDs we can get back.
pub type SubscriptionId = String;
/// Message protocol between the front-end client that submits the RPC requests
/// and the background task which fetches responses from Smoldot. Hidden behind
/// the [`BackgroundTaskHandle`].
// Each variant carries a oneshot `sender` through which exactly one reply is
// delivered back to the front end.
#[derive(Debug)]
enum Message {
    /// The RPC method request.
    Request {
        /// The method of the request.
        method: String,
        /// The parameters of the request.
        params: Option<Box<RawValue>>,
        /// Channel used to send back the method response.
        sender: oneshot::Sender<MethodResponse>,
    },
    /// The RPC subscription (pub/sub) request.
    Subscription {
        /// The method of the request.
        method: String,
        /// The method to unsubscribe.
        unsubscribe_method: String,
        /// The parameters of the request.
        params: Option<Box<RawValue>>,
        /// Channel used to send back the subscription response.
        sender: oneshot::Sender<SubscriptionResponse>,
    },
}
/// A handle to communicate with the background task.
// Cheap to clone: cloning just clones the unbounded channel sender.
#[derive(Clone, Debug)]
pub struct BackgroundTaskHandle {
    to_backend: mpsc::UnboundedSender<Message>,
}
impl BackgroundTaskHandle {
    /// Make an RPC request via the background task.
    ///
    /// Errors with [`LightClientRpcError::BackgroundTaskDropped`] if the
    /// background task is gone (either the send or the reply channel fails).
    pub async fn request(&self, method: String, params: Option<Box<RawValue>>) -> MethodResponse {
        let (sender, receiver) = oneshot::channel();
        self.to_backend
            .send(Message::Request { method, params, sender })
            .map_err(|_| LightClientRpcError::BackgroundTaskDropped)?;
        // A closed reply channel also means the background task went away.
        receiver.await.map_err(|_| LightClientRpcError::BackgroundTaskDropped)?
    }
    /// Subscribe to some RPC method via the background task.
    ///
    /// Errors with [`LightClientRpcError::BackgroundTaskDropped`] if the
    /// background task is gone (either the send or the reply channel fails).
    pub async fn subscribe(
        &self,
        method: String,
        params: Option<Box<RawValue>>,
        unsubscribe_method: String,
    ) -> SubscriptionResponse {
        let (sender, receiver) = oneshot::channel();
        self.to_backend
            .send(Message::Subscription { method, unsubscribe_method, params, sender })
            .map_err(|_| LightClientRpcError::BackgroundTaskDropped)?;
        // A closed reply channel also means the background task went away.
        receiver.await.map_err(|_| LightClientRpcError::BackgroundTaskDropped)?
    }
}
/// A background task which runs with [`BackgroundTask::run()`] and manages messages
/// coming to/from Smoldot.
#[allow(clippy::type_complexity)]
pub struct BackgroundTask<TPlatform: PlatformRef, TChain> {
    // The two message streams (front end + Smoldot) consumed by `run`.
    channels: BackgroundTaskChannels<TPlatform>,
    // Mutable bookkeeping (request/subscription maps etc.) updated by `run`.
    data: BackgroundTaskData<TPlatform, TChain>,
}
impl<TPlatform: PlatformRef, TChain> BackgroundTask<TPlatform, TChain> {
    /// Constructs a new [`BackgroundTask`].
    ///
    /// Returns the task itself (to be driven via [`BackgroundTask::run()`]) and
    /// the [`BackgroundTaskHandle`] used by the front end to talk to it.
    pub(crate) fn new(
        client: SharedClient<TPlatform, TChain>,
        chain_id: smoldot_light::ChainId,
        from_back: smoldot_light::JsonRpcResponses<TPlatform>,
    ) -> (BackgroundTask<TPlatform, TChain>, BackgroundTaskHandle) {
        let (tx, rx) = mpsc::unbounded_channel();
        let bg_task = BackgroundTask {
            channels: BackgroundTaskChannels {
                from_front: UnboundedReceiverStream::new(rx),
                from_back,
            },
            data: BackgroundTaskData {
                client,
                chain_id,
                last_request_id: 0,
                pending_subscriptions: HashMap::new(),
                requests: HashMap::new(),
                subscriptions: HashMap::new(),
            },
        };
        let bg_handle = BackgroundTaskHandle { to_backend: tx };
        (bg_task, bg_handle)
    }
    /// Run the background task, which:
    /// - Forwards messages/subscription requests to Smoldot from the front end.
    /// - Forwards responses back from Smoldot to the front end.
    ///
    /// The loop exits (and the task ends) as soon as either channel closes.
    pub async fn run(self) {
        let chain_id = self.data.chain_id;
        let mut channels = self.channels;
        let mut data = self.data;
        loop {
            // Futures are re-created and re-pinned on every iteration so that a
            // completed branch doesn't leave a stale future behind.
            tokio::pin! {
                let from_front_fut = channels.from_front.next().fuse();
                let from_back_fut = channels.from_back.next().fuse();
            }
            futures::select! {
                // Message coming from the front end/client.
                front_message = from_front_fut => {
                    let Some(message) = front_message else {
                        tracing::trace!(target: LOG_TARGET, "Subxt channel closed");
                        break;
                    };
                    tracing::trace!(
                        target: LOG_TARGET,
                        "Received register message {:?}",
                        message
                    );
                    data.handle_requests(message).await;
                },
                // Message coming from Smoldot.
                back_message = from_back_fut => {
                    let Some(back_message) = back_message else {
                        tracing::trace!(target: LOG_TARGET, "Smoldot RPC responses channel closed");
                        break;
                    };
                    tracing::trace!(
                        target: LOG_TARGET,
                        "Received smoldot RPC chain {chain_id:?} result {}",
                        // Responses can be huge; trim what gets logged.
                        trim_message(&back_message),
                    );
                    data.handle_rpc_response(back_message);
                }
            }
        }
        tracing::trace!(target: LOG_TARGET, "Task closed");
    }
}
struct BackgroundTaskChannels<TPlatform: PlatformRef> {
    /// Messages sent into this background task from the front end.
    from_front: UnboundedReceiverStream<Message>,
    /// Messages sent into the background task from Smoldot.
    from_back: smoldot_light::JsonRpcResponses<TPlatform>,
}
struct BackgroundTaskData<TPlatform: PlatformRef, TChain> {
    /// A smoldot light client that can be shared.
    client: SharedClient<TPlatform, TChain>,
    /// Knowing the chain ID helps with debugging, but isn't otherwise necessary.
    chain_id: smoldot_light::ChainId,
    /// Know which Id to use next for new requests/subscriptions.
    last_request_id: usize,
    /// Map the request ID of a RPC method to the frontend `Sender`.
    requests: HashMap<usize, oneshot::Sender<MethodResponse>>,
    /// Subscription calls first need to make a plain RPC method
    /// request to obtain the subscription ID.
    ///
    /// The RPC method request is made in the background and the response should
    /// not be sent back to the user.
    /// Map the request ID of a RPC method to the frontend `Sender`.
    pending_subscriptions: HashMap<usize, PendingSubscription>,
    /// Map the subscription ID to the frontend `Sender`.
    ///
    /// The subscription ID is entirely generated by the node (smoldot). Therefore, it is
    /// possible for two distinct subscriptions of different chains to have the same subscription
    /// ID.
    subscriptions: HashMap<String, ActiveSubscription>,
}
/// The state needed to resolve the subscription ID and send
/// back the response to frontend.
struct PendingSubscription {
    /// Send the method response ID back to the user.
    ///
    /// It contains the subscription ID if successful, or an JSON RPC error object.
    response_sender: oneshot::Sender<SubscriptionResponse>,
    /// The unsubscribe method to call when the user drops the receiver
    /// part of the channel.
    unsubscribe_method: String,
}
/// The state of the subscription.
struct ActiveSubscription {
    /// Channel to send the subscription notifications back to frontend.
    notification_sender: mpsc::UnboundedSender<Result<Box<RawValue>, JsonRpcError>>,
    /// The unsubscribe method to call when the user drops the receiver
    /// part of the channel.
    unsubscribe_method: String,
}
/// Truncate `s` for logging purposes, keeping at most the first 512 characters.
///
/// The fast path compares the *byte* length; anything shorter than 512 bytes is
/// returned untouched. Longer strings are cut at a character boundary, so the
/// slice below can never panic.
fn trim_message(s: &str) -> &str {
    const MAX_SIZE: usize = 512;
    if s.len() < MAX_SIZE {
        return s;
    }
    // Byte index of character number `MAX_SIZE`; if the string has fewer
    // characters than that (multi-byte content), keep it whole.
    s.char_indices().nth(MAX_SIZE).map_or(s, |(idx, _)| &s[..idx])
}
impl<TPlatform: PlatformRef, TChain> BackgroundTaskData<TPlatform, TChain> {
    /// Fetch and increment the request ID.
    ///
    /// Uses wrapping arithmetic, so IDs may in principle repeat after
    /// `usize::MAX` requests.
    fn next_id(&mut self) -> usize {
        self.last_request_id = self.last_request_id.wrapping_add(1);
        self.last_request_id
    }
    /// Handle the registration messages received from the user.
    ///
    /// Builds the JSON-RPC request string, registers the response channel,
    /// then submits the request to smoldot. If submission fails, the error is
    /// reported back on the channel immediately and tracking is removed.
    async fn handle_requests(&mut self, message: Message) {
        match message {
            Message::Request { method, params, sender } => {
                let id = self.next_id();
                let chain_id = self.chain_id;
                // `params` is spliced verbatim into the request; absent
                // params are encoded as JSON `null`.
                let params = match &params {
                    Some(params) => params.get(),
                    None => "null",
                };
                let request = format!(
                    r#"{{"jsonrpc":"2.0","id":"{id}", "method":"{method}","params":{params}}}"#
                );
                // Track the response channel before handing the request to smoldot.
                self.requests.insert(id, sender);
                tracing::trace!(target: LOG_TARGET, "Tracking request id={id} chain={chain_id:?}");
                let result = self.client.json_rpc_request(request, chain_id);
                if let Err(err) = result {
                    tracing::warn!(
                        target: LOG_TARGET,
                        "Cannot send RPC request to lightclient {:?}",
                        err.to_string()
                    );
                    let sender = self.requests.remove(&id).expect("Channel is inserted above; qed");
                    // Send the error back to frontend.
                    if sender.send(Err(LightClientRpcError::SmoldotError(err.to_string()))).is_err()
                    {
                        tracing::warn!(
                            target: LOG_TARGET,
                            "Cannot send RPC request error to id={id}",
                        );
                    }
                } else {
                    tracing::trace!(target: LOG_TARGET, "Submitted to smoldot request with id={id}");
                }
            },
            Message::Subscription { method, unsubscribe_method, params, sender } => {
                let id = self.next_id();
                let chain_id = self.chain_id;
                // For subscriptions we need to make a plain RPC request to the subscription method.
                // The server will return as a result the subscription ID.
                let params = match &params {
                    Some(params) => params.get(),
                    None => "null",
                };
                let request = format!(
                    r#"{{"jsonrpc":"2.0","id":"{id}", "method":"{method}","params":{params}}}"#
                );
                tracing::trace!(target: LOG_TARGET, "Tracking subscription request id={id} chain={chain_id:?}");
                // Remember the unsubscribe method so it can be invoked later if
                // the user drops the notification receiver.
                let pending_subscription =
                    PendingSubscription { response_sender: sender, unsubscribe_method };
                self.pending_subscriptions.insert(id, pending_subscription);
                let result = self.client.json_rpc_request(request, chain_id);
                if let Err(err) = result {
                    tracing::warn!(
                        target: LOG_TARGET,
                        "Cannot send RPC request to lightclient {:?}",
                        err.to_string()
                    );
                    let subscription_id_state = self
                        .pending_subscriptions
                        .remove(&id)
                        .expect("Channels are inserted above; qed");
                    // Send the error back to frontend.
                    if subscription_id_state
                        .response_sender
                        .send(Err(LightClientRpcError::SmoldotError(err.to_string())))
                        .is_err()
                    {
                        tracing::warn!(
                            target: LOG_TARGET,
                            "Cannot send RPC request error to id={id}",
                        );
                    }
                } else {
                    tracing::trace!(target: LOG_TARGET, "Submitted to smoldot subscription request with id={id}");
                }
            },
        };
    }
    /// Parse the response received from the light client and send it to the appropriate user.
    fn handle_rpc_response(&mut self, response: String) {
        let chain_id = self.chain_id;
        tracing::trace!(target: LOG_TARGET, "Received from smoldot response='{}' chain={chain_id:?}", trim_message(&response));
        match RpcResponse::from_str(&response) {
            // A reply to a plain method call, or to the initial call of a
            // subscription (whose result is the subscription ID).
            Ok(RpcResponse::Method { id, result }) => {
                let Ok(id) = id.parse::<usize>() else {
                    tracing::warn!(target: LOG_TARGET, "Cannot send response. Id={id} chain={chain_id:?} is not a valid number");
                    return;
                };
                // Send the response back.
                if let Some(sender) = self.requests.remove(&id) {
                    if sender.send(Ok(result)).is_err() {
                        tracing::warn!(
                            target: LOG_TARGET,
                            "Cannot send method response to id={id} chain={chain_id:?}",
                        );
                    }
                } else if let Some(pending_subscription) = self.pending_subscriptions.remove(&id) {
                    let Ok(sub_id) = serde_json::from_str::<SubscriptionId>(result.get()) else {
                        tracing::warn!(
                            target: LOG_TARGET,
                            "Subscription id='{result}' chain={chain_id:?} is not a valid string",
                        );
                        return;
                    };
                    tracing::trace!(target: LOG_TARGET, "Received subscription id={sub_id} chain={chain_id:?}");
                    let (sub_tx, sub_rx) = mpsc::unbounded_channel();
                    // Send the method response and a channel to receive notifications back.
                    if pending_subscription
                        .response_sender
                        .send(Ok((sub_id.clone(), sub_rx)))
                        .is_err()
                    {
                        tracing::warn!(
                            target: LOG_TARGET,
                            "Cannot send subscription ID response to id={id} chain={chain_id:?}",
                        );
                        return;
                    }
                    // Store the other end of the notif channel to send future subscription
                    // notifications to.
                    self.subscriptions.insert(
                        sub_id,
                        ActiveSubscription {
                            notification_sender: sub_tx,
                            unsubscribe_method: pending_subscription.unsubscribe_method,
                        },
                    );
                } else {
                    tracing::warn!(
                        target: LOG_TARGET,
                        "Response id={id} chain={chain_id:?} is not tracked",
                    );
                }
            },
            // An error reply to a tracked method call or pending subscription.
            Ok(RpcResponse::MethodError { id, error }) => {
                let Ok(id) = id.parse::<usize>() else {
                    tracing::warn!(target: LOG_TARGET, "Cannot send error. Id={id} chain={chain_id:?} is not a valid number");
                    return;
                };
                if let Some(sender) = self.requests.remove(&id) {
                    if sender
                        .send(Err(LightClientRpcError::JsonRpcError(JsonRpcError(error))))
                        .is_err()
                    {
                        tracing::warn!(
                            target: LOG_TARGET,
                            "Cannot send method response to id={id} chain={chain_id:?}",
                        );
                    }
                } else if let Some(subscription_id_state) = self.pending_subscriptions.remove(&id) {
                    if subscription_id_state
                        .response_sender
                        .send(Err(LightClientRpcError::JsonRpcError(JsonRpcError(error))))
                        .is_err()
                    {
                        tracing::warn!(
                            target: LOG_TARGET,
                            "Cannot send method response to id {id} chain={chain_id:?}",
                        );
                    }
                }
            },
            // A notification for an active subscription.
            Ok(RpcResponse::Notification { method, subscription_id, result }) => {
                let Some(active_subscription) = self.subscriptions.get_mut(&subscription_id) else {
                    tracing::warn!(
                        target: LOG_TARGET,
                        "Subscription response id={subscription_id} chain={chain_id:?} method={method} is not tracked",
                    );
                    return;
                };
                // A closed receiver means the user dropped the subscription
                // stream; proactively unsubscribe from the node.
                if active_subscription.notification_sender.send(Ok(result)).is_err() {
                    self.unsubscribe(&subscription_id, chain_id);
                }
            },
            // An error notification for an active subscription.
            Ok(RpcResponse::NotificationError { method, subscription_id, error }) => {
                let Some(active_subscription) = self.subscriptions.get_mut(&subscription_id) else {
                    tracing::warn!(
                        target: LOG_TARGET,
                        "Subscription error id={subscription_id} chain={chain_id:?} method={method} is not tracked",
                    );
                    return;
                };
                if active_subscription.notification_sender.send(Err(JsonRpcError(error))).is_err() {
                    self.unsubscribe(&subscription_id, chain_id);
                }
            },
            Err(err) => {
                tracing::warn!(target: LOG_TARGET, "cannot decode RPC response {:?}", err);
            },
        }
    }
    // Unsubscribe from a subscription.
    //
    // Removes local tracking and submits the stored unsubscribe method to the
    // node; failures are logged but not reported further.
    fn unsubscribe(&mut self, subscription_id: &str, chain_id: smoldot_light::ChainId) {
        let Some(active_subscription) = self.subscriptions.remove(subscription_id) else {
            // Subscription doesn't exist so nothing more to do.
            return;
        };
        // Build a call to unsubscribe from this method.
        let unsub_id = self.next_id();
        let request = format!(
            r#"{{"jsonrpc":"2.0","id":"{}", "method":"{}","params":["{}"]}}"#,
            unsub_id, active_subscription.unsubscribe_method, subscription_id
        );
        // Submit it.
        if let Err(err) = self.client.json_rpc_request(request, chain_id) {
            tracing::warn!(
                target: LOG_TARGET,
                "Failed to unsubscribe id={subscription_id} chain={chain_id:?} method={:?} err={err:?}", active_subscription.unsubscribe_method
            );
        } else {
            tracing::debug!(target: LOG_TARGET,"Unsubscribe id={subscription_id} chain={chain_id:?} method={:?}", active_subscription.unsubscribe_method);
        }
    }
}
+65
View File
@@ -0,0 +1,65 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use serde_json::Value;
use std::borrow::Cow;
/// Something went wrong building chain config.
#[non_exhaustive]
#[derive(thiserror::Error, Debug)]
pub enum ChainConfigError {
    /// The provided chain spec is the wrong shape.
    ///
    /// Returned when the spec is not valid JSON, or its top level is not a
    /// JSON object.
    #[error("Invalid chain spec format")]
    InvalidSpecFormat,
}
/// Configuration to connect to a chain.
///
/// Build one with [`ChainConfig::chain_spec`], or via the `From`
/// conversions from `&str`/`String`.
pub struct ChainConfig<'a> {
    /// The chain spec to use. Borrowed where possible; owned once the spec
    /// has been modified (e.g. by [`ChainConfig::set_bootnodes`]).
    chain_spec: Cow<'a, str>,
}
impl<'a> From<&'a str> for ChainConfig<'a> {
fn from(chain_spec: &'a str) -> Self {
ChainConfig::chain_spec(chain_spec)
}
}
impl From<String> for ChainConfig<'_> {
fn from(chain_spec: String) -> Self {
ChainConfig::chain_spec(chain_spec)
}
}
impl<'a> ChainConfig<'a> {
    /// Construct a chain config from a chain spec.
    pub fn chain_spec(chain_spec: impl Into<Cow<'a, str>>) -> Self {
        Self { chain_spec: chain_spec.into() }
    }
    /// Set the bootnodes to the given ones.
    ///
    /// This re-serializes the spec with the given addresses under the
    /// `bootNodes` key, replacing any bootnodes already present.
    pub fn set_bootnodes<S: AsRef<str>>(
        self,
        bootnodes: impl IntoIterator<Item = S>,
    ) -> Result<Self, ChainConfigError> {
        let mut spec_json: Value = serde_json::from_str(&self.chain_spec)
            .map_err(|_| ChainConfigError::InvalidSpecFormat)?;
        // The spec must be a JSON object for us to set a key on it.
        let Value::Object(map) = &mut spec_json else {
            return Err(ChainConfigError::InvalidSpecFormat);
        };
        let nodes: Vec<Value> =
            bootnodes.into_iter().map(|s| Value::String(s.as_ref().to_owned())).collect();
        map.insert("bootNodes".to_string(), Value::Array(nodes));
        Ok(Self { chain_spec: Cow::Owned(spec_json.to_string()) })
    }
    // Used internally to fetch the chain spec back out.
    pub(crate) fn as_chain_spec(&self) -> &str {
        &self.chain_spec
    }
}
+258
View File
@@ -0,0 +1,258 @@
// Copyright 2019-2025 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
//! A wrapper around [`smoldot_light`] which provides a light client capable of connecting
//! to Bizinikiwi based chains.
#![deny(missing_docs)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[cfg(any(
all(feature = "web", feature = "native"),
not(any(feature = "web", feature = "native"))
))]
compile_error!("subxt-lightclient: exactly one of the 'web' and 'native' features should be used.");
mod platform;
mod shared_client;
// mod receiver;
mod background;
mod chain_config;
mod rpc;
use background::{BackgroundTask, BackgroundTaskHandle};
use futures::Stream;
use platform::DefaultPlatform;
use serde_json::value::RawValue;
use shared_client::SharedClient;
use std::future::Future;
use tokio::sync::mpsc;
pub use chain_config::{ChainConfig, ChainConfigError};
/// Things that can go wrong when constructing the [`LightClient`].
#[derive(Debug, thiserror::Error)]
pub enum LightClientError {
    /// Error encountered while adding the chain to the light-client.
    ///
    /// Carries the stringified error reported by smoldot when adding the chain.
    #[error("Failed to add the chain to the light client: {0}.")]
    AddChainError(String),
}
/// Things that can go wrong calling methods of [`LightClientRpc`].
#[derive(Debug, thiserror::Error)]
pub enum LightClientRpcError {
    /// Error response from the JSON-RPC server.
    #[error(transparent)]
    JsonRpcError(JsonRpcError),
    /// Smoldot could not handle the RPC call.
    #[error("Smoldot could not handle the RPC call: {0}.")]
    SmoldotError(String),
    /// Background task dropped.
    ///
    /// The task driving the light client connection is no longer running,
    /// so no further requests can be served.
    #[error("The background task was dropped.")]
    BackgroundTaskDropped,
}
/// An error response from the JSON-RPC server (ie smoldot) in response to
/// a method call or as a subscription notification.
///
/// Wraps the raw JSON error object; use [`JsonRpcError::try_deserialize`]
/// to decode it into a concrete type.
#[derive(Debug, thiserror::Error)]
#[error("RPC Error: {0}.")]
pub struct JsonRpcError(Box<RawValue>);
impl JsonRpcError {
    /// Attempt to deserialize this error into some type.
    ///
    /// # Errors
    ///
    /// Returns a [`serde_json::Error`] if the raw JSON error object does not
    /// match the shape of `T`.
    pub fn try_deserialize<'a, T: serde::de::Deserialize<'a>>(
        &'a self,
    ) -> Result<T, serde_json::Error> {
        serde_json::from_str(self.0.get())
    }
}
/// This represents a single light client connection to the network. Instantiate
/// it with [`LightClient::relay_chain()`] to communicate with a relay chain, and
/// then call [`LightClient::parachain()`] to establish connections to parachains.
#[derive(Clone)]
pub struct LightClient {
    /// Shared handle to the underlying smoldot client.
    client: SharedClient<DefaultPlatform>,
    /// Chain ID of the relay chain; parachains are added relative to it.
    relay_chain_id: smoldot_light::ChainId,
}
impl LightClient {
    /// Given a chain spec, establish a connection to a relay chain. Any subsequent calls to
    /// [`LightClient::parachain()`] will set this as the relay chain.
    ///
    /// Returns both the [`LightClient`] handle and a [`LightClientRpc`]
    /// connected to the relay chain itself.
    ///
    /// # Panics
    ///
    /// The panic behaviour depends on the feature flag being used:
    ///
    /// ## Native
    ///
    /// Panics when called outside of a `tokio` runtime context.
    ///
    /// ## Web
    ///
    /// If smoldot panics, then the promise created will be leaked. For more details, see
    /// <https://docs.rs/wasm-bindgen-futures/latest/wasm_bindgen_futures/fn.future_to_promise.html>.
    pub fn relay_chain<'a>(
        chain_config: impl Into<ChainConfig<'a>>,
    ) -> Result<(Self, LightClientRpc), LightClientError> {
        let mut client = smoldot_light::Client::new(platform::build_platform());
        let chain_config = chain_config.into();
        let chain_spec = chain_config.as_chain_spec();
        let config = smoldot_light::AddChainConfig {
            specification: chain_spec,
            json_rpc: smoldot_light::AddChainConfigJsonRpc::Enabled {
                // Effectively unlimited pending requests and subscriptions.
                max_pending_requests: u32::MAX.try_into().unwrap(),
                max_subscriptions: u32::MAX,
            },
            // No previously-persisted database content is supplied.
            database_content: "",
            potential_relay_chains: std::iter::empty(),
            user_data: (),
        };
        let added_chain = client
            .add_chain(config)
            .map_err(|err| LightClientError::AddChainError(err.to_string()))?;
        let relay_chain_id = added_chain.chain_id;
        let rpc_responses =
            added_chain.json_rpc_responses.expect("Light client RPC configured; qed");
        let shared_client: SharedClient<_> = client.into();
        let light_client_rpc =
            LightClientRpc::new_raw(shared_client.clone(), relay_chain_id, rpc_responses);
        let light_client = Self { client: shared_client, relay_chain_id };
        Ok((light_client, light_client_rpc))
    }
    /// Given a chain spec, establish a connection to a parachain.
    ///
    /// # Panics
    ///
    /// The panic behaviour depends on the feature flag being used:
    ///
    /// ## Native
    ///
    /// Panics when called outside of a `tokio` runtime context.
    ///
    /// ## Web
    ///
    /// If smoldot panics, then the promise created will be leaked. For more details, see
    /// <https://docs.rs/wasm-bindgen-futures/latest/wasm_bindgen_futures/fn.future_to_promise.html>.
    pub fn parachain<'a>(
        &self,
        chain_config: impl Into<ChainConfig<'a>>,
    ) -> Result<LightClientRpc, LightClientError> {
        let chain_config = chain_config.into();
        let chain_spec = chain_config.as_chain_spec();
        let config = smoldot_light::AddChainConfig {
            specification: chain_spec,
            json_rpc: smoldot_light::AddChainConfigJsonRpc::Enabled {
                // Effectively unlimited pending requests and subscriptions.
                max_pending_requests: u32::MAX.try_into().unwrap(),
                max_subscriptions: u32::MAX,
            },
            database_content: "",
            // The parachain is added relative to the relay chain established
            // in `relay_chain()`.
            potential_relay_chains: std::iter::once(self.relay_chain_id),
            user_data: (),
        };
        let added_chain = self
            .client
            .add_chain(config)
            .map_err(|err| LightClientError::AddChainError(err.to_string()))?;
        let chain_id = added_chain.chain_id;
        let rpc_responses =
            added_chain.json_rpc_responses.expect("Light client RPC configured; qed");
        Ok(LightClientRpc::new_raw(self.client.clone(), chain_id, rpc_responses))
    }
}
/// This represents a single RPC connection to a specific chain, and is constructed by calling
/// one of the methods on [`LightClient`]. Using this, you can make RPC requests to the chain.
#[derive(Clone, Debug)]
pub struct LightClientRpc {
    /// Handle used to forward requests and subscriptions to the background task.
    handle: BackgroundTaskHandle,
}
impl LightClientRpc {
    // Dev note: this would provide a "low level" interface if one is needed.
    // Do we actually need to provide this, or can we entirely hide Smoldot?
    pub(crate) fn new_raw<TPlat, TChain>(
        client: impl Into<SharedClient<TPlat, TChain>>,
        chain_id: smoldot_light::ChainId,
        rpc_responses: smoldot_light::JsonRpcResponses<TPlat>,
    ) -> Self
    where
        TPlat: smoldot_light::platform::PlatformRef + Send + 'static,
        TChain: Send + 'static,
    {
        let (task, handle) = BackgroundTask::new(client.into(), chain_id, rpc_responses);
        // For now we spawn the background task internally, but later we can expose
        // methods to give this back to the user so that they can exert backpressure.
        spawn(async move { task.run().await });
        Self { handle }
    }
    /// Make an RPC request to a chain, getting back a result.
    pub async fn request(
        &self,
        method: String,
        params: Option<Box<RawValue>>,
    ) -> Result<Box<RawValue>, LightClientRpcError> {
        self.handle.request(method, params).await
    }
    /// Subscribe to some RPC method, getting back a stream of notifications.
    pub async fn subscribe(
        &self,
        method: String,
        params: Option<Box<RawValue>>,
        unsub: String,
    ) -> Result<LightClientRpcSubscription, LightClientRpcError> {
        self.handle
            .subscribe(method, params, unsub)
            .await
            .map(|(id, notifications)| LightClientRpcSubscription { id, notifications })
    }
}
/// A stream of notifications handed back when [`LightClientRpc::subscribe`] is called.
pub struct LightClientRpcSubscription {
    /// Channel of notification payloads (or per-notification errors) fed by
    /// the background task.
    notifications: mpsc::UnboundedReceiver<Result<Box<RawValue>, JsonRpcError>>,
    /// The subscription ID assigned by the node.
    id: String,
}
impl LightClientRpcSubscription {
    /// Return the subscription ID
    pub fn id(&self) -> &str {
        self.id.as_str()
    }
}
impl Stream for LightClientRpcSubscription {
    type Item = Result<Box<RawValue>, JsonRpcError>;
    /// Yield the next subscription notification, or `None` once the sending
    /// side (the background task) has closed the channel.
    fn poll_next(
        mut self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Option<Self::Item>> {
        // Delegates to the unbounded receiver; the struct is `Unpin` (which
        // this mutable access through `Pin` relies on).
        self.notifications.poll_recv(cx)
    }
}
/// A quick helper to spawn a task that works for WASM.
///
/// Exactly one of the `native`/`web` features is enabled (enforced by the
/// `compile_error!` at the top of this file), so exactly one branch is compiled.
fn spawn<F: Future + Send + 'static>(future: F) {
    // Native: spawn onto the ambient tokio runtime.
    #[cfg(feature = "native")]
    tokio::spawn(async move {
        future.await;
    });
    // Web: spawn onto the wasm-bindgen single-threaded executor.
    #[cfg(feature = "web")]
    wasm_bindgen_futures::spawn_local(async move {
        future.await;
    });
}

Some files were not shown because too many files have changed in this diff Show More