Use scale-encode and scale-decode to encode and decode based on metadata (#842)

* WIP EncodeAsType and DecodeAsType

* remove silly cli experiment code

* Get things finally compiling with EncodeAsType and DecodeAsType

* update codegen test and WrapperKeepOpaque proper impl (in case it shows up in codegen)

* fix tests

* accommodate scale-value changes

* starting to migrate to EncodeAsType/DecodeAsType

* static event decoding and tx encoding to use DecodeAsFields/EncodeAsFields

* some tidy up and add decode(skip) attrs where needed

* fix root event decoding

* #[codec(skip)] will do, and combine map_key stuff into storage_address since it's all specific to that

* fmt and clippy

* update Cargo.lock

* remove patched scale-encode

* bump scale-encode to 0.1 and remove unused dep in testing crate

* update deps and use released scale-decode

* update scale-value to latest to remove git branch

* Apply suggestions from code review

Co-authored-by: Alexandru Vasile <60601340+lexnv@users.noreply.github.com>

* remove sorting in derives/attr generation; spit them out in order given

* re-add derive sorting; it's a hashmap

* StaticTxPayload and DynamicTxPayload rolled into single Payload struct

* StaticStorageAddress and DynamicStorageAddress into single Address struct

* Fix storage address byte retrieval

* StaticConstantAddress and DynamicConstantAddress => Address

* Simplify storage codegen to fix test

* Add comments

* Alias to RuntimeEvent rather than making another, and prep for substituting call type

* remove unnecessary clone

* Fix docs and failing UI test

* root_bytes -> to_root_bytes

* document error case in StorageClient::address_bytes()

---------

Co-authored-by: Alexandru Vasile <60601340+lexnv@users.noreply.github.com>
This commit is contained in:
James Wilson
2023-03-21 15:31:13 +00:00
committed by GitHub
parent c9527abaa8
commit c63ff6ec6d
50 changed files with 9965 additions and 6262 deletions
Generated
+173 -166
View File
@@ -94,9 +94,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.69"
version = "1.0.70"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800"
checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4"
[[package]]
name = "array-bytes"
@@ -106,9 +106,9 @@ checksum = "f52f63c5c1316a16a4b35eaac8b76a98248961a533f061684cb2a7cb0eafb6c6"
[[package]]
name = "arrayref"
version = "0.3.6"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544"
checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545"
[[package]]
name = "arrayvec"
@@ -130,23 +130,22 @@ checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9"
[[package]]
name = "async-lock"
version = "2.6.0"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685"
checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7"
dependencies = [
"event-listener",
"futures-lite",
]
[[package]]
name = "async-trait"
version = "0.1.64"
version = "0.1.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cd7fce9ba8c3c042128ce72d8b2ddbf3a05747efb67ea0313c635e10bda47a2"
checksum = "86ea188f25f0255d8f92797797c97ebf5631fa88178beb1a46fdf5622c9a00e4"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.4",
]
[[package]]
@@ -201,9 +200,9 @@ checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"
[[package]]
name = "basic-toml"
version = "0.1.1"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e819b667739967cd44d308b8c7b71305d8bb0729ac44a248aa08f33d01950b4"
checksum = "5c0de75129aa8d0cceaf750b89013f0e08804d6ec61416da787b35ad0d7cddf1"
dependencies = [
"serde",
]
@@ -282,9 +281,9 @@ dependencies = [
[[package]]
name = "block-buffer"
version = "0.10.3"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array 0.14.6",
]
@@ -360,9 +359,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chrono"
version = "0.4.23"
version = "0.4.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f"
checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b"
dependencies = [
"iana-time-zone",
"num-integer",
@@ -417,7 +416,7 @@ checksum = "42dfd32784433290c51d92c438bb72ea5063797fc3cc9a21a8c4346bebbb2098"
dependencies = [
"bitflags 2.0.2",
"clap_derive",
"clap_lex 0.3.2",
"clap_lex 0.3.3",
"is-terminal",
"once_cell",
"strsim",
@@ -448,9 +447,9 @@ dependencies = [
[[package]]
name = "clap_lex"
version = "0.3.2"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "350b9cf31731f9957399229e9b2adc51eeabdfbe9d71d9a0552275fd12710d09"
checksum = "033f6b7a4acb1f358c742aaca805c939ee73b4c6209ae4318ec7aca81c42e646"
dependencies = [
"os_str_bytes",
]
@@ -591,9 +590,9 @@ dependencies = [
[[package]]
name = "crossbeam-channel"
version = "0.5.6"
version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
checksum = "cf2b3e8478797446514c91ef04bafcb59faba183e621ad488df88983cc14128c"
dependencies = [
"cfg-if",
"crossbeam-utils",
@@ -601,9 +600,9 @@ dependencies = [
[[package]]
name = "crossbeam-deque"
version = "0.8.2"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
dependencies = [
"cfg-if",
"crossbeam-epoch",
@@ -612,22 +611,22 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
version = "0.9.13"
version = "0.9.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695"
dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset 0.7.1",
"memoffset 0.8.0",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.14"
version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b"
dependencies = [
"cfg-if",
]
@@ -706,9 +705,9 @@ dependencies = [
[[package]]
name = "cxx"
version = "1.0.91"
version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86d3488e7665a7a483b57e25bdd90d0aeb2bc7608c8d0346acf2ad3f1caf1d62"
checksum = "a9c00419335c41018365ddf7e4d5f1c12ee3659ddcf3e01974650ba1de73d038"
dependencies = [
"cc",
"cxxbridge-flags",
@@ -718,9 +717,9 @@ dependencies = [
[[package]]
name = "cxx-build"
version = "1.0.91"
version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48fcaf066a053a41a81dfb14d57d99738b767febb8b735c3016e469fac5da690"
checksum = "fb8307ad413a98fff033c8545ecf133e3257747b3bae935e7602aab8aa92d4ca"
dependencies = [
"cc",
"codespan-reporting",
@@ -728,24 +727,24 @@ dependencies = [
"proc-macro2",
"quote",
"scratch",
"syn 1.0.109",
"syn 2.0.4",
]
[[package]]
name = "cxxbridge-flags"
version = "1.0.91"
version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2ef98b8b717a829ca5603af80e1f9e2e48013ab227b68ef37872ef84ee479bf"
checksum = "edc52e2eb08915cb12596d29d55f0b5384f00d697a646dbd269b6ecb0fbd9d31"
[[package]]
name = "cxxbridge-macro"
version = "1.0.91"
version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892"
checksum = "631569015d0d8d54e6c241733f944042623ab6df7bc3be7466874b05fcdb1c5f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.4",
]
[[package]]
@@ -835,7 +834,7 @@ version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f"
dependencies = [
"block-buffer 0.10.3",
"block-buffer 0.10.4",
"crypto-common",
"subtle",
]
@@ -969,15 +968,6 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
[[package]]
name = "fastrand"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
dependencies = [
"instant",
]
[[package]]
name = "fixed-hash"
version = "0.8.0"
@@ -1072,21 +1062,6 @@ version = "0.3.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89d422fa3cbe3b40dca574ab087abb5bc98258ea57eea3fd6f1fa7162c778b91"
[[package]]
name = "futures-lite"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48"
dependencies = [
"fastrand",
"futures-core",
"futures-io",
"memchr",
"parking",
"pin-project-lite",
"waker-fn",
]
[[package]]
name = "futures-macro"
version = "0.3.27"
@@ -1424,9 +1399,9 @@ checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
[[package]]
name = "hyper"
version = "0.14.24"
version = "0.14.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e011372fa0b68db8350aa7a248930ecc7839bf46d8485577d69f117a75f164c"
checksum = "cc5e554ff619822309ffd57d8734d77cd5ce6238bc956f037ea06c58238c9899"
dependencies = [
"bytes",
"futures-channel",
@@ -1464,16 +1439,16 @@ dependencies = [
[[package]]
name = "iana-time-zone"
version = "0.1.53"
version = "0.1.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765"
checksum = "0c17cc76786e99f8d2f055c11159e7f0091c42474dcc3189fbab96072e873e6d"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"wasm-bindgen",
"winapi",
"windows",
]
[[package]]
@@ -1548,15 +1523,6 @@ dependencies = [
"serde",
]
[[package]]
name = "instant"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if",
]
[[package]]
name = "integer-sqrt"
version = "0.1.5"
@@ -1579,7 +1545,6 @@ dependencies = [
"scale-info",
"sp-core",
"sp-keyring",
"sp-runtime",
"subxt",
"subxt-codegen",
"syn 1.0.109",
@@ -1593,19 +1558,20 @@ dependencies = [
[[package]]
name = "io-lifetimes"
version = "1.0.5"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3"
checksum = "09270fd4fa1111bc614ed2246c7ef56239a3063d5be0d1ec3b589c505d400aeb"
dependencies = [
"hermit-abi 0.3.1",
"libc",
"windows-sys 0.45.0",
]
[[package]]
name = "is-terminal"
version = "0.4.4"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21b6b32576413a8e69b90e952e4a026476040d81017b80445deda5f2d3921857"
checksum = "8687c819457e979cc940d09cb16e42a1bf70aa6b60a549de6d3a62a0ee90c69e"
dependencies = [
"hermit-abi 0.3.1",
"io-lifetimes",
@@ -1624,9 +1590,9 @@ dependencies = [
[[package]]
name = "itoa"
version = "1.0.5"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
[[package]]
name = "js-sys"
@@ -1748,9 +1714,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.139"
version = "0.2.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c"
[[package]]
name = "libm"
@@ -1884,9 +1850,9 @@ dependencies = [
[[package]]
name = "memoffset"
version = "0.7.1"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1"
dependencies = [
"autocfg",
]
@@ -2071,9 +2037,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "os_str_bytes"
version = "6.4.1"
version = "6.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee"
checksum = "ceedf44fb00f2d1984b0bc98102627ce622e083e49a5bacdb3e514fa4238e267"
[[package]]
name = "output_vt100"
@@ -2129,12 +2095,6 @@ version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1ad0aff30c1da14b1254fcb2af73e1fa9a28670e584a626f53a369d0e157304"
[[package]]
name = "parking"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
[[package]]
name = "parking_lot"
version = "0.12.1"
@@ -2160,9 +2120,9 @@ dependencies = [
[[package]]
name = "paste"
version = "1.0.11"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba"
checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79"
[[package]]
name = "pbkdf2"
@@ -2419,9 +2379,9 @@ dependencies = [
[[package]]
name = "rayon"
version = "1.6.1"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
dependencies = [
"either",
"rayon-core",
@@ -2429,9 +2389,9 @@ dependencies = [
[[package]]
name = "rayon-core"
version = "1.10.2"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b"
checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
@@ -2450,22 +2410,22 @@ dependencies = [
[[package]]
name = "ref-cast"
version = "1.0.14"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c78fb8c9293bcd48ef6fce7b4ca950ceaf21210de6e105a883ee280c0f7b9ed"
checksum = "f43faa91b1c8b36841ee70e97188a869d37ae21759da6846d4be66de5bf7b12c"
dependencies = [
"ref-cast-impl",
]
[[package]]
name = "ref-cast-impl"
version = "1.0.14"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f9c0c92af03644e4806106281fe2e068ac5bc0ae74a707266d06ea27bccee5f"
checksum = "8d2275aab483050ab2a7364c1a46604865ee7d6906684e08db0f090acf74f9e7"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.4",
]
[[package]]
@@ -2529,9 +2489,9 @@ checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6"
[[package]]
name = "rustix"
version = "0.36.8"
version = "0.36.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f43abb88211988493c1abb44a70efa56ff0ce98f233b7b276146f1f3f7ba9644"
checksum = "db4165c9963ab29e422d6c26fbc1d37f15bace6b2810221f9d925023480fcf0e"
dependencies = [
"bitflags 1.3.2",
"errno",
@@ -2576,15 +2536,15 @@ dependencies = [
[[package]]
name = "rustversion"
version = "1.0.11"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70"
checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06"
[[package]]
name = "ryu"
version = "1.0.12"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
[[package]]
name = "same-file"
@@ -2608,16 +2568,58 @@ dependencies = [
[[package]]
name = "scale-decode"
version = "0.4.0"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d823d4be477fc33321f93d08fb6c2698273d044f01362dc27573a750deb7c233"
checksum = "c7e5527e4b3bf079d4c0b2f253418598c380722ba37ef20fac9088081407f2b6"
dependencies = [
"parity-scale-codec",
"primitive-types",
"scale-bits",
"scale-decode-derive",
"scale-info",
"thiserror",
]
[[package]]
name = "scale-decode-derive"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b38741b2f78e4391b94eac6b102af0f6ea2b0f7fe65adb55d7f4004f507854db"
dependencies = [
"darling",
"proc-macro-crate",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "scale-encode"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15546e5efbb45f0fc2291f7e202dee8623274c5d8bbfdf9c6886cc8b44a7ced3"
dependencies = [
"parity-scale-codec",
"primitive-types",
"scale-bits",
"scale-encode-derive",
"scale-info",
"thiserror",
]
[[package]]
name = "scale-encode-derive"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd983cf0a9effd76138554ead18a6de542d1af175ac12fd5e91836c5c0268082"
dependencies = [
"darling",
"proc-macro-crate",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "scale-info"
version = "2.3.1"
@@ -2646,15 +2648,16 @@ dependencies = [
[[package]]
name = "scale-value"
version = "0.6.0"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16a5e7810815bd295da73e4216d1dfbced3c7c7c7054d70fa5f6e4c58123fff4"
checksum = "11f549769261561e6764218f847e500588f9a79a289de49ce92f9e26642a3574"
dependencies = [
"either",
"frame-metadata",
"parity-scale-codec",
"scale-bits",
"scale-decode",
"scale-encode",
"scale-info",
"serde",
"thiserror",
@@ -2707,9 +2710,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "scratch"
version = "1.0.3"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2"
checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1"
[[package]]
name = "sct"
@@ -2779,22 +2782,22 @@ checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0"
[[package]]
name = "serde"
version = "1.0.155"
version = "1.0.158"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71f2b4817415c6d4210bfe1c7bfcf4801b2d904cb4d0e1a8fdb651013c9e86b8"
checksum = "771d4d9c4163ee138805e12c710dd365e4f44be8be0503cb1bb9eb989425d9c9"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.155"
version = "1.0.158"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d071a94a3fac4aff69d023a7f411e33f40f3483f8c5190b1953822b6b76d7630"
checksum = "e801c1712f48475582b7696ac71e0ca34ebb30e09338425384269d9717c62cad"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.4",
]
[[package]]
@@ -2899,9 +2902,9 @@ checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "socket2"
version = "0.4.7"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd"
checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
dependencies = [
"libc",
"winapi",
@@ -3393,6 +3396,7 @@ dependencies = [
"bitvec",
"blake2",
"derivative",
"either",
"frame-metadata",
"futures",
"getrandom 0.2.8",
@@ -3404,6 +3408,7 @@ dependencies = [
"primitive-types",
"scale-bits",
"scale-decode",
"scale-encode",
"scale-info",
"scale-value",
"serde",
@@ -3509,9 +3514,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.2"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59d3276aee1fa0c33612917969b5172b5be2db051232a6e4826f1a1a9191b045"
checksum = "2c622ae390c9302e214c31013517c2061ecb2699935882c60a9b37f82f8625ae"
dependencies = [
"proc-macro2",
"quote",
@@ -3586,7 +3591,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.2",
"syn 2.0.4",
]
[[package]]
@@ -3706,9 +3711,9 @@ checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622"
[[package]]
name = "toml_edit"
version = "0.19.4"
version = "0.19.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a1eb0622d28f4b9c90adc4ea4b2b46b47663fde9ac5fafcb14a1369d5508825"
checksum = "dc18466501acd8ac6a3f615dd29a3438f8ca6bb3b19537138b3106e575621274"
dependencies = [
"indexmap",
"toml_datetime",
@@ -3851,9 +3856,9 @@ checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
[[package]]
name = "trybuild"
version = "1.0.79"
version = "1.0.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db3115bddce1b5f52dd4b5e0ec8298a66ce733e4cc6759247dc2d1c11508ec38"
checksum = "501dbdbb99861e4ab6b60eb6a7493956a9defb644fd034bc4a5ef27c693c8a3a"
dependencies = [
"basic-toml",
"glob 0.3.1",
@@ -3907,15 +3912,15 @@ dependencies = [
[[package]]
name = "unicode-bidi"
version = "0.3.10"
version = "0.3.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58"
checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
[[package]]
name = "unicode-ident"
version = "1.0.6"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4"
[[package]]
name = "unicode-normalization"
@@ -3990,20 +3995,13 @@ dependencies = [
"glob 0.2.11",
]
[[package]]
name = "waker-fn"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca"
[[package]]
name = "walkdir"
version = "2.3.2"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698"
dependencies = [
"same-file",
"winapi",
"winapi-util",
]
@@ -4341,6 +4339,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-sys"
version = "0.42.0"
@@ -4367,9 +4374,9 @@ dependencies = [
[[package]]
name = "windows-targets"
version = "0.42.1"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7"
checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
@@ -4382,51 +4389,51 @@ dependencies = [
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.1"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608"
checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
[[package]]
name = "windows_aarch64_msvc"
version = "0.42.1"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7"
checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
[[package]]
name = "windows_i686_gnu"
version = "0.42.1"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640"
checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
[[package]]
name = "windows_i686_msvc"
version = "0.42.1"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605"
checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
[[package]]
name = "windows_x86_64_gnu"
version = "0.42.1"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45"
checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.1"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463"
checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
[[package]]
name = "windows_x86_64_msvc"
version = "0.42.1"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd"
checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
[[package]]
name = "winnow"
version = "0.3.3"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "faf09497b8f8b5ac5d3bb4d05c0a99be20f26fd3d5f2db7b0716e946d5103658"
checksum = "23d020b441f92996c80d94ae9166e8501e59c7bb56121189dc9eab3bd8216966"
dependencies = [
"memchr",
]
@@ -4448,9 +4455,9 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
[[package]]
name = "yap"
version = "0.7.2"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fc77f52dc9e9b10d55d3f4462c3b7fc393c4f17975d641542833ab2d3bc26ef"
checksum = "e2a7eb6d82a11e4d0b8e6bda8347169aff4ccd8235d039bba7c47482d977dcf7"
[[package]]
name = "zeroize"
+2 -2
View File
@@ -104,8 +104,8 @@ pub fn generate_calls(
pub fn #fn_name(
&self,
#( #call_fn_args, )*
) -> #crate_path::tx::StaticTxPayload<#struct_name> {
#crate_path::tx::StaticTxPayload::new(
) -> #crate_path::tx::Payload<#struct_name> {
#crate_path::tx::Payload::new_static(
#pallet_name,
#call_name,
#struct_name { #( #call_args, )* },
+2 -2
View File
@@ -73,8 +73,8 @@ pub fn generate_constants(
Ok(quote! {
#docs
pub fn #fn_name(&self) -> #crate_path::constants::StaticConstantAddress<#crate_path::metadata::DecodeStaticType<#return_ty>> {
#crate_path::constants::StaticConstantAddress::new(
pub fn #fn_name(&self) -> #crate_path::constants::Address<#return_ty> {
#crate_path::constants::Address::new_static(
#pallet_name,
#constant_name,
[#(#constant_hash,)*]
+65 -32
View File
@@ -53,6 +53,9 @@ use syn::parse_quote;
/// Error returned when the Codegen cannot generate the runtime API.
#[derive(Debug, thiserror::Error)]
pub enum CodegenError {
/// The given metadata type could not be found.
#[error("Could not find type with ID {0} in the type registry; please raise a support issue.")]
TypeNotFound(u32),
/// Cannot fetch the metadata bytes.
#[error("Failed to fetch metadata, make sure that you're pointing at a node which is providing V14 metadata: {0}")]
Fetch(#[from] FetchMetadataError),
@@ -80,12 +83,6 @@ pub enum CodegenError {
/// Metadata for storage could not be found.
#[error("Metadata for storage entry {0}_{1} could not be found. Make sure you are providing a valid metadata V14")]
MissingStorageMetadata(String, String),
/// StorageNMap should have N hashers.
#[error("Number of hashers ({0}) does not equal 1 for StorageMap, or match number of fields ({1}) for StorageNMap. Make sure you are providing a valid metadata V14")]
MismatchHashers(usize, usize),
/// Expected to find one hasher for StorageMap.
#[error("No hasher found for single key. Make sure you are providing a valid metadata V14")]
MissingHasher,
/// Metadata for call could not be found.
#[error("Metadata for call entry {0}_{1} could not be found. Make sure you are providing a valid metadata V14")]
MissingCallMetadata(String, String),
@@ -319,13 +316,12 @@ impl RuntimeGenerator {
) -> Result<TokenStream2, CodegenError> {
let item_mod_attrs = item_mod.attrs.clone();
let item_mod_ir = ir::ItemMod::try_from(item_mod)?;
let default_derives = derives.default_derives();
let type_gen = TypeGenerator::new(
&self.metadata.types,
"runtime_types",
type_substitutes,
derives.clone(),
derives,
crate_path.clone(),
should_gen_docs,
);
@@ -343,6 +339,28 @@ impl RuntimeGenerator {
})
.collect::<Vec<_>>();
// Get the path to the `Runtime` struct. We assume that the same path contains
// RuntimeCall and RuntimeEvent.
let runtime_type_id = self.metadata.ty.id();
let runtime_path_segments = self
.metadata
.types
.resolve(runtime_type_id)
.ok_or(CodegenError::TypeNotFound(runtime_type_id))?
.path()
.namespace()
.iter()
.map(|part| syn::PathSegment::from(format_ident!("{}", part)));
let runtime_path_suffix = syn::Path {
leading_colon: None,
segments: syn::punctuated::Punctuated::from_iter(runtime_path_segments),
};
let runtime_path = if runtime_path_suffix.segments.is_empty() {
quote!(#types_mod_ident)
} else {
quote!(#types_mod_ident::#runtime_path_suffix)
};
// Pallet names and their length are used to create PALLETS array.
// The array is used to identify the pallets composing the metadata for
// validation of just those pallets.
@@ -407,26 +425,24 @@ impl RuntimeGenerator {
})
.collect::<Result<Vec<_>, CodegenError>>()?;
let outer_event_variants = self.metadata.pallets.iter().filter_map(|p| {
let variant_name = format_ident!("{}", p.name);
let mod_name = format_ident!("{}", p.name.to_string().to_snake_case());
let index = proc_macro2::Literal::u8_unsuffixed(p.index);
let root_event_if_arms = self.metadata.pallets.iter().filter_map(|p| {
let variant_name_str = &p.name;
let variant_name = format_ident!("{}", variant_name_str);
let mod_name = format_ident!("{}", variant_name_str.to_string().to_snake_case());
p.event.as_ref().map(|_| {
// An 'if' arm for the RootEvent impl to match this variant name:
quote! {
#[codec(index = #index)]
#variant_name(#mod_name::Event),
if pallet_name == #variant_name_str {
return Ok(Event::#variant_name(#mod_name::Event::decode_with_metadata(
&mut &*pallet_bytes,
pallet_ty,
metadata
)?));
}
}
})
});
let outer_event = quote! {
#default_derives
pub enum Event {
#( #outer_event_variants )*
}
};
let mod_ident = &item_mod_ir.ident;
let pallets_with_constants: Vec<_> = pallets_with_mod_names
.iter()
@@ -456,22 +472,36 @@ impl RuntimeGenerator {
#[allow(dead_code, unused_imports, non_camel_case_types)]
#[allow(clippy::all)]
pub mod #mod_ident {
// Preserve any Rust items that were previously defined in the adorned module
// Preserve any Rust items that were previously defined in the adorned module.
#( #rust_items ) *
// Make it easy to access the root via `root_mod` at different levels:
use super::#mod_ident as root_mod;
// Make it easy to access the root items via `root_mod` at different levels
// without reaching out of this module.
#[allow(unused_imports)]
mod root_mod {
pub use super::*;
}
// Identify the pallets composing the static metadata by name.
pub static PALLETS: [&str; #pallet_names_len] = [ #(#pallet_names,)* ];
#outer_event
#( #modules )*
#types_mod
/// The statically generated runtime call type.
pub type Call = #runtime_path::RuntimeCall;
/// The default error type returned when there is a runtime issue,
/// exposed here for ease of use.
/// The error type returned when there is a runtime issue.
pub type DispatchError = #types_mod_ident::sp_runtime::DispatchError;
// Make the runtime event type easily accessible, and impl RootEvent to help decode into it.
pub type Event = #runtime_path::RuntimeEvent;
impl #crate_path::events::RootEvent for Event {
fn root_event(pallet_bytes: &[u8], pallet_name: &str, pallet_ty: u32, metadata: &#crate_path::Metadata) -> Result<Self, #crate_path::Error> {
use #crate_path::metadata::DecodeWithMetadata;
#( #root_event_if_arms )*
Err(#crate_path::ext::scale_decode::Error::custom(format!("Pallet name '{}' not found in root Event enum", pallet_name)).into())
}
}
pub fn constants() -> ConstantsApi {
ConstantsApi
}
@@ -512,14 +542,17 @@ impl RuntimeGenerator {
}
/// check whether the Client you are using is aligned with the statically generated codegen.
pub fn validate_codegen<T: ::subxt::Config, C: ::subxt::client::OfflineClientT<T>>(client: &C) -> Result<(), ::subxt::error::MetadataError> {
pub fn validate_codegen<T: #crate_path::Config, C: #crate_path::client::OfflineClientT<T>>(client: &C) -> Result<(), #crate_path::error::MetadataError> {
let runtime_metadata_hash = client.metadata().metadata_hash(&PALLETS);
if runtime_metadata_hash != [ #(#metadata_hash,)* ] {
Err(::subxt::error::MetadataError::IncompatibleMetadata)
Err(#crate_path::error::MetadataError::IncompatibleMetadata)
} else {
Ok(())
}
}
#( #modules )*
#types_mod
}
})
}
+28 -59
View File
@@ -12,7 +12,6 @@ use frame_metadata::{
StorageEntryMetadata,
StorageEntryModifier,
StorageEntryType,
StorageHasher,
};
use heck::ToSnakeCase as _;
use proc_macro2::TokenStream as TokenStream2;
@@ -84,31 +83,12 @@ fn generate_storage_entry_fns(
crate_path: &CratePath,
should_gen_docs: bool,
) -> Result<TokenStream2, CodegenError> {
let (fields, key_impl) = match storage_entry.ty {
let (fields, key_impl) = match &storage_entry.ty {
StorageEntryType::Plain(_) => (vec![], quote!(vec![])),
StorageEntryType::Map {
ref key,
ref hashers,
..
} => {
StorageEntryType::Map { key, .. } => {
let key_ty = type_gen.resolve_type(key.id());
let hashers = hashers
.iter()
.map(|hasher| {
let hasher = match hasher {
StorageHasher::Blake2_128 => "Blake2_128",
StorageHasher::Blake2_256 => "Blake2_256",
StorageHasher::Blake2_128Concat => "Blake2_128Concat",
StorageHasher::Twox128 => "Twox128",
StorageHasher::Twox256 => "Twox256",
StorageHasher::Twox64Concat => "Twox64Concat",
StorageHasher::Identity => "Identity",
};
let hasher = format_ident!("{}", hasher);
quote!( #crate_path::storage::address::StorageHasher::#hasher )
})
.collect::<Vec<_>>();
match key_ty.type_def() {
// An N-map; return each of the keys separately.
TypeDef::Tuple(tuple) => {
let fields = tuple
.fields()
@@ -121,46 +101,23 @@ fn generate_storage_entry_fns(
})
.collect::<Vec<_>>();
let key_impl = if hashers.len() == fields.len() {
// If the number of hashers matches the number of fields, we're dealing with
// something shaped like a StorageNMap, and each field should be hashed separately
// according to the corresponding hasher.
let keys = hashers
.into_iter()
.zip(&fields)
.map(|(hasher, (field_name, _))| {
quote!( #crate_path::storage::address::StorageMapKey::new(#field_name.borrow(), #hasher) )
});
quote! {
vec![ #( #keys ),* ]
}
} else if hashers.len() == 1 {
// If there is one hasher, then however many fields we have, we want to hash a
// tuple of them using the one hasher we're told about. This corresponds to a
// StorageMap.
let hasher = hashers.get(0).expect("checked for 1 hasher");
let items =
fields.iter().map(|(field_name, _)| quote!( #field_name ));
quote! {
vec![ #crate_path::storage::address::StorageMapKey::new(&(#( #items.borrow() ),*), #hasher) ]
}
} else {
return Err(CodegenError::MismatchHashers(
hashers.len(),
fields.len(),
))
let keys = fields
.iter()
.map(|(field_name, _)| {
quote!( #crate_path::storage::address::StaticStorageMapKey::new(#field_name.borrow()) )
});
let key_impl = quote! {
vec![ #( #keys ),* ]
};
(fields, key_impl)
}
// A map with a single key; return the single key.
_ => {
let ty_path = type_gen.resolve_type_path(key.id());
let fields = vec![(format_ident!("_0"), ty_path)];
let Some(hasher) = hashers.get(0) else {
return Err(CodegenError::MissingHasher)
};
let key_impl = quote! {
vec![ #crate_path::storage::address::StorageMapKey::new(_0.borrow(), #hasher) ]
vec![ #crate_path::storage::address::StaticStorageMapKey::new(_0.borrow()) ]
};
(fields, key_impl)
}
@@ -233,8 +190,14 @@ fn generate_storage_entry_fns(
#docs
pub fn #fn_name_root(
&self,
) -> #crate_path::storage::address::StaticStorageAddress::<#crate_path::metadata::DecodeStaticType<#storage_entry_value_ty>, (), #is_defaultable_type, #is_iterable_type> {
#crate_path::storage::address::StaticStorageAddress::new(
) -> #crate_path::storage::address::Address::<
#crate_path::storage::address::StaticStorageMapKey,
#storage_entry_value_ty,
(),
#is_defaultable_type,
#is_iterable_type
> {
#crate_path::storage::address::Address::new_static(
#pallet_name,
#storage_name,
Vec::new(),
@@ -252,8 +215,14 @@ fn generate_storage_entry_fns(
pub fn #fn_name(
&self,
#( #key_args, )*
) -> #crate_path::storage::address::StaticStorageAddress::<#crate_path::metadata::DecodeStaticType<#storage_entry_value_ty>, #crate_path::storage::address::Yes, #is_defaultable_type, #is_iterable_type> {
#crate_path::storage::address::StaticStorageAddress::new(
) -> #crate_path::storage::address::Address::<
#crate_path::storage::address::StaticStorageMapKey,
#storage_entry_value_ty,
#crate_path::storage::address::Yes,
#is_defaultable_type,
#is_iterable_type
> {
#crate_path::storage::address::Address::new_static(
#pallet_name,
#storage_name,
#key_impl,
+56 -13
View File
@@ -5,7 +5,6 @@
use crate::CratePath;
use syn::{
parse_quote,
punctuated::Punctuated,
Path,
};
@@ -21,7 +20,7 @@ pub struct DerivesRegistry {
}
impl DerivesRegistry {
/// Creates a new `DeviceRegistry` with the supplied `crate_path`.
/// Creates a new `DerivesRegistry` with the supplied `crate_path`.
///
/// The `crate_path` denotes the `subxt` crate access path in the
/// generated code.
@@ -61,23 +60,27 @@ impl DerivesRegistry {
/// - Any user-defined derives for all types via `generated_type_derives`
/// - Any user-defined derives for this specific type
pub fn resolve(&self, ty: &syn::TypePath) -> Derives {
let mut defaults = self.default_derives.derives.clone();
let mut resolved_derives = self.default_derives.clone();
if let Some(specific) = self.specific_type_derives.get(ty) {
defaults.extend(specific.derives.iter().cloned());
resolved_derives.extend_from(specific.clone());
}
Derives { derives: defaults }
resolved_derives
}
}
#[derive(Debug, Clone)]
pub struct Derives {
derives: HashSet<syn::Path>,
attributes: HashSet<syn::Attribute>,
}
impl FromIterator<syn::Path> for Derives {
fn from_iter<T: IntoIterator<Item = Path>>(iter: T) -> Self {
let derives = iter.into_iter().collect();
Self { derives }
Self {
derives,
attributes: HashSet::new(),
}
}
}
@@ -86,26 +89,55 @@ impl Derives {
/// to the set of default derives that reside in `subxt`.
pub fn new(crate_path: &CratePath) -> Self {
let mut derives = HashSet::new();
let mut attributes = HashSet::new();
derives.insert(syn::parse_quote!(#crate_path::ext::scale_encode::EncodeAsType));
let encode_crate_path =
quote::quote! { #crate_path::ext::scale_encode }.to_string();
attributes.insert(
syn::parse_quote!(#[encode_as_type(crate_path = #encode_crate_path)]),
);
derives.insert(syn::parse_quote!(#crate_path::ext::scale_decode::DecodeAsType));
let decode_crate_path =
quote::quote! { #crate_path::ext::scale_decode }.to_string();
attributes.insert(
syn::parse_quote!(#[decode_as_type(crate_path = #decode_crate_path)]),
);
derives.insert(syn::parse_quote!(#crate_path::ext::codec::Encode));
derives.insert(syn::parse_quote!(#crate_path::ext::codec::Decode));
derives.insert(syn::parse_quote!(Debug));
Self { derives }
Self {
derives,
attributes,
}
}
/// Extend this set of `Derives` from another.
pub fn extend_from(&mut self, other: Derives) {
self.derives.extend(other.derives.into_iter());
self.attributes.extend(other.attributes.into_iter());
}
/// Add `#crate_path::ext::codec::CompactAs` to the derives.
pub fn insert_codec_compact_as(&mut self, crate_path: &CratePath) {
self.insert(parse_quote!(#crate_path::ext::codec::CompactAs));
self.insert_derive(parse_quote!(#crate_path::ext::codec::CompactAs));
}
pub fn append(&mut self, derives: impl Iterator<Item = syn::Path>) {
for derive in derives {
self.insert(derive)
self.insert_derive(derive)
}
}
pub fn insert(&mut self, derive: syn::Path) {
pub fn insert_derive(&mut self, derive: syn::Path) {
self.derives.insert(derive);
}
pub fn insert_attribute(&mut self, attribute: syn::Attribute) {
self.attributes.insert(attribute);
}
}
impl quote::ToTokens for Derives {
@@ -117,10 +149,21 @@ impl quote::ToTokens for Derives {
.to_string()
.cmp(&quote::quote!(#b).to_string())
});
let derives: Punctuated<syn::Path, syn::Token![,]> =
sorted.iter().cloned().collect();
tokens.extend(quote::quote! {
#[derive(#derives)]
#[derive(#( #sorted ),*)]
})
}
if !self.attributes.is_empty() {
let mut sorted = self.attributes.iter().cloned().collect::<Vec<_>>();
sorted.sort_by(|a, b| {
quote::quote!(#a)
.to_string()
.cmp(&quote::quote!(#b).to_string())
});
tokens.extend(quote::quote! {
#( #sorted )*
})
}
}
+4 -1
View File
@@ -41,7 +41,10 @@ pub use self::{
Derives,
DerivesRegistry,
},
substitutes::TypeSubstitutes,
substitutes::{
AbsolutePath,
TypeSubstitutes,
},
type_def::TypeDefGen,
type_def_params::TypeDefParameters,
type_path::{
+67 -38
View File
@@ -108,50 +108,79 @@ impl TypeSubstitutes {
}
}
/// Only insert the given substitution if a substitution at that path doesn't
/// already exist.
pub fn insert_if_not_exists(
&mut self,
source: syn::Path,
target: AbsolutePath,
) -> Result<(), CodegenError> {
let (key, val) = TypeSubstitutes::parse_path_substitution(source, target.0)?;
self.substitutes.entry(key).or_insert(val);
Ok(())
}
/// Add a bunch of source to target type substitutions.
pub fn extend(
&mut self,
elems: impl IntoIterator<Item = (syn::Path, AbsolutePath)>,
) -> Result<(), CodegenError> {
let to_extend = elems.into_iter().map(|(path, AbsolutePath(mut with))| {
let Some(syn::PathSegment { arguments: src_path_args, ..}) = path.segments.last() else {
return Err(CodegenError::EmptySubstitutePath(path.span()))
};
let Some(syn::PathSegment { arguments: target_path_args, ..}) = with.segments.last_mut() else {
return Err(CodegenError::EmptySubstitutePath(with.span()))
};
let source_args: Vec<_> = type_args(src_path_args).collect();
let param_mapping = if source_args.is_empty() {
// If the type parameters on the source type are not specified, then this means that
// the type is either not generic or the user wants to pass through all the parameters
TypeParamMapping::None
} else {
// Describe the mapping in terms of "which source param idx is used for each target param".
// So, for each target param, find the matching source param index.
let mapping = type_args(target_path_args)
.filter_map(|arg|
source_args
.iter()
.position(|&src| src == arg)
.map(|src_idx|
u8::try_from(src_idx).expect("type arguments to be fewer than 256; qed"),
)
).collect();
TypeParamMapping::Specified(mapping)
};
// NOTE: Params are late bound and held separately, so clear them
// here to not mess pretty printing this path and params together
*target_path_args = syn::PathArguments::None;
Ok((PathSegments::from(&path), Substitute { path: with, param_mapping }))
}).collect::<Result<Vec<_>, _>>()?;
self.substitutes.extend(to_extend);
for (source, target) in elems.into_iter() {
let (key, val) = TypeSubstitutes::parse_path_substitution(source, target.0)?;
self.substitutes.insert(key, val);
}
Ok(())
}
/// Given a source and target path, parse the type params to work out the mapping from
/// source to target, and output the source => substitution mapping that we work out from this.
fn parse_path_substitution(
src_path: syn::Path,
mut target_path: syn::Path,
) -> Result<(PathSegments, Substitute), CodegenError> {
let Some(syn::PathSegment { arguments: src_path_args, ..}) = src_path.segments.last() else {
return Err(CodegenError::EmptySubstitutePath(src_path.span()))
};
let Some(syn::PathSegment { arguments: target_path_args, ..}) = target_path.segments.last_mut() else {
return Err(CodegenError::EmptySubstitutePath(target_path.span()))
};
let source_args: Vec<_> = type_args(src_path_args).collect();
let param_mapping = if source_args.is_empty() {
// If the type parameters on the source type are not specified, then this means that
// the type is either not generic or the user wants to pass through all the parameters
TypeParamMapping::None
} else {
// Describe the mapping in terms of "which source param idx is used for each target param".
// So, for each target param, find the matching source param index.
let mapping = type_args(target_path_args)
.filter_map(|arg| {
source_args
.iter()
.position(|&src| src == arg)
.map(|src_idx| {
u8::try_from(src_idx)
.expect("type arguments to be fewer than 256; qed")
})
})
.collect();
TypeParamMapping::Specified(mapping)
};
// Now that we've parsed the type params from our target path, remove said params from
// that path, since we're storing them separately.
*target_path_args = syn::PathArguments::None;
Ok((
PathSegments::from(&src_path),
Substitute {
path: target_path,
param_mapping,
},
))
}
/// Given a source type path, return a substituted type path if a substitution is defined.
pub fn for_path(&self, path: impl Into<PathSegments>) -> Option<&syn::Path> {
self.substitutes.get(&path.into()).map(|s| &s.path)
@@ -249,7 +278,7 @@ fn is_absolute(path: &syn::Path) -> bool {
.map_or(false, |segment| segment.ident == "crate")
}
pub struct AbsolutePath(syn::Path);
pub struct AbsolutePath(pub syn::Path);
impl TryFrom<syn::Path> for AbsolutePath {
type Error = (syn::Path, String);
+127 -43
View File
@@ -57,7 +57,9 @@ fn generate_struct_with_primitives() {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct S {
pub a: ::core::primitive::bool,
pub b: ::core::primitive::u32,
@@ -106,12 +108,16 @@ fn generate_struct_with_a_struct_field() {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Child {
pub a: ::core::primitive::i32,
}
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Parent {
pub a: ::core::primitive::bool,
pub b: root::subxt_codegen::types::tests::Child,
@@ -154,10 +160,14 @@ fn generate_tuple_struct() {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Child(pub ::core::primitive::i32,);
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Parent(pub ::core::primitive::bool, pub root::subxt_codegen::types::tests::Child,);
}
}
@@ -239,34 +249,54 @@ fn derive_compact_as_for_uint_wrapper_structs() {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Su128 { pub a: ::core::primitive::u128, }
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Su16 { pub a: ::core::primitive::u16, }
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Su32 { pub a: ::core::primitive::u32, }
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Su64 { pub a: ::core::primitive::u64, }
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Su8 { pub a: ::core::primitive::u8, }
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct TSu128(pub ::core::primitive::u128,);
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct TSu16(pub ::core::primitive::u16,);
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct TSu32(pub ::core::primitive::u32,);
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct TSu64(pub ::core::primitive::u64,);
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct TSu8(pub ::core::primitive::u8,);
}
}
@@ -305,7 +335,9 @@ fn generate_enum() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub enum E {
# [codec (index = 0)]
A,
@@ -366,7 +398,9 @@ fn compact_fields() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub enum E {
# [codec (index = 0)]
A {
@@ -377,12 +411,16 @@ fn compact_fields() {
B( #[codec(compact)] ::core::primitive::u32,),
}
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct S {
#[codec(compact)] pub a: ::core::primitive::u32,
}
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct TupleStruct(#[codec(compact)] pub ::core::primitive::u32,);
}
}
@@ -426,7 +464,9 @@ fn compact_generic_parameter() {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct S {
pub a: ::core::option::Option<::subxt_path::ext::codec::Compact<::core::primitive::u128> >,
pub nested: ::core::option::Option<::core::result::Result<::subxt_path::ext::codec::Compact<::core::primitive::u128>, ::core::primitive::u8 > >,
@@ -469,7 +509,9 @@ fn generate_array_field() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct S {
pub a: [::core::primitive::u8; 32usize],
}
@@ -509,7 +551,9 @@ fn option_fields() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct S {
pub a: ::core::option::Option<::core::primitive::bool>,
pub b: ::core::option::Option<::core::primitive::u32>,
@@ -552,7 +596,9 @@ fn box_fields_struct() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct S {
pub a: ::std::boxed::Box<::core::primitive::bool>,
pub b: ::std::boxed::Box<::core::primitive::u32>,
@@ -595,7 +641,9 @@ fn box_fields_enum() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub enum E {
# [codec (index = 0)]
A(::std::boxed::Box<::core::primitive::bool>,),
@@ -638,7 +686,9 @@ fn range_fields() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct S {
pub a: ::core::ops::Range<::core::primitive::u32>,
pub b: ::core::ops::RangeInclusive<::core::primitive::u32>,
@@ -685,12 +735,16 @@ fn generics() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Bar {
pub b: root::subxt_codegen::types::tests::Foo<::core::primitive::u32>,
pub c: root::subxt_codegen::types::tests::Foo<::core::primitive::u8>,
}
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Foo<_0> {
pub a: _0,
}
@@ -736,12 +790,16 @@ fn generics_nested() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Bar<_0> {
pub b: root::subxt_codegen::types::tests::Foo<_0, ::core::primitive::u32>,
}
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Foo<_0, _1> {
pub a: _0,
pub b: ::core::option::Option<(_0, _1,)>,
@@ -790,7 +848,9 @@ fn generate_bitvec() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct S {
pub lsb: ::subxt_path::utils::bits::DecodedBits<::core::primitive::u8, ::subxt_path::utils::bits::Lsb0>,
pub msb: ::subxt_path::utils::bits::DecodedBits<::core::primitive::u16, ::subxt_path::utils::bits::Msb0>,
@@ -846,15 +906,21 @@ fn generics_with_alias_adds_phantom_data_marker() {
quote! {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::CompactAs, ::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct NamedFields<_0> {
pub b: ::core::primitive::u32,
#[codec(skip)] pub __subxt_unused_type_params: ::core::marker::PhantomData<_0>
#[codec(skip)]
pub __subxt_unused_type_params: ::core::marker::PhantomData<_0>
}
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct UnnamedFields<_0, _1> (
pub (::core::primitive::u32, ::core::primitive::u32,),
#[codec(skip)] pub ::core::marker::PhantomData<(_0, _1)>
#[codec(skip)]
pub ::core::marker::PhantomData<(_0, _1)>
);
}
}
@@ -917,20 +983,26 @@ fn modules() {
pub mod b {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Bar {
pub a: root::subxt_codegen::types::tests::m::a::Foo,
}
}
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Foo;
}
pub mod c {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct Foo {
pub a: root::subxt_codegen::types::tests::m::a::b::Bar,
}
@@ -970,7 +1042,9 @@ fn dont_force_struct_names_camel_case() {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct AB;
}
}
@@ -1014,10 +1088,14 @@ fn apply_user_defined_derives_for_all_types() {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Clone, Debug, Eq)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Clone, Debug, Eq)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct A(pub root :: subxt_codegen :: types :: tests :: B,);
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Clone, Debug, Eq)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Clone, Debug, Eq)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct B;
}
}
@@ -1082,13 +1160,19 @@ fn apply_user_defined_derives_for_specific_types() {
pub mod tests {
use super::root;
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug, Eq)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug, Eq)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct A(pub root :: subxt_codegen :: types :: tests :: B,);
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug, Eq, Hash)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug, Eq, Hash)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct B(pub root :: subxt_codegen :: types :: tests :: C,);
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, Debug, Eq, Ord, PartialOrd)]
#[derive(::subxt_path::ext::codec::Decode, ::subxt_path::ext::codec::Encode, ::subxt_path::ext::scale_decode::DecodeAsType, ::subxt_path::ext::scale_encode::EncodeAsType, Debug, Eq, Ord, PartialOrd)]
#[decode_as_type(crate_path = ":: subxt_path :: ext :: scale_decode")]
#[encode_as_type(crate_path = ":: subxt_path :: ext :: scale_encode")]
pub struct C;
}
}
+9 -8
View File
@@ -10,12 +10,11 @@
//! polkadot --dev --tmp
//! ```
use codec::Decode;
use codec::{
Decode,
Encode,
};
use subxt::{
storage::address::{
StorageHasher,
StorageMapKey,
},
OnlineClient,
PolkadotConfig,
};
@@ -83,11 +82,13 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut query_key = key_addr.to_root_bytes();
// We know that the first key is a u32 (the `XcmVersion`) and is hashed by twox64_concat.
// We can build a `StorageMapKey` that replicates that, and append those bytes to the above.
StorageMapKey::new(2u32, StorageHasher::Twox64Concat).to_bytes(&mut query_key);
// twox64_concat is just the result of running the twox_64 hasher on some value and concatenating
// the value itself after it:
query_key.extend(subxt::ext::sp_core::twox_64(&2u32.encode()));
query_key.extend(&2u32.encode());
// The final query key is essentially the result of:
// `twox_128("XcmPallet") ++ twox_128("VersionNotifiers") ++ twox_64(2u32) ++ 2u32`
// `twox_128("XcmPallet") ++ twox_128("VersionNotifiers") ++ twox_64(scale_encode(2u32)) ++ scale_encode(2u32)`
println!("\nExample 3\nQuery key: 0x{}", hex::encode(&query_key));
let keys = api
+5 -3
View File
@@ -37,9 +37,10 @@ jsonrpsee-web = ["jsonrpsee/async-wasm-client", "jsonrpsee/client-web-transport"
[dependencies]
codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive", "full"] }
scale-info = "2.0.0"
scale-value = "0.6.0"
scale-value = "0.7.0"
scale-bits = "0.3"
scale-decode = "0.4.0"
scale-decode = "0.5.0"
scale-encode = "0.1.0"
futures = { version = "0.3.27", default-features = false, features = ["std"] }
hex = "0.4.3"
jsonrpsee = { version = "0.16", optional = true, features = ["jsonrpsee-types"] }
@@ -50,13 +51,14 @@ tracing = "0.1.34"
parking_lot = "0.12.0"
frame-metadata = "15.0.0"
derivative = "2.2.0"
either = "1.8.1"
subxt-macro = { version = "0.27.1", path = "../macro" }
subxt-metadata = { version = "0.27.1", path = "../metadata" }
# Provides some deserialization, types like U256/H256 and hashing impls like twox/blake256:
impl-serde = { version = "0.4.0" }
primitive-types = { version = "0.12.0", default-features = false, features = ["codec", "scale-info", "serde"] }
primitive-types = { version = "0.12.1", default-features = false, features = ["codec", "scale-info", "serde"] }
sp-core-hashing = "7.0.0"
# For ss58 encoding AccountId32 to serialize them properly:
+3 -3
View File
@@ -23,9 +23,9 @@ use serde::{
};
pub use crate::utils::{
account_id::AccountId32,
multi_address::MultiAddress,
multi_signature::MultiSignature,
AccountId32,
MultiAddress,
MultiSignature,
};
pub use primitive_types::{
H256,
+36 -43
View File
@@ -28,25 +28,39 @@ pub trait ConstantAddress {
}
}
/// This represents a statically generated constant lookup address.
pub struct StaticConstantAddress<ReturnTy> {
pallet_name: &'static str,
constant_name: &'static str,
/// This represents the address of a constant.
pub struct Address<ReturnTy> {
pallet_name: Cow<'static, str>,
constant_name: Cow<'static, str>,
constant_hash: Option<[u8; 32]>,
_marker: std::marker::PhantomData<ReturnTy>,
}
impl<ReturnTy> StaticConstantAddress<ReturnTy> {
/// Create a new [`StaticConstantAddress`] that will be validated
/// The type of address typically used to return dynamic constant values.
pub type DynamicAddress = Address<DecodedValueThunk>;
impl<ReturnTy> Address<ReturnTy> {
/// Create a new [`Address`] to use to look up a constant.
pub fn new(pallet_name: impl Into<String>, constant_name: impl Into<String>) -> Self {
Self {
pallet_name: Cow::Owned(pallet_name.into()),
constant_name: Cow::Owned(constant_name.into()),
constant_hash: None,
_marker: std::marker::PhantomData,
}
}
/// Create a new [`Address`] that will be validated
/// against node metadata using the hash given.
pub fn new(
#[doc(hidden)]
pub fn new_static(
pallet_name: &'static str,
constant_name: &'static str,
hash: [u8; 32],
) -> Self {
Self {
pallet_name,
constant_name,
pallet_name: Cow::Borrowed(pallet_name),
constant_name: Cow::Borrowed(constant_name),
constant_hash: Some(hash),
_marker: std::marker::PhantomData,
}
@@ -63,42 +77,9 @@ impl<ReturnTy> StaticConstantAddress<ReturnTy> {
}
}
impl<ReturnTy: DecodeWithMetadata> ConstantAddress for StaticConstantAddress<ReturnTy> {
impl<ReturnTy: DecodeWithMetadata> ConstantAddress for Address<ReturnTy> {
type Target = ReturnTy;
fn pallet_name(&self) -> &str {
self.pallet_name
}
fn constant_name(&self) -> &str {
self.constant_name
}
fn validation_hash(&self) -> Option<[u8; 32]> {
self.constant_hash
}
}
/// This represents a dynamically generated constant address.
pub struct DynamicConstantAddress<'a> {
pallet_name: Cow<'a, str>,
constant_name: Cow<'a, str>,
}
/// Construct a new dynamic constant lookup.
pub fn dynamic<'a>(
pallet_name: impl Into<Cow<'a, str>>,
constant_name: impl Into<Cow<'a, str>>,
) -> DynamicConstantAddress<'a> {
DynamicConstantAddress {
pallet_name: pallet_name.into(),
constant_name: constant_name.into(),
}
}
impl<'a> ConstantAddress for DynamicConstantAddress<'a> {
type Target = DecodedValueThunk;
fn pallet_name(&self) -> &str {
&self.pallet_name
}
@@ -106,4 +87,16 @@ impl<'a> ConstantAddress for DynamicConstantAddress<'a> {
fn constant_name(&self) -> &str {
&self.constant_name
}
fn validation_hash(&self) -> Option<[u8; 32]> {
self.constant_hash
}
}
/// Construct a new dynamic constant lookup.
pub fn dynamic(
pallet_name: impl Into<String>,
constant_name: impl Into<String>,
) -> DynamicAddress {
DynamicAddress::new(pallet_name, constant_name)
}
+2 -2
View File
@@ -63,7 +63,7 @@ impl<T: Config, Client: OfflineClientT<T>> ConstantsClient<T, Client> {
pub fn at<Address: ConstantAddress>(
&self,
address: &Address,
) -> Result<<Address::Target as DecodeWithMetadata>::Target, Error> {
) -> Result<Address::Target, Error> {
let metadata = self.client.metadata();
// 1. Validate constant shape if hash given:
@@ -72,7 +72,7 @@ impl<T: Config, Client: OfflineClientT<T>> ConstantsClient<T, Client> {
// 2. Attempt to decode the constant into the type given:
let pallet = metadata.pallet(address.pallet_name())?;
let constant = pallet.constant(address.constant_name())?;
let value = Address::Target::decode_with_metadata(
let value = <Address::Target as DecodeWithMetadata>::decode_with_metadata(
&mut &*constant.value,
constant.ty.id(),
&metadata,
+2 -2
View File
@@ -9,8 +9,8 @@ mod constants_client;
pub use constant_address::{
dynamic,
Address,
ConstantAddress,
DynamicConstantAddress,
StaticConstantAddress,
DynamicAddress,
};
pub use constants_client::ConstantsClient;
+6 -6
View File
@@ -12,6 +12,7 @@ use crate::{
Metadata,
},
};
use scale_decode::DecodeAsType;
pub use scale_value::Value;
@@ -43,13 +44,11 @@ pub struct DecodedValueThunk {
}
impl DecodeWithMetadata for DecodedValueThunk {
type Target = Self;
fn decode_with_metadata(
bytes: &mut &[u8],
type_id: u32,
metadata: &Metadata,
) -> Result<Self::Target, Error> {
) -> Result<Self, Error> {
let mut v = Vec::with_capacity(bytes.len());
v.extend_from_slice(bytes);
*bytes = &[];
@@ -72,10 +71,11 @@ impl DecodedValueThunk {
}
/// Decode the SCALE encoded storage entry into a dynamic [`DecodedValue`] type.
pub fn to_value(&self) -> Result<DecodedValue, Error> {
DecodedValue::decode_with_metadata(
let val = DecodedValue::decode_as_type(
&mut &*self.scale_bytes,
self.type_id,
&self.metadata,
)
self.metadata.types(),
)?;
Ok(val)
}
}
+4 -6
View File
@@ -15,10 +15,8 @@ pub use crate::metadata::{
InvalidMetadataError,
MetadataError,
};
pub use scale_value::scale::{
DecodeError,
EncodeError,
};
pub use scale_decode::Error as DecodeError;
pub use scale_encode::Error as EncodeError;
/// The underlying error enum, generic over the type held by the `Runtime`
/// variant. Prefer to use the [`Error<E>`] and [`Error`] aliases over
@@ -48,10 +46,10 @@ pub enum Error {
Runtime(DispatchError),
/// Error decoding to a [`crate::dynamic::Value`].
#[error("Error decoding into dynamic value: {0}")]
DecodeValue(#[from] DecodeError),
Decode(#[from] DecodeError),
/// Error encoding from a [`crate::dynamic::Value`].
#[error("Error encoding from dynamic value: {0}")]
EncodeValue(#[from] EncodeError<()>),
Encode(#[from] EncodeError),
/// Transaction progress error.
#[error("Transaction error: {0}")]
Transaction(#[from] TransactionError),
+154 -48
View File
@@ -12,14 +12,16 @@ use crate::{
client::OnlineClientT,
error::Error,
events::events_client::get_event_bytes,
metadata::EventMetadata,
metadata::{
DecodeWithMetadata,
EventMetadata,
},
Config,
Metadata,
};
use codec::{
Compact,
Decode,
Error as CodecError,
};
use derivative::Derivative;
use std::sync::Arc;
@@ -203,6 +205,7 @@ impl<T: Config> Events<T> {
#[derive(Debug, Clone)]
pub struct EventDetails {
phase: Phase,
/// The index of the event in the list of events in a given block.
index: u32,
all_bytes: Arc<[u8]>,
// start of the bytes (phase, pallet/variant index and then fields and then topic to follow).
@@ -248,12 +251,13 @@ impl EventDetails {
// Skip over the bytes belonging to this event.
for field_metadata in event_metadata.fields() {
// Skip over the bytes for this field:
scale_decode::decode(
scale_decode::visitor::decode_with_visitor(
input,
field_metadata.type_id(),
field_metadata.ty().id(),
&metadata.runtime_metadata().types,
scale_decode::visitor::IgnoreVisitor,
)?;
)
.map_err(scale_decode::Error::from)?;
}
// the end of the field bytes.
@@ -343,63 +347,98 @@ impl EventDetails {
let bytes = &mut self.field_bytes();
let event_metadata = self.event_metadata();
// If the first field has a name, we assume that the rest do too (it'll either
// be a named struct or a tuple type). If no fields, assume unnamed.
let is_named = event_metadata
.fields()
.get(0)
.map(|fm| fm.name().is_some())
.unwrap_or(false);
use scale_decode::DecodeAsFields;
let decoded =
<scale_value::Composite<scale_value::scale::TypeId>>::decode_as_fields(
bytes,
event_metadata.fields(),
&self.metadata.runtime_metadata().types,
)?;
if !is_named {
let mut event_values = vec![];
for field_metadata in event_metadata.fields() {
let value = scale_value::scale::decode_as_type(
bytes,
field_metadata.type_id(),
&self.metadata.runtime_metadata().types,
)?;
event_values.push(value);
}
Ok(scale_value::Composite::Unnamed(event_values))
} else {
let mut event_values = vec![];
for field_metadata in event_metadata.fields() {
let value = scale_value::scale::decode_as_type(
bytes,
field_metadata.type_id(),
&self.metadata.runtime_metadata().types,
)?;
event_values
.push((field_metadata.name().unwrap_or_default().to_string(), value));
}
Ok(scale_value::Composite::Named(event_values))
}
Ok(decoded)
}
/// Attempt to decode these [`EventDetails`] into a specific static event.
/// This targets the fields within the event directly. You can also attempt to
/// decode the entirety of the event type (including the pallet and event
/// variants) using [`EventDetails::as_root_event()`].
pub fn as_event<E: StaticEvent>(&self) -> Result<Option<E>, CodecError> {
/// Attempt to statically decode these [`EventDetails`] into a type representing the event
/// fields. This leans directly on [`codec::Decode`]. You can also attempt to decode the entirety
/// of the event using [`EventDetails::as_root_event()`], which is more lenient because it's able
/// to lean on [`scale_decode::DecodeAsType`].
pub fn as_event<E: StaticEvent>(&self) -> Result<Option<E>, Error> {
let ev_metadata = self.event_metadata();
if ev_metadata.pallet() == E::PALLET && ev_metadata.event() == E::EVENT {
Ok(Some(E::decode(&mut self.field_bytes())?))
let decoded = E::decode_as_fields(
&mut self.field_bytes(),
ev_metadata.fields(),
self.metadata.types(),
)?;
Ok(Some(decoded))
} else {
Ok(None)
}
}
/// Attempt to decode these [`EventDetails`] into a pallet event type (which includes
/// the pallet enum variants as well as the event fields). These events can be found in
/// the static codegen under a path like `pallet_name::Event`.
pub fn as_pallet_event<E: DecodeWithMetadata>(&self) -> Result<E, Error> {
let pallet = self.metadata.pallet(self.pallet_name())?;
let event_ty = pallet.event_ty_id().ok_or_else(|| {
Error::Metadata(crate::metadata::MetadataError::EventNotFound(
pallet.index(),
self.variant_index(),
))
})?;
// Ignore the root enum index, so start 1 byte after that:
let start_idx = self.event_start_idx + 1;
let decoded = E::decode_with_metadata(
&mut &self.all_bytes[start_idx..self.event_fields_end_idx],
event_ty,
&self.metadata,
)?;
Ok(decoded)
}
/// Attempt to decode these [`EventDetails`] into a root event type (which includes
/// the pallet and event enum variants as well as the event fields). A compatible
/// type for this is exposed via static codegen as a root level `Event` type.
pub fn as_root_event<E: Decode>(&self) -> Result<E, CodecError> {
E::decode(&mut &self.all_bytes[self.event_start_idx..self.event_fields_end_idx])
pub fn as_root_event<E: RootEvent>(&self) -> Result<E, Error> {
let pallet_bytes =
&self.all_bytes[self.event_start_idx + 1..self.event_fields_end_idx];
let pallet = self.metadata.pallet(self.pallet_name())?;
let pallet_event_ty = pallet.event_ty_id().ok_or_else(|| {
Error::Metadata(crate::metadata::MetadataError::EventNotFound(
pallet.index(),
self.variant_index(),
))
})?;
E::root_event(
pallet_bytes,
self.pallet_name(),
pallet_event_ty,
&self.metadata,
)
}
}
/// This trait is implemented on the statically generated root event type, so that we're able
/// to decode it properly via a pallet event that impls `DecodeAsMetadata`. This is necessary
/// becasue the "root event" type is generated using pallet info but doesn't actually exist in the
/// metadata types, so we have no easy way to decode things into it via type information and need a
/// little help via codegen.
#[doc(hidden)]
pub trait RootEvent: Sized {
/// Given details of the pallet event we want to decode, and the name of the pallet, try to hand
/// back a "root event".
fn root_event(
pallet_bytes: &[u8],
pallet_name: &str,
pallet_event_ty: u32,
metadata: &Metadata,
) -> Result<Self, Error>;
}
/// Event related test utilities used outside this module.
#[cfg(test)]
pub(crate) mod test_utils {
@@ -425,11 +464,40 @@ pub(crate) mod test_utils {
use std::convert::TryFrom;
/// An "outer" events enum containing exactly one event.
#[derive(Encode, Decode, TypeInfo, Clone, Debug, PartialEq, Eq)]
#[derive(
Encode,
Decode,
TypeInfo,
Clone,
Debug,
PartialEq,
Eq,
scale_encode::EncodeAsType,
scale_decode::DecodeAsType,
)]
pub enum AllEvents<Ev> {
Test(Ev),
}
// We need this in order to be able to decode into a root event type:
impl<Ev: DecodeWithMetadata> RootEvent for AllEvents<Ev> {
fn root_event(
mut bytes: &[u8],
pallet_name: &str,
pallet_event_ty: u32,
metadata: &Metadata,
) -> Result<Self, Error> {
if pallet_name == "Test" {
return Ok(AllEvents::Test(Ev::decode_with_metadata(
&mut bytes,
pallet_event_ty,
metadata,
)?))
}
panic!("Asked for pallet name '{pallet_name}', which isn't in our test AllEvents type")
}
}
/// This encodes to the same format an event is expected to encode to
/// in node System.Events storage.
#[derive(Encode)]
@@ -556,6 +624,7 @@ mod tests {
// Make sure that the bytes handed back line up with the fields handed back;
// encode the fields back into bytes and they should be equal.
let actual_fields = actual.field_values().expect("can decode field values (1)");
let mut actual_bytes = vec![];
for field in actual_fields.into_values() {
scale_value::scale::encode_as_type(
@@ -587,7 +656,9 @@ mod tests {
#[test]
fn statically_decode_single_root_event() {
#[derive(Clone, Debug, PartialEq, Decode, Encode, TypeInfo)]
#[derive(
Clone, Debug, PartialEq, Decode, Encode, TypeInfo, scale_decode::DecodeAsType,
)]
enum Event {
A(u8, bool, Vec<String>),
}
@@ -618,6 +689,41 @@ mod tests {
assert_eq!(decoded_event, AllEvents::Test(event));
}
#[test]
fn statically_decode_single_pallet_event() {
#[derive(
Clone, Debug, PartialEq, Decode, Encode, TypeInfo, scale_decode::DecodeAsType,
)]
enum Event {
A(u8, bool, Vec<String>),
}
// Create fake metadata that knows about our single event, above:
let metadata = metadata::<Event>();
// Encode our events in the format we expect back from a node, and
// construst an Events object to iterate them:
let event = Event::A(1, true, vec!["Hi".into()]);
let events = events::<Event>(
metadata,
vec![event_record(Phase::ApplyExtrinsic(123), event.clone())],
);
let ev = events
.iter()
.next()
.expect("one event expected")
.expect("event should be extracted OK");
// This is the line we're testing; decode into our "pallet event" enum.
let decoded_event = ev
.as_pallet_event::<Event>()
.expect("can decode event into root enum again");
// It should equal the event we put in:
assert_eq!(decoded_event, event);
}
#[test]
fn dynamically_decode_single_event() {
#[derive(Clone, Debug, PartialEq, Decode, Encode, TypeInfo)]
+9 -7
View File
@@ -9,16 +9,18 @@
mod events_client;
mod events_type;
pub use events_client::EventsClient;
pub use events_type::{
EventDetails,
Events,
};
use codec::{
Decode,
Encode,
};
pub use events_client::EventsClient;
pub use events_type::{
EventDetails,
Events,
// Used in codegen but hidden from docs:
RootEvent,
};
use scale_decode::DecodeAsFields;
/// Trait to uniquely identify the events's identity from the runtime metadata.
///
@@ -26,7 +28,7 @@ use codec::{
///
/// The trait is utilized to decode emitted events from a block, via obtaining the
/// form of the `Event` from the metadata.
pub trait StaticEvent: Decode {
pub trait StaticEvent: DecodeAsFields {
/// Pallet name.
const PALLET: &'static str;
/// Event name.
+2
View File
@@ -181,6 +181,8 @@ pub mod ext {
pub use codec;
pub use frame_metadata;
pub use scale_bits;
pub use scale_decode;
pub use scale_encode;
pub use scale_value;
#[cfg(feature = "substrate-compat")]
pub use sp_core;
@@ -0,0 +1,51 @@
// Copyright 2019-2022 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::Metadata;
use crate::error::Error;
/// This trait is implemented for all types that also implement [`scale_decode::DecodeAsType`].
pub trait DecodeWithMetadata: Sized {
/// Given some metadata and a type ID, attempt to SCALE decode the provided bytes into `Self`.
fn decode_with_metadata(
bytes: &mut &[u8],
type_id: u32,
metadata: &Metadata,
) -> Result<Self, Error>;
}
impl<T: scale_decode::DecodeAsType> DecodeWithMetadata for T {
fn decode_with_metadata(
bytes: &mut &[u8],
type_id: u32,
metadata: &Metadata,
) -> Result<T, Error> {
let val = T::decode_as_type(bytes, type_id, metadata.types())?;
Ok(val)
}
}
/// This trait is implemented for all types that also implement [`scale_encode::EncodeAsType`].
pub trait EncodeWithMetadata {
/// SCALE encode this type to bytes, possibly with the help of metadata.
fn encode_with_metadata(
&self,
type_id: u32,
metadata: &Metadata,
bytes: &mut Vec<u8>,
) -> Result<(), Error>;
}
impl<T: scale_encode::EncodeAsType> EncodeWithMetadata for T {
/// SCALE encode this type to bytes, possibly with the help of metadata.
fn encode_with_metadata(
&self,
type_id: u32,
metadata: &Metadata,
bytes: &mut Vec<u8>,
) -> Result<(), Error> {
self.encode_as_type_to(type_id, metadata.types(), bytes)?;
Ok(())
}
}
@@ -1,79 +0,0 @@
// Copyright 2019-2022 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::Metadata;
use crate::{
dynamic::DecodedValue,
error::Error,
};
use codec::Decode;
use frame_metadata::StorageEntryType;
/// This trait is implemented for types which can be decoded with the help of metadata.
pub trait DecodeWithMetadata {
/// The type that we'll get back from decoding.
type Target;
/// Given some metadata and a type ID, attempt to SCALE decode the provided bytes into `Self`.
fn decode_with_metadata(
bytes: &mut &[u8],
type_id: u32,
metadata: &Metadata,
) -> Result<Self::Target, Error>;
/// Decode a storage item using metadata. By default, this uses the metadata to
/// work out the type ID to use, but for static items we can short circuit this
/// lookup.
fn decode_storage_with_metadata(
bytes: &mut &[u8],
pallet_name: &str,
storage_entry: &str,
metadata: &Metadata,
) -> Result<Self::Target, Error> {
let ty = &metadata.pallet(pallet_name)?.storage(storage_entry)?.ty;
let id = match ty {
StorageEntryType::Plain(ty) => ty.id(),
StorageEntryType::Map { value, .. } => value.id(),
};
Self::decode_with_metadata(bytes, id, metadata)
}
}
// Things can be dynamically decoded to our Value type:
impl DecodeWithMetadata for DecodedValue {
type Target = Self;
fn decode_with_metadata(
bytes: &mut &[u8],
type_id: u32,
metadata: &Metadata,
) -> Result<Self::Target, Error> {
let res = scale_value::scale::decode_as_type(bytes, type_id, metadata.types())?;
Ok(res)
}
}
/// Any type implementing [`Decode`] can also be decoded with the help of metadata.
pub struct DecodeStaticType<T>(std::marker::PhantomData<T>);
impl<T: Decode> DecodeWithMetadata for DecodeStaticType<T> {
type Target = T;
fn decode_with_metadata(
bytes: &mut &[u8],
_type_id: u32,
_metadata: &Metadata,
) -> Result<Self::Target, Error> {
T::decode(bytes).map_err(|e| e.into())
}
fn decode_storage_with_metadata(
bytes: &mut &[u8],
_pallet_name: &str,
_storage_entry: &str,
_metadata: &Metadata,
) -> Result<Self::Target, Error> {
T::decode(bytes).map_err(|e| e.into())
}
}
@@ -1,67 +0,0 @@
// Copyright 2019-2022 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use crate::{
dynamic::Value,
error::Error,
metadata::Metadata,
};
use codec::Encode;
/// This trait is implemented for types which can be encoded with the help of metadata.
pub trait EncodeWithMetadata {
/// SCALE encode this type to bytes, possibly with the help of metadata.
fn encode_with_metadata(
&self,
type_id: u32,
metadata: &Metadata,
bytes: &mut Vec<u8>,
) -> Result<(), Error>;
}
impl EncodeWithMetadata for Value<()> {
fn encode_with_metadata(
&self,
type_id: u32,
metadata: &Metadata,
bytes: &mut Vec<u8>,
) -> Result<(), Error> {
scale_value::scale::encode_as_type(self, type_id, metadata.types(), bytes)
.map_err(|e| e.into())
}
}
/// Any type implementing [`Encode`] can also be encoded with the help of metadata.
pub struct EncodeStaticType<T>(pub T);
impl<T: Encode> EncodeWithMetadata for EncodeStaticType<T> {
fn encode_with_metadata(
&self,
_type_id: u32,
_metadata: &Metadata,
bytes: &mut Vec<u8>,
) -> Result<(), Error> {
self.0.encode_to(bytes);
Ok(())
}
}
// We can transparently Encode anything wrapped in EncodeStaticType, too.
impl<E: Encode> Encode for EncodeStaticType<E> {
fn size_hint(&self) -> usize {
self.0.size_hint()
}
fn encode_to<T: codec::Output + ?Sized>(&self, dest: &mut T) {
self.0.encode_to(dest)
}
fn encode(&self) -> Vec<u8> {
self.0.encode()
}
fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
self.0.using_encoded(f)
}
fn encoded_size(&self) -> usize {
self.0.encoded_size()
}
}
+45 -48
View File
@@ -78,13 +78,19 @@ pub enum MetadataError {
#[derive(Debug)]
struct MetadataInner {
metadata: RuntimeMetadataV14,
pallets: HashMap<String, PalletMetadata>,
// Events are hashed by pallet an error index (decode oriented)
events: HashMap<(u8, u8), EventMetadata>,
// Errors are hashed by pallet index.
// Errors are hashed by pallet and error index (decode oriented)
errors: HashMap<(u8, u8), ErrorMetadata>,
// Other pallet details are hashed by pallet name.
pallets: HashMap<String, PalletMetadata>,
// Type of the DispatchError type, which is what comes back if
// an extrinsic fails.
dispatch_error_ty: Option<u32>,
// The hashes uniquely identify parts of the metadata; different
// hashes mean some type difference exists between static and runtime
// versions. We cache them here to avoid recalculating:
@@ -245,8 +251,9 @@ impl Metadata {
pub struct PalletMetadata {
index: u8,
name: String,
call_indexes: HashMap<String, u8>,
call_metadata: HashMap<String, CallMetadata>,
call_ty_id: Option<u32>,
event_ty_id: Option<u32>,
storage: HashMap<String, StorageEntryMetadata<PortableForm>>,
constants: HashMap<String, PalletConstantMetadata<PortableForm>>,
}
@@ -268,11 +275,17 @@ impl PalletMetadata {
self.call_ty_id
}
/// If events exist for this pallet, this returns the type ID of the variant
/// representing the different possible events.
pub fn event_ty_id(&self) -> Option<u32> {
self.event_ty_id
}
/// Attempt to resolve a call into an index in this pallet, failing
/// if the call is not found in this pallet.
pub fn call_index(&self, function: &str) -> Result<u8, MetadataError> {
let fn_index = *self
.call_indexes
pub fn call(&self, function: &str) -> Result<&CallMetadata, MetadataError> {
let fn_index = self
.call_metadata
.get(function)
.ok_or(MetadataError::CallNotFound)?;
Ok(fn_index)
@@ -297,37 +310,21 @@ impl PalletMetadata {
}
}
/// Metadata for specific field.
#[derive(Clone, Debug)]
pub struct EventFieldMetadata {
name: Option<String>,
type_name: Option<String>,
type_id: u32,
pub struct CallMetadata {
call_index: u8,
fields: Vec<scale_info::Field<scale_info::form::PortableForm>>,
}
impl EventFieldMetadata {
/// Construct a new [`EventFieldMetadata`]
pub fn new(name: Option<String>, type_name: Option<String>, type_id: u32) -> Self {
EventFieldMetadata {
name,
type_name,
type_id,
}
impl CallMetadata {
/// Index of this call.
pub fn index(&self) -> u8 {
self.call_index
}
/// Get the name of the field.
pub fn name(&self) -> Option<&str> {
self.name.as_deref()
}
/// Get the type name of the field as it appears in the code
pub fn type_name(&self) -> Option<&str> {
self.type_name.as_deref()
}
/// Get the id of a type
pub fn type_id(&self) -> u32 {
self.type_id
/// The names, type names & types of each field in the call data.
pub fn fields(&self) -> &[scale_info::Field<scale_info::form::PortableForm>] {
&self.fields
}
}
@@ -338,7 +335,7 @@ pub struct EventMetadata {
// behind an Arc to avoid lots of needless clones of it existing.
pallet: Arc<str>,
event: String,
fields: Vec<EventFieldMetadata>,
fields: Vec<scale_info::Field<scale_info::form::PortableForm>>,
docs: Vec<String>,
}
@@ -354,7 +351,7 @@ impl EventMetadata {
}
/// The names, type names & types of each field in the event.
pub fn fields(&self) -> &[EventFieldMetadata] {
pub fn fields(&self) -> &[scale_info::Field<scale_info::form::PortableForm>] {
&self.fields
}
@@ -437,14 +434,23 @@ impl TryFrom<RuntimeMetadataPrefixed> for Metadata {
.iter()
.map(|pallet| {
let call_ty_id = pallet.calls.as_ref().map(|c| c.ty.id());
let event_ty_id = pallet.event.as_ref().map(|e| e.ty.id());
let call_indexes =
let call_metadata =
pallet.calls.as_ref().map_or(Ok(HashMap::new()), |call| {
let type_def_variant = get_type_def_variant(call.ty.id())?;
let call_indexes = type_def_variant
.variants()
.iter()
.map(|v| (v.name().clone(), v.index()))
.map(|v| {
(
v.name().clone(),
CallMetadata {
call_index: v.index(),
fields: v.fields().to_vec(),
},
)
})
.collect();
Ok(call_indexes)
})?;
@@ -466,8 +472,9 @@ impl TryFrom<RuntimeMetadataPrefixed> for Metadata {
let pallet_metadata = PalletMetadata {
index: pallet.index,
name: pallet.name.to_string(),
call_indexes,
call_metadata,
call_ty_id,
event_ty_id,
storage,
constants,
};
@@ -488,17 +495,7 @@ impl TryFrom<RuntimeMetadataPrefixed> for Metadata {
EventMetadata {
pallet: pallet_name.clone(),
event: variant.name().to_owned(),
fields: variant
.fields()
.iter()
.map(|f| {
EventFieldMetadata::new(
f.name().map(|n| n.to_owned()),
f.type_name().map(|n| n.to_owned()),
f.ty().id(),
)
})
.collect(),
fields: variant.fields().to_vec(),
docs: variant.docs().to_vec(),
},
);
+2 -9
View File
@@ -4,8 +4,7 @@
//! Types representing the metadata obtained from a node.
mod decode_with_metadata;
mod encode_with_metadata;
mod decode_encode_traits;
mod hash_cache;
mod metadata_location;
mod metadata_type;
@@ -14,7 +13,6 @@ pub use metadata_location::MetadataLocation;
pub use metadata_type::{
ErrorMetadata,
EventFieldMetadata,
EventMetadata,
InvalidMetadataError,
Metadata,
@@ -22,12 +20,7 @@ pub use metadata_type::{
PalletMetadata,
};
pub use decode_with_metadata::{
DecodeStaticType,
pub use decode_encode_traits::{
DecodeWithMetadata,
};
pub use encode_with_metadata::{
EncodeStaticType,
EncodeWithMetadata,
};
+5 -13
View File
@@ -21,30 +21,22 @@
//! Fetching storage keys
//!
//! ```no_run
//! # #[tokio::main]
//! # async fn main() {
//! use subxt::{ PolkadotConfig, OnlineClient, storage::StorageKey };
//!
//! #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")]
//! pub mod polkadot {}
//!
//! # #[tokio::main]
//! # async fn main() {
//! let api = OnlineClient::<PolkadotConfig>::new().await.unwrap();
//!
//! let key = polkadot::storage()
//! .xcm_pallet()
//! .version_notifiers_root()
//! .to_bytes();
//!
//! // Fetch up to 10 keys.
//! let keys = api
//! let genesis_hash = api
//! .rpc()
//! .storage_keys_paged(&key, 10, None, None)
//! .genesis_hash()
//! .await
//! .unwrap();
//!
//! for key in keys.iter() {
//! println!("Key: 0x{}", hex::encode(&key));
//! }
//! println!("{genesis_hash}");
//! # }
//! ```
+6 -14
View File
@@ -9,33 +9,25 @@
//!
//! # Example
//!
//! Fetching storage keys
//! Fetching the chain genesis hash.
//!
//! ```no_run
//! # #[tokio::main]
//! # async fn main() {
//! use subxt::{ PolkadotConfig, OnlineClient, storage::StorageKey };
//!
//! #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")]
//! pub mod polkadot {}
//!
//! # #[tokio::main]
//! # async fn main() {
//! let api = OnlineClient::<PolkadotConfig>::new().await.unwrap();
//!
//! let key = polkadot::storage()
//! .xcm_pallet()
//! .version_notifiers_root()
//! .to_bytes();
//!
//! // Fetch up to 10 keys.
//! let keys = api
//! let genesis_hash = api
//! .rpc()
//! .storage_keys_paged(&key, 10, None, None)
//! .genesis_hash()
//! .await
//! .unwrap();
//!
//! for key in keys.iter() {
//! println!("Key: 0x{}", hex::encode(&key));
//! }
//! println!("{genesis_hash}");
//! # }
//! ```
+10 -16
View File
@@ -6,7 +6,6 @@
mod storage_address;
mod storage_client;
mod storage_map_key;
mod storage_type;
pub mod utils;
@@ -24,19 +23,14 @@ pub use crate::rpc::types::StorageKey;
/// Types representing an address which describes where a storage
/// entry lives and how to properly decode it.
pub mod address {
pub use super::{
storage_address::{
dynamic,
dynamic_root,
DynamicStorageAddress,
StaticStorageAddress,
StorageAddress,
Yes,
},
storage_map_key::{
StorageHasher,
StorageMapKey,
},
pub use super::storage_address::{
dynamic,
dynamic_root,
Address,
DynamicAddress,
StaticStorageMapKey,
StorageAddress,
Yes,
};
}
@@ -45,7 +39,7 @@ pub mod address {
pub use storage_address::{
dynamic,
dynamic_root,
DynamicStorageAddress,
StaticStorageAddress,
Address,
DynamicAddress,
StorageAddress,
};
+118 -112
View File
@@ -2,7 +2,6 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::storage_map_key::StorageMapKey;
use crate::{
dynamic::{
DecodedValueThunk,
@@ -18,13 +17,13 @@ use crate::{
Metadata,
},
};
use frame_metadata::StorageEntryType;
use frame_metadata::{
StorageEntryType,
StorageHasher,
};
use scale_info::TypeDef;
use std::borrow::Cow;
// We use this type a bunch, so export it from here.
pub use frame_metadata::StorageHasher;
/// This represents a storage address. Anything implementing this trait
/// can be used to fetch and iterate over storage entries.
pub trait StorageAddress {
@@ -66,34 +65,55 @@ pub trait StorageAddress {
/// fetched and returned with a default value in the type system.
pub struct Yes;
/// This represents a statically generated storage lookup address.
pub struct StaticStorageAddress<ReturnTy, Fetchable, Defaultable, Iterable> {
pallet_name: &'static str,
entry_name: &'static str,
// How to access the specific value at that storage address.
storage_entry_keys: Vec<StorageMapKey>,
// Hash provided from static code for validation.
/// A concrete storage address. This can be created from static values (ie those generated
/// via the `subxt` macro) or dynamic values via [`dynamic`] and [`dynamic_root`].
pub struct Address<StorageKey, ReturnTy, Fetchable, Defaultable, Iterable> {
pallet_name: Cow<'static, str>,
entry_name: Cow<'static, str>,
storage_entry_keys: Vec<StorageKey>,
validation_hash: Option<[u8; 32]>,
_marker: std::marker::PhantomData<(ReturnTy, Fetchable, Defaultable, Iterable)>,
}
impl<ReturnTy, Fetchable, Defaultable, Iterable>
StaticStorageAddress<ReturnTy, Fetchable, Defaultable, Iterable>
/// A typical storage address constructed at runtime rather than via the `subxt` macro; this
/// has no restriction on what it can be used for (since we don't statically know).
pub type DynamicAddress<StorageKey> =
Address<StorageKey, DecodedValueThunk, Yes, Yes, Yes>;
impl<StorageKey, ReturnTy, Fetchable, Defaultable, Iterable>
Address<StorageKey, ReturnTy, Fetchable, Defaultable, Iterable>
where
StorageKey: EncodeWithMetadata,
ReturnTy: DecodeWithMetadata,
{
/// Create a new [`StaticStorageAddress`] that will be validated
/// against node metadata using the hash given.
/// Create a new [`Address`] to use to access a storage entry.
pub fn new(
pallet_name: impl Into<String>,
entry_name: impl Into<String>,
storage_entry_keys: Vec<StorageKey>,
) -> Self {
Self {
pallet_name: Cow::Owned(pallet_name.into()),
entry_name: Cow::Owned(entry_name.into()),
storage_entry_keys: storage_entry_keys.into_iter().collect(),
validation_hash: None,
_marker: std::marker::PhantomData,
}
}
/// Create a new [`Address`] using static strings for the pallet and call name.
/// This is only expected to be used from codegen.
#[doc(hidden)]
pub fn new_static(
pallet_name: &'static str,
entry_name: &'static str,
storage_entry_keys: Vec<StorageMapKey>,
storage_entry_keys: Vec<StorageKey>,
hash: [u8; 32],
) -> Self {
Self {
pallet_name,
entry_name,
storage_entry_keys,
pallet_name: Cow::Borrowed(pallet_name),
entry_name: Cow::Borrowed(entry_name),
storage_entry_keys: storage_entry_keys.into_iter().collect(),
validation_hash: Some(hash),
_marker: std::marker::PhantomData,
}
@@ -107,100 +127,24 @@ where
}
}
/// Return bytes representing this storage entry.
pub fn to_bytes(&self) -> Vec<u8> {
let mut bytes = Vec::new();
super::utils::write_storage_address_root_bytes(self, &mut bytes);
for entry in &self.storage_entry_keys {
entry.to_bytes(&mut bytes);
}
bytes
}
/// Return bytes representing the root of this storage entry (ie a hash of
/// the pallet and entry name).
/// the pallet and entry name). Use [`crate::storage::StorageClient::address_bytes()`]
/// to obtain the bytes representing the entire address.
pub fn to_root_bytes(&self) -> Vec<u8> {
super::utils::storage_address_root_bytes(self)
}
}
impl<ReturnTy, Fetchable, Defaultable, Iterable> StorageAddress
for StaticStorageAddress<ReturnTy, Fetchable, Defaultable, Iterable>
impl<StorageKey, ReturnTy, Fetchable, Defaultable, Iterable> StorageAddress
for Address<StorageKey, ReturnTy, Fetchable, Defaultable, Iterable>
where
StorageKey: EncodeWithMetadata,
ReturnTy: DecodeWithMetadata,
{
type Target = ReturnTy;
type IsFetchable = Fetchable;
type IsDefaultable = Defaultable;
type IsIterable = Iterable;
type IsFetchable = Fetchable;
fn pallet_name(&self) -> &str {
self.pallet_name
}
fn entry_name(&self) -> &str {
self.entry_name
}
fn append_entry_bytes(
&self,
_metadata: &Metadata,
bytes: &mut Vec<u8>,
) -> Result<(), Error> {
for entry in &self.storage_entry_keys {
entry.to_bytes(bytes);
}
Ok(())
}
fn validation_hash(&self) -> Option<[u8; 32]> {
self.validation_hash
}
}
/// This represents a dynamically generated storage address.
pub struct DynamicStorageAddress<'a, Encodable> {
pallet_name: Cow<'a, str>,
entry_name: Cow<'a, str>,
storage_entry_keys: Vec<Encodable>,
}
/// Construct a new dynamic storage lookup to the root of some entry.
pub fn dynamic_root<'a>(
pallet_name: impl Into<Cow<'a, str>>,
entry_name: impl Into<Cow<'a, str>>,
) -> DynamicStorageAddress<'a, Value> {
DynamicStorageAddress {
pallet_name: pallet_name.into(),
entry_name: entry_name.into(),
storage_entry_keys: vec![],
}
}
/// Construct a new dynamic storage lookup.
pub fn dynamic<'a, Encodable: EncodeWithMetadata>(
pallet_name: impl Into<Cow<'a, str>>,
entry_name: impl Into<Cow<'a, str>>,
storage_entry_keys: Vec<Encodable>,
) -> DynamicStorageAddress<'a, Encodable> {
DynamicStorageAddress {
pallet_name: pallet_name.into(),
entry_name: entry_name.into(),
storage_entry_keys,
}
}
impl<'a, Encodable> StorageAddress for DynamicStorageAddress<'a, Encodable>
where
Encodable: EncodeWithMetadata,
{
type Target = DecodedValueThunk;
// For dynamic types, we have no static guarantees about any of
// this stuff, so we just allow it and let it fail at runtime:
type IsFetchable = Yes;
type IsDefaultable = Yes;
type IsIterable = Yes;
fn pallet_name(&self) -> &str {
&self.pallet_name
@@ -239,11 +183,9 @@ where
// If the key is not a tuple, encode a single value to the key type.
let type_ids = match ty.type_def() {
TypeDef::Tuple(tuple) => {
tuple.fields().iter().map(|f| f.id()).collect()
}
_other => {
vec![key.id()]
either::Either::Left(tuple.fields().iter().map(|f| f.id()))
}
_other => either::Either::Right(std::iter::once(key.id())),
};
if type_ids.len() != self.storage_entry_keys.len() {
@@ -257,19 +199,19 @@ where
if hashers.len() == 1 {
// One hasher; hash a tuple of all SCALE encoded bytes with the one hash function.
let mut input = Vec::new();
for (key, type_id) in self.storage_entry_keys.iter().zip(type_ids) {
let iter = self.storage_entry_keys.iter().zip(type_ids);
for (key, type_id) in iter {
key.encode_with_metadata(type_id, metadata, &mut input)?;
}
super::storage_map_key::hash_bytes(&input, &hashers[0], bytes);
hash_bytes(&input, &hashers[0], bytes);
Ok(())
} else if hashers.len() == type_ids.len() {
let iter = self.storage_entry_keys.iter().zip(type_ids).zip(hashers);
// A hasher per field; encode and hash each field independently.
for ((key, type_id), hasher) in
self.storage_entry_keys.iter().zip(type_ids).zip(hashers)
{
for ((key, type_id), hasher) in iter {
let mut input = Vec::new();
key.encode_with_metadata(type_id, metadata, &mut input)?;
super::storage_map_key::hash_bytes(&input, hasher, bytes);
hash_bytes(&input, hasher, bytes);
}
Ok(())
} else {
@@ -283,4 +225,68 @@ where
}
}
}
fn validation_hash(&self) -> Option<[u8; 32]> {
self.validation_hash
}
}
/// A storage map key whose bytes have been SCALE encoded ahead of time;
/// typically this is produced by the statically generated interface.
pub struct StaticStorageMapKey(pub Vec<u8>);

impl StaticStorageMapKey {
    /// Construct a [`StaticStorageMapKey`], SCALE encoding the provided value immediately.
    pub fn new<K: codec::Encode>(value: K) -> StaticStorageMapKey {
        StaticStorageMapKey(value.encode())
    }
}
impl EncodeWithMetadata for StaticStorageMapKey {
    fn encode_with_metadata(
        &self,
        _type_id: u32,
        _metadata: &Metadata,
        bytes: &mut Vec<u8>,
    ) -> Result<(), Error> {
        // The key was encoded up front, so no type information is needed here;
        // simply append the stored bytes. This cannot fail.
        bytes.extend_from_slice(&self.0);
        Ok(())
    }
}
/// Construct a new dynamic storage lookup to the root of some entry.
pub fn dynamic_root(
pallet_name: impl Into<String>,
entry_name: impl Into<String>,
) -> DynamicAddress<Value> {
DynamicAddress::new(pallet_name, entry_name, vec![])
}
/// Construct a new dynamic storage lookup, keyed by the given entry keys.
pub fn dynamic<StorageKey>(
    pallet_name: impl Into<String>,
    entry_name: impl Into<String>,
    storage_entry_keys: Vec<StorageKey>,
) -> DynamicAddress<StorageKey>
where
    StorageKey: EncodeWithMetadata,
{
    DynamicAddress::new(pallet_name, entry_name, storage_entry_keys)
}
/// Hash some already-SCALE-encoded bytes with the given [`StorageHasher`],
/// appending the result to `bytes`.
fn hash_bytes(input: &[u8], hasher: &StorageHasher, bytes: &mut Vec<u8>) {
    match hasher {
        StorageHasher::Identity => bytes.extend_from_slice(input),
        StorageHasher::Blake2_128 => {
            bytes.extend_from_slice(&sp_core_hashing::blake2_128(input))
        }
        // The "Concat" variants append the raw input after the hash.
        StorageHasher::Blake2_128Concat => {
            bytes.extend_from_slice(&sp_core_hashing::blake2_128(input));
            bytes.extend_from_slice(input);
        }
        StorageHasher::Blake2_256 => {
            bytes.extend_from_slice(&sp_core_hashing::blake2_256(input))
        }
        StorageHasher::Twox128 => bytes.extend_from_slice(&sp_core_hashing::twox_128(input)),
        StorageHasher::Twox256 => bytes.extend_from_slice(&sp_core_hashing::twox_256(input)),
        StorageHasher::Twox64Concat => {
            bytes.extend_from_slice(&sp_core_hashing::twox_64(input));
            bytes.extend_from_slice(input);
        }
    }
}
+22
View File
@@ -7,6 +7,7 @@ use super::{
validate_storage_address,
Storage,
},
utils,
StorageAddress,
};
@@ -57,6 +58,27 @@ where
) -> Result<(), Error> {
validate_storage_address(address, &self.client.metadata())
}
/// Convert some storage address into the raw bytes that would be submitted to the node in order
/// to retrieve the entries at the root of the associated address.
///
/// This hashes only the pallet and entry names; no map-key bytes are appended,
/// so it cannot fail.
pub fn address_root_bytes<Address: StorageAddress>(
    &self,
    address: &Address,
) -> Vec<u8> {
    utils::storage_address_root_bytes(address)
}
/// Convert some storage address into the raw bytes that would be submitted to the node in order
/// to retrieve an entry. This fails if [`StorageAddress::append_entry_bytes`] does; in the built-in
/// implementation this would be if the pallet and storage entry being asked for is not available on the
/// node you're communicating with, or if the metadata is missing some type information (which should not
/// happen).
pub fn address_bytes<Address: StorageAddress>(
    &self,
    address: &Address,
) -> Result<Vec<u8>, Error> {
    // Root bytes (pallet + entry name hashes) followed by any map-key bytes.
    utils::storage_address_bytes(address, &self.client.metadata())
}
}
impl<T, Client> StorageClient<T, Client>
-52
View File
@@ -1,52 +0,0 @@
// Copyright 2019-2022 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use codec::Encode;
// We use this type a bunch, so export it from here.
pub use frame_metadata::StorageHasher;
/// Storage key for a Map.
#[derive(Clone)]
pub struct StorageMapKey {
    // The SCALE encoded bytes of the key value.
    value: Vec<u8>,
    // How to hash those bytes when building the final storage key.
    hasher: StorageHasher,
}

impl StorageMapKey {
    /// Create a new [`StorageMapKey`] by pre-encoding static data and pairing it with a hasher.
    pub fn new<Encodable: Encode>(
        value: Encodable,
        hasher: StorageHasher,
    ) -> StorageMapKey {
        Self {
            value: value.encode(),
            hasher,
        }
    }

    /// Convert this [`StorageMapKey`] into bytes and append them to some existing bytes.
    pub fn to_bytes(&self, bytes: &mut Vec<u8>) {
        hash_bytes(&self.value, &self.hasher, bytes)
    }
}
/// Take some SCALE encoded bytes and a [`StorageHasher`] and hash the bytes accordingly.
pub(super) fn hash_bytes(input: &[u8], hasher: &StorageHasher, bytes: &mut Vec<u8>) {
    match hasher {
        StorageHasher::Identity => bytes.extend(input),
        StorageHasher::Blake2_128 => bytes.extend(sp_core_hashing::blake2_128(input)),
        // The "Concat" variants append the raw input after the hash.
        StorageHasher::Blake2_128Concat => {
            bytes.extend(sp_core_hashing::blake2_128(input));
            bytes.extend(input);
        }
        StorageHasher::Blake2_256 => bytes.extend(sp_core_hashing::blake2_256(input)),
        StorageHasher::Twox128 => bytes.extend(sp_core_hashing::twox_128(input)),
        StorageHasher::Twox256 => bytes.extend(sp_core_hashing::twox_256(input)),
        StorageHasher::Twox64Concat => {
            bytes.extend(sp_core_hashing::twox_64(input));
            bytes.extend(input);
        }
    }
}
+39 -45
View File
@@ -7,10 +7,7 @@ use super::storage_address::{
Yes,
};
use crate::{
client::{
OfflineClientT,
OnlineClientT,
},
client::OnlineClientT,
error::Error,
metadata::{
DecodeWithMetadata,
@@ -50,32 +47,16 @@ impl<T: Config, Client> Storage<T, Client> {
}
}
impl<T, Client> Storage<T, Client>
where
T: Config,
Client: OfflineClientT<T>,
{
/// Run the validation logic against some storage address you'd like to access.
///
/// Method has the same meaning as [`StorageClient::validate`](super::storage_client::StorageClient::validate).
pub fn validate<Address: StorageAddress>(
&self,
address: &Address,
) -> Result<(), Error> {
validate_storage_address(address, &self.client.metadata())
}
}
impl<T, Client> Storage<T, Client>
where
T: Config,
Client: OnlineClientT<T>,
{
/// Fetch the raw encoded value at the address/key given.
pub fn fetch_raw<'a>(
pub fn fetch_raw<'address>(
&self,
key: &'a [u8],
) -> impl Future<Output = Result<Option<Vec<u8>>, Error>> + 'a {
key: &'address [u8],
) -> impl Future<Output = Result<Option<Vec<u8>>, Error>> + 'address {
let client = self.client.clone();
let block_hash = self.block_hash;
// Ensure that the returned future doesn't have a lifetime tied to api.storage(),
@@ -116,14 +97,12 @@ where
/// println!("Value: {:?}", value);
/// # }
/// ```
pub fn fetch<'a, Address>(
pub fn fetch<'address, Address>(
&self,
address: &'a Address,
) -> impl Future<
Output = Result<Option<<Address::Target as DecodeWithMetadata>::Target>, Error>,
> + 'a
address: &'address Address,
) -> impl Future<Output = Result<Option<Address::Target>, Error>> + 'address
where
Address: StorageAddress<IsFetchable = Yes> + 'a,
Address: StorageAddress<IsFetchable = Yes> + 'address,
{
let client = self.clone();
async move {
@@ -131,13 +110,13 @@ where
// is likely to actually correspond to a real storage entry or not.
// if not, it means static codegen doesn't line up with runtime
// metadata.
client.validate(address)?;
validate_storage_address(address, &client.client.metadata())?;
// Look up the return type ID to enable DecodeWithMetadata:
let metadata = client.client.metadata();
let lookup_bytes = super::utils::storage_address_bytes(address, &metadata)?;
if let Some(data) = client.fetch_raw(&lookup_bytes).await? {
let val = <Address::Target as DecodeWithMetadata>::decode_storage_with_metadata(
let val = decode_storage_with_metadata::<Address::Target>(
&mut &*data,
address.pallet_name(),
address.entry_name(),
@@ -151,13 +130,12 @@ where
}
/// Fetch a StorageKey that has a default value with an optional block hash.
pub fn fetch_or_default<'a, Address>(
pub fn fetch_or_default<'address, Address>(
&self,
address: &'a Address,
) -> impl Future<Output = Result<<Address::Target as DecodeWithMetadata>::Target, Error>>
+ 'a
address: &'address Address,
) -> impl Future<Output = Result<Address::Target, Error>> + 'address
where
Address: StorageAddress<IsFetchable = Yes, IsDefaultable = Yes> + 'a,
Address: StorageAddress<IsFetchable = Yes, IsDefaultable = Yes> + 'address,
{
let client = self.clone();
async move {
@@ -176,7 +154,7 @@ where
return_type_from_storage_entry_type(&storage_metadata.ty);
let bytes = &mut &storage_metadata.default[..];
let val = <Address::Target as DecodeWithMetadata>::decode_with_metadata(
let val = Address::Target::decode_with_metadata(
bytes,
return_ty_id,
&metadata,
@@ -189,12 +167,12 @@ where
/// Fetch up to `count` keys for a storage map in lexicographic order.
///
/// Supports pagination by passing a value to `start_key`.
pub fn fetch_keys<'a>(
pub fn fetch_keys<'address>(
&self,
key: &'a [u8],
key: &'address [u8],
count: u32,
start_key: Option<&'a [u8]>,
) -> impl Future<Output = Result<Vec<StorageKey>, Error>> + 'a {
start_key: Option<&'address [u8]>,
) -> impl Future<Output = Result<Vec<StorageKey>, Error>> + 'address {
let client = self.client.clone();
let block_hash = self.block_hash;
async move {
@@ -252,7 +230,7 @@ where
// is likely to actually correspond to a real storage entry or not.
// if not, it means static codegen doesn't line up with runtime
// metadata.
client.validate(&address)?;
validate_storage_address(&address, &client.client.metadata())?;
let metadata = client.client.metadata();
@@ -303,9 +281,7 @@ where
ReturnTy: DecodeWithMetadata,
{
/// Returns the next key value pair from a map.
pub async fn next(
&mut self,
) -> Result<Option<(StorageKey, ReturnTy::Target)>, Error> {
pub async fn next(&mut self) -> Result<Option<(StorageKey, ReturnTy)>, Error> {
loop {
if let Some((k, v)) = self.buffer.pop() {
let val = ReturnTy::decode_with_metadata(
@@ -402,3 +378,21 @@ fn return_type_from_storage_entry_type(entry: &StorageEntryType<PortableForm>) -
StorageEntryType::Map { value, .. } => value.id(),
}
}
/// Look up the value type of the given pallet + storage entry in the metadata,
/// and use it to decode the provided bytes into a `T`.
fn decode_storage_with_metadata<T: DecodeWithMetadata>(
    bytes: &mut &[u8],
    pallet_name: &str,
    storage_entry: &str,
    metadata: &Metadata,
) -> Result<T, Error> {
    let entry_ty = &metadata.pallet(pallet_name)?.storage(storage_entry)?.ty;
    // Plain entries decode to their type directly; map entries decode to the value type.
    let value_ty_id = match entry_ty {
        StorageEntryType::Plain(ty) => ty.id(),
        StorageEntryType::Map { value, .. } => value.id(),
    };
    T::decode_with_metadata(bytes, value_ty_id, metadata)
}
+5 -3
View File
@@ -14,7 +14,7 @@ use crate::{
/// Return the root of a given [`StorageAddress`]: hash the pallet name and entry name
/// and append those bytes to the output.
pub fn write_storage_address_root_bytes<Address: StorageAddress>(
pub(crate) fn write_storage_address_root_bytes<Address: StorageAddress>(
addr: &Address,
out: &mut Vec<u8>,
) {
@@ -24,7 +24,7 @@ pub fn write_storage_address_root_bytes<Address: StorageAddress>(
/// Outputs the [`storage_address_root_bytes`] as well as any additional bytes that represent
/// a lookup in a storage map at that location.
pub fn storage_address_bytes<Address: StorageAddress>(
pub(crate) fn storage_address_bytes<Address: StorageAddress>(
addr: &Address,
metadata: &Metadata,
) -> Result<Vec<u8>, Error> {
@@ -35,7 +35,9 @@ pub fn storage_address_bytes<Address: StorageAddress>(
}
/// Outputs a vector containing the bytes written by [`write_storage_address_root_bytes`].
pub fn storage_address_root_bytes<Address: StorageAddress>(addr: &Address) -> Vec<u8> {
pub(crate) fn storage_address_root_bytes<Address: StorageAddress>(
addr: &Address,
) -> Vec<u8> {
let mut bytes = Vec::new();
write_storage_address_root_bytes(addr, &mut bytes);
bytes
+3 -2
View File
@@ -27,8 +27,9 @@ pub use self::{
},
tx_payload::{
dynamic,
DynamicTxPayload,
StaticTxPayload,
BoxedPayload,
DynamicPayload,
Payload,
TxPayload,
},
tx_progress::{
+86 -87
View File
@@ -7,19 +7,20 @@
use crate::{
dynamic::Value,
error::{
Error,
MetadataError,
},
error::Error,
metadata::Metadata,
};
use codec::Encode;
use scale_encode::EncodeAsFields;
use scale_value::{
Composite,
ValueDef,
Variant,
};
use std::borrow::Cow;
use std::{
borrow::Cow,
sync::Arc,
};
/// This represents a transaction payload that can be submitted
/// to a node.
@@ -57,31 +58,67 @@ pub struct ValidationDetails<'a> {
pub hash: [u8; 32],
}
/// This represents a statically generated transaction payload.
/// A transaction payload containing some generic `CallData`.
#[derive(Clone, Debug)]
pub struct StaticTxPayload<CallData> {
pallet_name: &'static str,
call_name: &'static str,
pub struct Payload<CallData> {
pallet_name: Cow<'static, str>,
call_name: Cow<'static, str>,
call_data: CallData,
validation_hash: Option<[u8; 32]>,
}
impl<CallData> StaticTxPayload<CallData> {
/// Create a new [`StaticTxPayload`] from static data.
/// A boxed transaction payload.
// Dev Note: Arc used to enable easy cloning (given that we can't have dyn Clone).
pub type BoxedPayload = Payload<Arc<dyn EncodeAsFields + Send + Sync + 'static>>;
/// The type of a payload typically used for dynamic transaction payloads.
pub type DynamicPayload = Payload<Composite<()>>;
impl<CallData> Payload<CallData> {
/// Create a new [`Payload`].
pub fn new(
pallet_name: impl Into<String>,
call_name: impl Into<String>,
call_data: CallData,
) -> Self {
Payload {
pallet_name: Cow::Owned(pallet_name.into()),
call_name: Cow::Owned(call_name.into()),
call_data,
validation_hash: None,
}
}
/// Create a new [`Payload`] using static strings for the pallet and call name.
/// This is only expected to be used from codegen.
#[doc(hidden)]
pub fn new_static(
pallet_name: &'static str,
call_name: &'static str,
call_data: CallData,
validation_hash: [u8; 32],
) -> Self {
StaticTxPayload {
pallet_name,
call_name,
Payload {
pallet_name: Cow::Borrowed(pallet_name),
call_name: Cow::Borrowed(call_name),
call_data,
validation_hash: Some(validation_hash),
}
}
/// Box the payload.
pub fn boxed(self) -> BoxedPayload
where
CallData: EncodeAsFields + Send + Sync + 'static,
{
BoxedPayload {
pallet_name: self.pallet_name,
call_name: self.call_name,
call_data: Arc::new(self.call_data),
validation_hash: self.validation_hash,
}
}
/// Do not validate this call prior to submitting it.
pub fn unvalidated(self) -> Self {
Self {
@@ -96,60 +133,16 @@ impl<CallData> StaticTxPayload<CallData> {
}
}
impl<CallData: Encode> TxPayload for StaticTxPayload<CallData> {
fn encode_call_data_to(
&self,
metadata: &Metadata,
out: &mut Vec<u8>,
) -> Result<(), Error> {
let pallet = metadata.pallet(self.pallet_name)?;
let pallet_index = pallet.index();
let call_index = pallet.call_index(self.call_name)?;
pallet_index.encode_to(out);
call_index.encode_to(out);
self.call_data.encode_to(out);
Ok(())
}
fn validation_details(&self) -> Option<ValidationDetails<'_>> {
self.validation_hash.map(|hash| {
ValidationDetails {
pallet_name: self.pallet_name,
call_name: self.call_name,
hash,
}
})
}
}
/// This represents a dynamically generated transaction payload.
#[derive(Clone, Debug)]
pub struct DynamicTxPayload<'a> {
pallet_name: Cow<'a, str>,
call_name: Cow<'a, str>,
fields: Composite<()>,
}
impl<'a> DynamicTxPayload<'a> {
/// Return the pallet name.
pub fn pallet_name(&self) -> &str {
&self.pallet_name
}
/// Return the call name.
pub fn call_name(&self) -> &str {
&self.call_name
}
/// Convert the dynamic payload into a [`Value`]. This is useful
/// if you need to submit this as part of a larger call.
impl Payload<Composite<()>> {
/// Convert the dynamic `Composite` payload into a [`Value`].
/// This is useful if you want to use this as an argument for a
/// larger dynamic call that wants to use this as a nested call.
pub fn into_value(self) -> Value<()> {
let call = Value {
context: (),
value: ValueDef::Variant(Variant {
name: self.call_name.into_owned(),
values: self.fields,
values: self.call_data,
}),
};
@@ -157,37 +150,43 @@ impl<'a> DynamicTxPayload<'a> {
}
}
/// Construct a new dynamic transaction payload to submit to a node.
pub fn dynamic<'a>(
pallet_name: impl Into<Cow<'a, str>>,
call_name: impl Into<Cow<'a, str>>,
fields: impl Into<Composite<()>>,
) -> DynamicTxPayload<'a> {
DynamicTxPayload {
pallet_name: pallet_name.into(),
call_name: call_name.into(),
fields: fields.into(),
}
}
impl<'a> TxPayload for DynamicTxPayload<'a> {
impl<CallData: EncodeAsFields> TxPayload for Payload<CallData> {
fn encode_call_data_to(
&self,
metadata: &Metadata,
out: &mut Vec<u8>,
) -> Result<(), Error> {
let pallet = metadata.pallet(&self.pallet_name)?;
let call_id = pallet.call_ty_id().ok_or(MetadataError::CallNotFound)?;
let call_value = Value {
context: (),
value: ValueDef::Variant(Variant {
name: self.call_name.to_string(),
values: self.fields.clone(),
}),
};
let call = pallet.call(&self.call_name)?;
pallet.index().encode_to(out);
scale_value::scale::encode_as_type(&call_value, call_id, metadata.types(), out)?;
let pallet_index = pallet.index();
let call_index = call.index();
pallet_index.encode_to(out);
call_index.encode_to(out);
self.call_data
.encode_as_fields_to(call.fields(), metadata.types(), out)?;
Ok(())
}
fn validation_details(&self) -> Option<ValidationDetails<'_>> {
self.validation_hash.map(|hash| {
ValidationDetails {
pallet_name: &self.pallet_name,
call_name: &self.call_name,
hash,
}
})
}
}
/// Construct a transaction at runtime; essentially an alias to [`Payload::new()`]
/// which provides a [`Composite`] value for the call data.
pub fn dynamic(
    pallet_name: impl Into<String>,
    call_name: impl Into<String>,
    call_data: impl Into<Composite<()>>,
) -> DynamicPayload {
    let fields = call_data.into();
    Payload::new(pallet_name, call_name, fields)
}
+12 -1
View File
@@ -18,7 +18,18 @@ use serde::{
/// A 32-byte cryptographic identifier. This is a simplified version of Substrate's
/// `sp_core::crypto::AccountId32`. To obtain more functionality, convert this into
/// that type.
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, Debug)]
#[derive(
Clone,
Eq,
PartialEq,
Ord,
PartialOrd,
Encode,
Decode,
Debug,
scale_encode::EncodeAsType,
scale_decode::DecodeAsType,
)]
pub struct AccountId32(pub [u8; 32]);
impl AsRef<[u8]> for AccountId32 {
+111 -62
View File
@@ -16,55 +16,53 @@ use scale_bits::{
},
Bits,
};
use scale_decode::IntoVisitor;
use std::marker::PhantomData;
macro_rules! store {
($ident: ident; $(($ty: ident, $wrapped: ty)),*) => {
/// Associates `bitvec::store::BitStore` trait with corresponding, type-erased `scale_bits::StoreFormat` enum.
///
/// Used to decode bit sequences by providing `scale_bits::StoreFormat` using
/// `bitvec`-like type type parameters.
pub trait $ident {
/// Corresponding `scale_bits::StoreFormat` value.
const FORMAT: StoreFormat;
/// Number of bits that the backing store types holds.
const BITS: u32;
}
/// Associates `bitvec::store::BitStore` trait with corresponding, type-erased `scale_bits::StoreFormat` enum.
///
/// Used to decode bit sequences by providing `scale_bits::StoreFormat` using
/// `bitvec`-like type parameters.
pub trait BitStore {
    /// Corresponding `scale_bits::StoreFormat` value.
    const FORMAT: StoreFormat;
    /// Number of bits that the backing store type holds.
    const BITS: u32;
}
// Implement `BitStore` for a concrete backing store type, mapping it to the
// matching `scale_bits::StoreFormat` variant.
macro_rules! impl_store {
    ($ty:ident, $wrapped:ty) => {
        impl BitStore for $wrapped {
            const FORMAT: StoreFormat = StoreFormat::$ty;
            const BITS: u32 = <$wrapped>::BITS;
        }
    };
}
impl_store!(U8, u8);
impl_store!(U16, u16);
impl_store!(U32, u32);
impl_store!(U64, u64);
$(
impl $ident for $wrapped {
const FORMAT: StoreFormat = StoreFormat::$ty;
const BITS: u32 = <$wrapped>::BITS;
}
)*
};
}
macro_rules! order {
($ident: ident; $($ty: ident),*) => {
/// Associates `bitvec::order::BitOrder` trait with corresponding, type-erased `scale_bits::OrderFormat` enum.
///
/// Used to decode bit sequences in runtime by providing `scale_bits::OrderFormat` using
/// `bitvec`-like type type parameters.
pub trait $ident {
/// Corresponding `scale_bits::OrderFormat` value.
const FORMAT: OrderFormat;
}
$(
#[doc = concat!("Type-level value that corresponds to `scale_bits::OrderFormat::", stringify!($ty), "` at run-time")]
#[doc = concat!(" and `bitvec::order::BitOrder::", stringify!($ty), "` at the type level.")]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum $ty {}
impl $ident for $ty {
const FORMAT: OrderFormat = OrderFormat::$ty;
}
)*
};
}
store!(BitStore; (U8, u8), (U16, u16), (U32, u32), (U64, u64));
order!(BitOrder; Lsb0, Msb0);
/// Associates `bitvec::order::BitOrder` trait with corresponding, type-erased `scale_bits::OrderFormat` enum.
///
/// Used to decode bit sequences in runtime by providing `scale_bits::OrderFormat` using
/// `bitvec`-like type parameters.
pub trait BitOrder {
    /// Corresponding `scale_bits::OrderFormat` value.
    const FORMAT: OrderFormat;
}

// Declare an empty marker enum named after the given ordering and implement
// `BitOrder` for it, mapping it to the matching `scale_bits::OrderFormat` variant.
macro_rules! impl_order {
    ($ty:ident) => {
        #[doc = concat!("Type-level value that corresponds to `scale_bits::OrderFormat::", stringify!($ty), "` at run-time")]
        #[doc = concat!(" and `bitvec::order::BitOrder::", stringify!($ty), "` at the type level.")]
        #[derive(Clone, Debug, PartialEq, Eq)]
        pub enum $ty {}
        impl BitOrder for $ty {
            const FORMAT: OrderFormat = OrderFormat::$ty;
        }
    };
}
impl_order!(Lsb0);
impl_order!(Msb0);
/// Constructs a run-time format parameters based on the corresponding type-level parameters.
fn bit_format<Store: BitStore, Order: BitOrder>() -> Format {
@@ -77,29 +75,29 @@ fn bit_format<Store: BitStore, Order: BitOrder>() -> Format {
/// `scale_bits::Bits` generic over the bit store (`u8`/`u16`/`u32`/`u64`) and bit order (LSB, MSB)
/// used for SCALE encoding/decoding. Uses `scale_bits::Bits`-default `u8` and LSB format underneath.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DecodedBits<Store: BitStore, Order: BitOrder>(
Bits,
PhantomData<Store>,
PhantomData<Order>,
);
pub struct DecodedBits<Store, Order> {
bits: Bits,
_marker: PhantomData<(Store, Order)>,
}
impl<Store: BitStore, Order: BitOrder> DecodedBits<Store, Order> {
impl<Store, Order> DecodedBits<Store, Order> {
/// Extracts the underlying `scale_bits::Bits` value.
pub fn into_bits(self) -> Bits {
self.0
self.bits
}
/// References the underlying `scale_bits::Bits` value.
pub fn as_bits(&self) -> &Bits {
&self.0
&self.bits
}
}
impl<Store: BitStore, Order: BitOrder> core::iter::FromIterator<bool>
for DecodedBits<Store, Order>
{
impl<Store, Order> core::iter::FromIterator<bool> for DecodedBits<Store, Order> {
fn from_iter<T: IntoIterator<Item = bool>>(iter: T) -> Self {
DecodedBits(Bits::from_iter(iter), PhantomData, PhantomData)
DecodedBits {
bits: Bits::from_iter(iter),
_marker: PhantomData,
}
}
}
@@ -132,21 +130,72 @@ impl<Store: BitStore, Order: BitOrder> codec::Decode for DecodedBits<Store, Orde
let bits = decoder.collect::<Result<Vec<_>, _>>()?;
let bits = Bits::from_iter(bits);
Ok(DecodedBits(bits, PhantomData, PhantomData))
Ok(DecodedBits {
bits,
_marker: PhantomData,
})
}
}
impl<Store: BitStore, Order: BitOrder> codec::Encode for DecodedBits<Store, Order> {
fn size_hint(&self) -> usize {
self.0.size_hint()
self.bits.size_hint()
}
fn encoded_size(&self) -> usize {
self.0.encoded_size()
self.bits.encoded_size()
}
fn encode(&self) -> Vec<u8> {
scale_bits::encode_using_format(self.0.iter(), bit_format::<Store, Order>())
scale_bits::encode_using_format(self.bits.iter(), bit_format::<Store, Order>())
}
}
#[doc(hidden)]
// Visitor driving `scale-decode` decoding of `DecodedBits`; the `PhantomData`
// only threads the store/order type parameters through — no data is stored.
pub struct DecodedBitsVisitor<S, O>(std::marker::PhantomData<(S, O)>);
impl<Store, Order> scale_decode::Visitor for DecodedBitsVisitor<Store, Order> {
    type Value<'scale, 'info> = DecodedBits<Store, Order>;
    type Error = scale_decode::Error;
    // Bypass the shape-specific `visit_*` methods entirely: delegate the whole
    // decode to the `Bits` visitor, then wrap the result in `DecodedBits`.
    fn unchecked_decode_as_type<'scale, 'info>(
        self,
        input: &mut &'scale [u8],
        type_id: scale_decode::visitor::TypeId,
        types: &'info scale_info::PortableRegistry,
    ) -> scale_decode::visitor::DecodeAsTypeResult<
        Self,
        Result<Self::Value<'scale, 'info>, Self::Error>,
    > {
        let res = scale_decode::visitor::decode_with_visitor(
            input,
            type_id.0,
            types,
            Bits::into_visitor(),
        )
        .map(|bits| {
            DecodedBits {
                bits,
                _marker: PhantomData,
            }
        });
        // `Decoded` signals that the decoding was fully handled here.
        scale_decode::visitor::DecodeAsTypeResult::Decoded(res)
    }
}
/// Hook `DecodedBits` into `scale-decode` by handing out its dedicated visitor.
impl<Store, Order> scale_decode::IntoVisitor for DecodedBits<Store, Order> {
    type Visitor = DecodedBitsVisitor<Store, Order>;

    fn into_visitor() -> Self::Visitor {
        // The visitor itself is stateless; `PhantomData` merely carries the
        // store/order type parameters through to the decode step.
        DecodedBitsVisitor(PhantomData)
    }
}
impl<Store, Order> scale_encode::EncodeAsType for DecodedBits<Store, Order> {
fn encode_as_type_to(
&self,
type_id: u32,
types: &scale_info::PortableRegistry,
out: &mut Vec<u8>,
) -> Result<(), scale_encode::Error> {
self.bits.encode_as_type_to(type_id, types, out)
}
}
+5 -49
View File
@@ -4,14 +4,14 @@
//! Miscellaneous utility helpers.
pub mod account_id;
mod account_id;
pub mod bits;
pub mod multi_address;
pub mod multi_signature;
mod multi_address;
mod multi_signature;
mod wrapper_opaque;
use codec::{
Decode,
DecodeAll,
Encode,
};
use derivative::Derivative;
@@ -19,6 +19,7 @@ use derivative::Derivative;
pub use account_id::AccountId32;
pub use multi_address::MultiAddress;
pub use multi_signature::MultiSignature;
pub use wrapper_opaque::WrapperKeepOpaque;
// Used in codegen
#[doc(hidden)]
@@ -39,51 +40,6 @@ impl codec::Encode for Encoded {
}
}
/// A wrapper for any type `T` which implement encode/decode in a way compatible with `Vec<u8>`.
///
/// [`WrapperKeepOpaque`] stores the type only in its opaque format, aka as a `Vec<u8>`. To
/// access the real type `T` [`Self::try_decode`] needs to be used.
#[derive(Derivative, Encode, Decode)]
#[derivative(
Debug(bound = ""),
Clone(bound = ""),
PartialEq(bound = ""),
Eq(bound = ""),
Default(bound = ""),
Hash(bound = "")
)]
pub struct WrapperKeepOpaque<T> {
data: Vec<u8>,
_phantom: PhantomDataSendSync<T>,
}
impl<T: Decode> WrapperKeepOpaque<T> {
/// Try to decode the wrapped type from the inner `data`.
///
/// Returns `None` if the decoding failed.
pub fn try_decode(&self) -> Option<T> {
T::decode_all(&mut &self.data[..]).ok()
}
/// Returns the length of the encoded `T`.
pub fn encoded_len(&self) -> usize {
self.data.len()
}
/// Returns the encoded data.
pub fn encoded(&self) -> &[u8] {
&self.data
}
/// Create from the given encoded `data`.
pub fn from_encoded(data: Vec<u8>) -> Self {
Self {
data,
_phantom: PhantomDataSendSync::new(),
}
}
}
/// A version of [`std::marker::PhantomData`] that is also Send and Sync (which is fine
/// because regardless of the generic param, it is always possible to Send + Sync this
/// 0 size type).
+12 -1
View File
@@ -14,7 +14,18 @@ use codec::{
/// A multi-format address wrapper for on-chain accounts. This is a simplified version of Substrate's
/// `sp_runtime::MultiAddress`. To obtain more functionality, convert this into that type (this conversion
/// functionality is provided via `From` impls if the `substrate-compat` feature is enabled).
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, Debug)]
#[derive(
Clone,
Eq,
PartialEq,
Ord,
PartialOrd,
Encode,
Decode,
Debug,
scale_encode::EncodeAsType,
scale_decode::DecodeAsType,
)]
pub enum MultiAddress<AccountId, AccountIndex> {
/// It's an account ID (pubkey).
Id(AccountId),
+266
View File
@@ -0,0 +1,266 @@
// Copyright 2019-2022 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use super::PhantomDataSendSync;
use codec::{
Compact,
Decode,
DecodeAll,
Encode,
};
use derivative::Derivative;
use scale_decode::{
IntoVisitor,
Visitor,
};
use scale_encode::EncodeAsType;
/// A wrapper for any type `T` which implement encode/decode in a way compatible with `Vec<u8>`.
///
/// [`WrapperKeepOpaque`] stores the type only in its opaque format, aka as a `Vec<u8>`. To
/// access the real type `T` [`Self::try_decode`] needs to be used.
// Dev notes:
//
// - This is adapted from [here](https://github.com/paritytech/substrate/blob/master/frame/support/src/traits/misc.rs).
// - The encoded bytes will be a compact encoded length followed by that number of bytes.
// - However, the TypeInfo describes the type as a composite with first a compact encoded length and next the type itself.
//   [`Encode`] and [`Decode`] impls will "just work" to take this into a `Vec<u8>`, but we need a custom [`EncodeAsType`]
//   and [`Visitor`] implementation to encode and decode based on TypeInfo.
#[derive(Derivative, Encode, Decode)]
#[derivative(
    Debug(bound = ""),
    Clone(bound = ""),
    PartialEq(bound = ""),
    Eq(bound = ""),
    Default(bound = ""),
    Hash(bound = "")
)]
pub struct WrapperKeepOpaque<T> {
    // SCALE-encoded bytes of `T`, without a length prefix; the derived
    // `Encode`/`Decode` on `Vec<u8>` adds/strips the compact length on the wire.
    data: Vec<u8>,
    // Ties the wrapper to `T` while staying `Send + Sync` regardless of `T`.
    _phantom: PhantomDataSendSync<T>,
}
impl<T> WrapperKeepOpaque<T> {
/// Try to decode the wrapped type from the inner `data`.
///
/// Returns `None` if the decoding failed.
pub fn try_decode(&self) -> Option<T>
where
T: Decode,
{
T::decode_all(&mut &self.data[..]).ok()
}
/// Returns the length of the encoded `T`.
pub fn encoded_len(&self) -> usize {
self.data.len()
}
/// Returns the encoded data.
pub fn encoded(&self) -> &[u8] {
&self.data
}
/// Create from the given encoded `data`.
pub fn from_encoded(data: Vec<u8>) -> Self {
Self {
data,
_phantom: PhantomDataSendSync::new(),
}
}
/// Create from some raw value by encoding it.
pub fn from_value(value: T) -> Self
where
T: Encode,
{
Self {
data: value.encode(),
_phantom: PhantomDataSendSync::new(),
}
}
}
impl<T> EncodeAsType for WrapperKeepOpaque<T> {
    // Encode based on metadata: verify that the target type really is a
    // `WrapperKeepOpaque` composite, then emit the stored opaque bytes.
    fn encode_as_type_to(
        &self,
        type_id: u32,
        types: &scale_info::PortableRegistry,
        out: &mut Vec<u8>,
    ) -> Result<(), scale_encode::Error> {
        use scale_encode::error::{
            Error,
            ErrorKind,
            Kind,
        };
        let Some(ty) = types.resolve(type_id) else {
            return Err(Error::new(ErrorKind::TypeNotFound(type_id)))
        };
        // Do a basic check that the target shape lines up.
        let scale_info::TypeDef::Composite(_) = ty.type_def() else {
            return Err(Error::new(ErrorKind::WrongShape {
                actual: Kind::Struct,
                expected: type_id,
            }))
        };
        // Check that the name also lines up.
        if ty.path().ident().as_deref() != Some("WrapperKeepOpaque") {
            return Err(Error::new(ErrorKind::WrongShape {
                actual: Kind::Struct,
                expected: type_id,
            }))
        }
        // Just blat the bytes out.
        // `Vec<u8>::encode_to` prefixes a compact length, which matches the
        // "compact len + bytes" wire shape this type uses.
        self.data.encode_to(out);
        Ok(())
    }
}
/// Visitor used to decode a [`WrapperKeepOpaque`] from its TypeInfo-described
/// composite shape; the `PhantomData` only carries `T` through.
pub struct WrapperKeepOpaqueVisitor<T>(std::marker::PhantomData<T>);
impl<T> Visitor for WrapperKeepOpaqueVisitor<T> {
    type Value<'scale, 'info> = WrapperKeepOpaque<T>;
    type Error = scale_decode::Error;
    /// Decode a [`WrapperKeepOpaque`] from its TypeInfo shape: a 2-field
    /// composite of (compact length, encoded `T`), capturing the `T` bytes
    /// opaquely rather than decoding them.
    fn visit_composite<'scale, 'info>(
        self,
        value: &mut scale_decode::visitor::types::Composite<'scale, 'info>,
        _type_id: scale_decode::visitor::TypeId,
    ) -> Result<Self::Value<'scale, 'info>, Self::Error> {
        use scale_decode::error::{
            Error,
            ErrorKind,
        };
        // Guard: only accept composites actually named `WrapperKeepOpaque`.
        if value.path().ident().as_deref() != Some("WrapperKeepOpaque") {
            // Bug fix: the message previously named a nonexistent
            // 'WrapperTypeKeepOpaque' type.
            return Err(Error::new(ErrorKind::Custom(
                "Type to decode is not 'WrapperKeepOpaque'".into(),
            )))
        }
        if value.remaining() != 2 {
            return Err(Error::new(ErrorKind::WrongLength {
                actual_len: value.remaining(),
                expected_len: 2,
            }))
        }
        // The field to decode is a compact len followed by bytes. Decode the length, then grab the bytes.
        let Compact(len) = value
            .decode_item(Compact::<u32>::into_visitor())
            .expect("length checked")?;
        let field = value.next().expect("length checked")?;
        // Sanity check that the compact length we decoded lines up with the number of bytes encoded in the next field.
        if field.bytes().len() != len as usize {
            // Bug fix: error message corrected from 'WrapperTypeKeepOpaque'.
            return Err(Error::new(ErrorKind::Custom(
                "WrapperKeepOpaque compact encoded length doesn't line up with encoded byte len".into(),
            )));
        }
        Ok(WrapperKeepOpaque {
            data: field.bytes().to_vec(),
            _phantom: PhantomDataSendSync::new(),
        })
    }
}
/// Hook `WrapperKeepOpaque` into `scale-decode` by providing its visitor.
impl<T> IntoVisitor for WrapperKeepOpaque<T> {
    type Visitor = WrapperKeepOpaqueVisitor<T>;

    fn into_visitor() -> Self::Visitor {
        // Stateless visitor; the `PhantomData` merely carries `T`.
        let marker = std::marker::PhantomData;
        WrapperKeepOpaqueVisitor(marker)
    }
}
#[cfg(test)]
mod test {
    use scale_decode::DecodeAsType;
    use super::*;
    // Copied from https://github.com/paritytech/substrate/blob/master/frame/support/src/traits/misc.rs
    // and used for tests to check that we can work with the expected TypeInfo without needing to import
    // the frame_support crate, which has quite a lot of dependencies.
    impl<T: scale_info::TypeInfo + 'static> scale_info::TypeInfo for WrapperKeepOpaque<T> {
        type Identity = Self;
        fn type_info() -> scale_info::Type {
            use scale_info::{
                build::Fields,
                meta_type,
                Path,
                Type,
                TypeParameter,
            };
            // Composite shape: first a compact length, then the `T` payload —
            // exactly what the custom EncodeAsType/Visitor impls expect.
            Type::builder()
                .path(Path::new("WrapperKeepOpaque", module_path!()))
                .type_params(vec![TypeParameter::new("T", Some(meta_type::<T>()))])
                .composite(
                    Fields::unnamed()
                        .field(|f| f.compact::<u32>())
                        .field(|f| f.ty::<T>().type_name("T")),
                )
        }
    }
    /// Given a type definition, return type ID and registry representing it.
    fn make_type<T: scale_info::TypeInfo + 'static>(
    ) -> (u32, scale_info::PortableRegistry) {
        let m = scale_info::MetaType::new::<T>();
        let mut types = scale_info::Registry::new();
        let id = types.register_type(&m);
        let portable_registry: scale_info::PortableRegistry = types.into();
        (id.id(), portable_registry)
    }
    /// Assert that `EncodeAsType`/`DecodeAsType` produce and consume exactly the
    /// same bytes as the `parity-scale-codec` `Encode`/`Decode` impls for `t`,
    /// and that both decoders consume all input and agree on the decoded value.
    fn roundtrips_like_scale_codec<T>(t: T)
    where
        T: EncodeAsType
            + DecodeAsType
            + Encode
            + Decode
            + PartialEq
            + std::fmt::Debug
            + scale_info::TypeInfo
            + 'static,
    {
        let (type_id, types) = make_type::<T>();
        let scale_codec_encoded = t.encode();
        let encode_as_type_encoded = t.encode_as_type(type_id, &types).unwrap();
        assert_eq!(
            scale_codec_encoded, encode_as_type_encoded,
            "encoded bytes should match"
        );
        let decode_as_type_bytes = &mut &*scale_codec_encoded;
        let decoded_as_type = T::decode_as_type(decode_as_type_bytes, type_id, &types)
            .expect("decode-as-type decodes");
        let decode_scale_codec_bytes = &mut &*scale_codec_encoded;
        let decoded_scale_codec =
            T::decode(decode_scale_codec_bytes).expect("scale-codec decodes");
        assert!(
            decode_as_type_bytes.is_empty(),
            "no bytes should remain in decode-as-type impl"
        );
        assert!(
            decode_scale_codec_bytes.is_empty(),
            "no bytes should remain in codec-decode impl"
        );
        assert_eq!(
            decoded_as_type, decoded_scale_codec,
            "decoded values should match"
        );
    }
    #[test]
    fn wrapper_keep_opaque_roundtrips_ok() {
        roundtrips_like_scale_codec(WrapperKeepOpaque::from_value(123u64));
        roundtrips_like_scale_codec(WrapperKeepOpaque::from_value(true));
        roundtrips_like_scale_codec(WrapperKeepOpaque::from_value(vec![1u8, 2, 3, 4]));
    }
}
-1
View File
@@ -25,7 +25,6 @@ regex = "1.5.0"
scale-info = { version = "2.0.0", features = ["bit-vec"] }
sp-core = { version = "18.0.0", default-features = false }
sp-keyring = "20.0.0"
sp-runtime = "20.0.0"
syn = "1.0.109"
subxt = { version = "0.27.1", path = "../../subxt" }
subxt-codegen = { version = "0.27.1", path = "../../codegen" }
+4 -2
View File
@@ -466,10 +466,12 @@ async fn chainhead_unstable_storage() {
let sub_id = blocks.subscription_id().unwrap().clone();
let alice: AccountId32 = AccountKeyring::Alice.to_account_id().into();
let addr = node_runtime::storage().system().account(alice).to_bytes();
let addr = node_runtime::storage().system().account(alice);
let addr_bytes = api.storage().address_bytes(&addr).unwrap();
let mut sub = api
.rpc()
.chainhead_unstable_storage(sub_id, hash, &addr, None)
.chainhead_unstable_storage(sub_id, hash, &addr_bytes, None)
.await
.unwrap();
let event = sub.next().await.unwrap().unwrap();
+4 -3
View File
@@ -5,10 +5,11 @@
/// Checks that code generated by `subxt-cli codegen` compiles. Allows inspection of compiler errors
/// directly, more accurately than via the macro and `cargo expand`.
///
/// Generate by:
/// Generate by running this at the root of the repository:
///
/// - run `polkadot --dev --tmp` node locally
/// - `cargo run -p subxt-cli -- codegen | rustfmt > testing/integration-tests/src/codegen/polkadot.rs`
/// ```
/// cargo run --bin subxt -- codegen --file artifacts/polkadot_metadata.scale | rustfmt > testing/integration-tests/src/codegen/polkadot.rs
/// ```
#[rustfmt::skip]
#[allow(clippy::all)]
mod polkadot;
File diff suppressed because one or more lines are too long
@@ -219,6 +219,8 @@ async fn tx_call() {
.contracts()
.contract_info_of(&contract);
let info_addr_bytes = cxt.client().storage().address_bytes(&info_addr).unwrap();
let contract_info = cxt
.client()
.storage()
@@ -235,7 +237,7 @@ async fn tx_call() {
.at(None)
.await
.unwrap()
.fetch_keys(&info_addr.to_bytes(), 10, None)
.fetch_keys(&info_addr_bytes, 10, None)
.await
.unwrap()
.iter()
@@ -5,9 +5,12 @@
use crate::{
node_runtime::{
self,
runtime_types::pallet_staking::{
RewardDestination,
ValidatorPrefs,
runtime_types::{
pallet_staking::{
RewardDestination,
ValidatorPrefs,
},
sp_arithmetic::per_things::Perbill,
},
staking,
},
@@ -33,7 +36,7 @@ fn get_from_seed(seed: &str) -> sr25519::Pair {
fn default_validator_prefs() -> ValidatorPrefs {
ValidatorPrefs {
commission: sp_runtime::Perbill::default(),
commission: Perbill(0),
blocked: false,
}
}
+6 -3
View File
@@ -70,11 +70,14 @@ async fn storage_n_mapish_key_is_properly_created() -> Result<(), subxt::Error>
use codec::Encode;
use node_runtime::runtime_types::sp_core::crypto::KeyTypeId;
let ctx = test_context().await;
let api = ctx.client();
// This is what the generated code hashes a `session().key_owner(..)` key into:
let actual_key_bytes = node_runtime::storage()
let actual_key = node_runtime::storage()
.session()
.key_owner(KeyTypeId([1, 2, 3, 4]), [5u8, 6, 7, 8])
.to_bytes();
.key_owner(KeyTypeId([1, 2, 3, 4]), [5u8, 6, 7, 8]);
let actual_key_bytes = api.storage().address_bytes(&actual_key)?;
// Let's manually hash to what we assume it should be and compare:
let expected_key_bytes = {
-4
View File
@@ -90,10 +90,6 @@ async fn run() {
#[subxt::subxt(
runtime_metadata_path = "{}",
derive_for_all_types = "Eq, PartialEq",
substitute_type(
type = "sp_arithmetic::per_things::Perbill",
with = "::sp_runtime::Perbill"
)
)]
pub mod node_runtime {{}}
"#,
+11 -3
View File
@@ -4,11 +4,19 @@ use codec::{Decode, Encode};
use subxt::utils::AccountId32;
#[derive(Encode, Decode, Debug)]
#[derive(Encode, Decode, subxt::ext::scale_encode::EncodeAsType, subxt::ext::scale_decode::DecodeAsType, Debug)]
#[encode_as_type(crate_path = "subxt::ext::scale_encode")]
#[decode_as_type(crate_path = "subxt::ext::scale_decode")]
pub struct CustomAddress(u16);
#[derive(Encode, Decode, Debug)]
#[derive(Encode, Decode, subxt::ext::scale_encode::EncodeAsType, subxt::ext::scale_decode::DecodeAsType, Debug)]
#[encode_as_type(crate_path = "subxt::ext::scale_encode")]
#[decode_as_type(crate_path = "subxt::ext::scale_decode")]
pub struct Generic<T>(T);
#[derive(Encode, Decode, Debug)]
#[derive(Encode, Decode, subxt::ext::scale_encode::EncodeAsType, subxt::ext::scale_decode::DecodeAsType, Debug)]
#[encode_as_type(crate_path = "subxt::ext::scale_encode")]
#[decode_as_type(crate_path = "subxt::ext::scale_decode")]
pub struct Second<T, U>(U, PhantomData<T>);
#[subxt::subxt(
+11 -1
View File
@@ -41,7 +41,17 @@ pub fn generate_metadata_from_pallets_custom_dispatch_error<
let mut registry = scale_info::Registry::new();
let pallets = registry.map_into_portable(pallets);
let extrinsic = extrinsic.into_portable(&mut registry);
let ty = registry.register_type(&meta_type::<()>());
#[derive(TypeInfo)]
struct Runtime;
#[derive(TypeInfo)]
enum RuntimeCall {}
#[derive(TypeInfo)]
enum RuntimeEvent {}
let ty = registry.register_type(&meta_type::<Runtime>());
registry.register_type(&meta_type::<RuntimeCall>());
registry.register_type(&meta_type::<RuntimeEvent>());
// Metadata needs to contain this DispatchError, since codegen looks for it.
registry.register_type(&meta_type::<DispatchError>());