mirror of
https://github.com/pezkuwichain/pezkuwi-subxt.git
synced 2026-04-26 11:07:56 +00:00
1a88833d73
* impl_runtime_apis: Generate getters for `metadata_at` functions Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * runtime: Implement new `Metadata` runtime trait Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * runtime: Move `metadata_at` functions to construct_runtime macro Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * contruct_runtime: Use `OpaqueMetadata` from hidden imports Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * Adjust testing Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/tests: Add tests for the new API Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/tests: Adjust metdata naming Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Expose `metadata-v14` feature flag Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Expose metadata only under feature flags Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Expose v14 metadata by default Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Expose metadata feature for testing Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Test metadata under different feature flags Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * Update primitives/api/src/lib.rs Co-authored-by: Bastian Köcher <git@kchr.de> * Update primitives/api/src/lib.rs Co-authored-by: Bastian Köcher <git@kchr.de> * client/tests: Adjust testing to reflect trait Metadata change Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/metadata-ir: Add intermediate representation types for metadata Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/metadata-ir: Convert metadata to V14 Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/metadata-ir: Add API to convert metadata to multiple versions Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * 
frame/metadata-ir: Expose V14 under feature flag Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Adjust to metadata IR Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: More adjustments Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Guard v14 details under feature flag Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Adjust testing Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * CI: Ensure `quick-benchmarks` uses `metadata-v14` Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Use `metadata-v14` for benchmarks Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * Adjust cargo fmt Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * kitchensink-runtime: Add feature flag for `metadata-v14` Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support/test: Adjust testing Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support/test: Check crates locally Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * Activate metadata-v14 for pallets Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * Remove metadata-v14 feature flag Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/metadata_ir: Move `api.rs` to `mod.rs` Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Handle latest metadata conversion via IR Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/tests: Add constant for metadata version 14 Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support/test: Fix merge conflict Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * Update frame/support/Cargo.toml Co-authored-by: Bastian Köcher <git@kchr.de> * Update frame/support/src/metadata_ir/mod.rs Co-authored-by: Bastian Köcher <git@kchr.de> * Update frame/support/test/Cargo.toml Co-authored-by: 
Bastian Köcher <git@kchr.de> * Update primitives/api/src/lib.rs Co-authored-by: Bastian Köcher <git@kchr.de> * frame/metadata: Collect pallet documentation for MetadataIR Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/tests: Check pallet documentation is propagated to MetadataIR Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> * frame/support: Improve documentation Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> --------- Signed-off-by: Alexandru Vasile <alexandru.vasile@parity.io> Co-authored-by: parity-processbot <> Co-authored-by: Bastian Köcher <git@kchr.de>
223 lines
6.2 KiB
Rust
223 lines
6.2 KiB
Rust
// This file is part of Substrate.
|
|
|
|
// Copyright (C) Parity Technologies (UK) Ltd.
|
|
// SPDX-License-Identifier: Apache-2.0
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
// you may not use this file except in compliance with the License.
|
|
// You may obtain a copy of the License at
|
|
//
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
//
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
// See the License for the specific language governing permissions and
|
|
// limitations under the License.
|
|
|
|
//! Hash utilities.
|
|
|
|
use crate::metadata_ir;
|
|
use codec::{Codec, MaxEncodedLen};
|
|
use sp_io::hashing::{blake2_128, blake2_256, twox_128, twox_256, twox_64};
|
|
use sp_std::prelude::Vec;
|
|
|
|
// This trait must be kept coherent with frame-support-procedural HasherKind usage
/// Hash the SCALE encoding of `self` with each of the supported storage hashers.
///
/// Blanket-implemented for every `Codec` type below.
pub trait Hashable: Sized {
	/// Blake2 128-bit hash of the encoded value.
	fn blake2_128(&self) -> [u8; 16];
	/// Blake2 256-bit hash of the encoded value.
	fn blake2_256(&self) -> [u8; 32];
	/// `concat(blake2_128(encoded), encoded)` — reversible, see [`Blake2_128Concat`].
	fn blake2_128_concat(&self) -> Vec<u8>;
	/// TwoX 128-bit hash of the encoded value (non-cryptographic).
	fn twox_128(&self) -> [u8; 16];
	/// TwoX 256-bit hash of the encoded value (non-cryptographic).
	fn twox_256(&self) -> [u8; 32];
	/// `concat(twox64(encoded), encoded)` — reversible, see [`Twox64Concat`].
	fn twox_64_concat(&self) -> Vec<u8>;
	/// The plain SCALE encoding, unhashed.
	fn identity(&self) -> Vec<u8>;
}
|
|
|
|
impl<T: Codec> Hashable for T {
|
|
fn blake2_128(&self) -> [u8; 16] {
|
|
self.using_encoded(blake2_128)
|
|
}
|
|
fn blake2_256(&self) -> [u8; 32] {
|
|
self.using_encoded(blake2_256)
|
|
}
|
|
fn blake2_128_concat(&self) -> Vec<u8> {
|
|
self.using_encoded(Blake2_128Concat::hash)
|
|
}
|
|
fn twox_128(&self) -> [u8; 16] {
|
|
self.using_encoded(twox_128)
|
|
}
|
|
fn twox_256(&self) -> [u8; 32] {
|
|
self.using_encoded(twox_256)
|
|
}
|
|
fn twox_64_concat(&self) -> Vec<u8> {
|
|
self.using_encoded(Twox64Concat::hash)
|
|
}
|
|
fn identity(&self) -> Vec<u8> {
|
|
self.encode()
|
|
}
|
|
}
|
|
|
|
/// Hasher to use to hash keys to insert to storage.
pub trait StorageHasher: 'static {
	/// Identifier of this hasher in the metadata intermediate representation,
	/// exported so clients know how storage keys were derived.
	const METADATA: metadata_ir::StorageHasherIR;
	/// The byte output of the hash function (fixed array or `Vec<u8>`).
	type Output: AsRef<[u8]>;
	/// Hash the given (already SCALE-encoded) key bytes.
	fn hash(x: &[u8]) -> Self::Output;

	/// The max length of the final hash, for the given key type.
	fn max_len<K: MaxEncodedLen>() -> usize;
}
|
|
|
|
/// Hasher to use to hash keys to insert to storage.
///
/// Reversible hashers store the encoded key after the hash part, so the key
/// can be recovered from the storage key itself.
pub trait ReversibleStorageHasher: StorageHasher {
	/// Split the hash part out of the input.
	///
	/// I.e. for input `&[hash ++ key ++ some]` returns `&[key ++ some]`
	fn reverse(x: &[u8]) -> &[u8];
}
|
|
|
|
/// Store the key directly.
|
|
pub struct Identity;
|
|
impl StorageHasher for Identity {
|
|
const METADATA: metadata_ir::StorageHasherIR = metadata_ir::StorageHasherIR::Identity;
|
|
type Output = Vec<u8>;
|
|
fn hash(x: &[u8]) -> Vec<u8> {
|
|
x.to_vec()
|
|
}
|
|
fn max_len<K: MaxEncodedLen>() -> usize {
|
|
K::max_encoded_len()
|
|
}
|
|
}
|
|
impl ReversibleStorageHasher for Identity {
	/// There is no hash prefix to strip: the input already is `key ++ rest`.
	fn reverse(x: &[u8]) -> &[u8] {
		x
	}
}
|
|
|
|
/// Hash storage keys with `concat(twox64(key), key)`
|
|
pub struct Twox64Concat;
|
|
impl StorageHasher for Twox64Concat {
|
|
const METADATA: metadata_ir::StorageHasherIR = metadata_ir::StorageHasherIR::Twox64Concat;
|
|
type Output = Vec<u8>;
|
|
fn hash(x: &[u8]) -> Vec<u8> {
|
|
twox_64(x).iter().chain(x.iter()).cloned().collect::<Vec<_>>()
|
|
}
|
|
fn max_len<K: MaxEncodedLen>() -> usize {
|
|
K::max_encoded_len().saturating_add(8)
|
|
}
|
|
}
|
|
impl ReversibleStorageHasher for Twox64Concat {
|
|
fn reverse(x: &[u8]) -> &[u8] {
|
|
if x.len() < 8 {
|
|
log::error!("Invalid reverse: hash length too short");
|
|
return &[]
|
|
}
|
|
&x[8..]
|
|
}
|
|
}
|
|
|
|
/// Hash storage keys with `concat(blake2_128(key), key)`
|
|
pub struct Blake2_128Concat;
|
|
impl StorageHasher for Blake2_128Concat {
|
|
const METADATA: metadata_ir::StorageHasherIR = metadata_ir::StorageHasherIR::Blake2_128Concat;
|
|
type Output = Vec<u8>;
|
|
fn hash(x: &[u8]) -> Vec<u8> {
|
|
blake2_128(x).iter().chain(x.iter()).cloned().collect::<Vec<_>>()
|
|
}
|
|
fn max_len<K: MaxEncodedLen>() -> usize {
|
|
K::max_encoded_len().saturating_add(16)
|
|
}
|
|
}
|
|
impl ReversibleStorageHasher for Blake2_128Concat {
|
|
fn reverse(x: &[u8]) -> &[u8] {
|
|
if x.len() < 16 {
|
|
log::error!("Invalid reverse: hash length too short");
|
|
return &[]
|
|
}
|
|
&x[16..]
|
|
}
|
|
}
|
|
|
|
/// Hash storage keys with blake2 128
|
|
pub struct Blake2_128;
|
|
impl StorageHasher for Blake2_128 {
|
|
const METADATA: metadata_ir::StorageHasherIR = metadata_ir::StorageHasherIR::Blake2_128;
|
|
type Output = [u8; 16];
|
|
fn hash(x: &[u8]) -> [u8; 16] {
|
|
blake2_128(x)
|
|
}
|
|
fn max_len<K: MaxEncodedLen>() -> usize {
|
|
16
|
|
}
|
|
}
|
|
|
|
/// Hash storage keys with blake2 256
|
|
pub struct Blake2_256;
|
|
impl StorageHasher for Blake2_256 {
|
|
const METADATA: metadata_ir::StorageHasherIR = metadata_ir::StorageHasherIR::Blake2_256;
|
|
type Output = [u8; 32];
|
|
fn hash(x: &[u8]) -> [u8; 32] {
|
|
blake2_256(x)
|
|
}
|
|
fn max_len<K: MaxEncodedLen>() -> usize {
|
|
32
|
|
}
|
|
}
|
|
|
|
/// Hash storage keys with twox 128
|
|
pub struct Twox128;
|
|
impl StorageHasher for Twox128 {
|
|
const METADATA: metadata_ir::StorageHasherIR = metadata_ir::StorageHasherIR::Twox128;
|
|
type Output = [u8; 16];
|
|
fn hash(x: &[u8]) -> [u8; 16] {
|
|
twox_128(x)
|
|
}
|
|
fn max_len<K: MaxEncodedLen>() -> usize {
|
|
16
|
|
}
|
|
}
|
|
|
|
/// Hash storage keys with twox 256
|
|
pub struct Twox256;
|
|
impl StorageHasher for Twox256 {
|
|
const METADATA: metadata_ir::StorageHasherIR = metadata_ir::StorageHasherIR::Twox256;
|
|
type Output = [u8; 32];
|
|
fn hash(x: &[u8]) -> [u8; 32] {
|
|
twox_256(x)
|
|
}
|
|
fn max_len<K: MaxEncodedLen>() -> usize {
|
|
32
|
|
}
|
|
}
|
|
|
|
#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn test_twox_64_concat() {
		let r = Twox64Concat::hash(b"foo");
		// Compare against `twox_64` directly rather than the first 8 bytes of
		// `twox_128`, which only match by implementation detail of the twox
		// family — the test should not depend on that coupling.
		assert_eq!(r.split_at(8), (&twox_64(b"foo")[..], &b"foo"[..]))
	}

	#[test]
	fn test_blake2_128_concat() {
		let r = Blake2_128Concat::hash(b"foo");
		assert_eq!(r.split_at(16), (&blake2_128(b"foo")[..], &b"foo"[..]))
	}

	#[test]
	fn test_reverse() {
		// Round-trip: reversing a concat hash recovers the original key bytes.
		assert_eq!(Twox64Concat::reverse(&Twox64Concat::hash(b"foo")), b"foo");
		assert_eq!(Blake2_128Concat::reverse(&Blake2_128Concat::hash(b"foo")), b"foo");
		assert_eq!(Identity::reverse(b"foo"), b"foo");
		// Inputs shorter than the hash prefix hit the error branch and yield
		// an empty slice instead of panicking.
		assert_eq!(Twox64Concat::reverse(&[0u8; 4]), &[] as &[u8]);
		assert_eq!(Blake2_128Concat::reverse(&[0u8; 4]), &[] as &[u8]);
	}

	#[test]
	fn max_lengths() {
		use codec::Encode;
		let encoded_0u32 = &0u32.encode()[..];
		// For every hasher, hashing a max-length encoding must produce exactly
		// `max_len` bytes (u32 always encodes to its max encoded length).
		assert_eq!(Twox64Concat::hash(encoded_0u32).len(), Twox64Concat::max_len::<u32>());
		assert_eq!(Twox128::hash(encoded_0u32).len(), Twox128::max_len::<u32>());
		assert_eq!(Twox256::hash(encoded_0u32).len(), Twox256::max_len::<u32>());
		assert_eq!(Blake2_128::hash(encoded_0u32).len(), Blake2_128::max_len::<u32>());
		assert_eq!(Blake2_128Concat::hash(encoded_0u32).len(), Blake2_128Concat::max_len::<u32>());
		assert_eq!(Blake2_256::hash(encoded_0u32).len(), Blake2_256::max_len::<u32>());
		assert_eq!(Identity::hash(encoded_0u32).len(), Identity::max_len::<u32>());
	}
}
|