Refactor away from opaque hashes (#5226)

* System.BlockHash

* Fix hash

* Introduce K/V iteration in all _concat maps

Also move across:
- System.Account (blake2_128_concat)
- Balances.Locks (twox_64_concat)
- ElectionsPhragmen.VotesOf (twox_64_concat)
- ElectionsPhragmen.StakeOf (twox_64_concat)
- Identity.IdentityOf (twox_64_concat)
- Identity.SubsOf (twox_64_concat)
- Society.Payouts (twox_64_concat)
- Session.NextKeys (twox_64_concat)
- Identity.SuperOf (blake2_128_concat)
- Session.KeyOwner (blake2_128_concat)
- Society.SuspendedCandidates (twox_64_concat)
- Society.SuspendedMembers (twox_64_concat)
- Society.Vouching (twox_64_concat)
- Society.Strikes (twox_64_concat)
- System.EventTopics
- Balances.Account

* Build fixes

* Ensure migration happens in correct order

* Staking.*

* Vesting.* Offences.*

* Democracy.*

* Babe.* Collective.*

* Grandpa.*

* Assets.* Benchmark.* Contracts.* Elections.* Asset.* Nicks.*

Also introduce real account list

* ImOnline.*

* Treasury.*

* Recovery.*

* Final bits.

* Docs

* Fix one test

* Fix test

* All passing except the UI tests

* Remove linked_map part 1

* Remove linked_map

* Some iterator utils for double maps.

* Remove old migrations

* Introduce tombstone for LinkedMap type

* Migration for genesis hash

* Fix build

* Fix hash

* Rename Map is_linked -> unused, keeping backwards compat (#5256)

* Update frame/balances/src/lib.rs

Co-Authored-By: Shawn Tabrizi <shawntabrizi@gmail.com>

* Update frame/elections/src/lib.rs

Co-Authored-By: Shawn Tabrizi <shawntabrizi@gmail.com>

* Remove old migration code.

* Update frame/system/src/lib.rs

Co-Authored-By: Shawn Tabrizi <shawntabrizi@gmail.com>

* Update bin/node/runtime/src/lib.rs

Co-Authored-By: Shawn Tabrizi <shawntabrizi@gmail.com>

* Fix hash

* fix session migration

* Fix warning

Co-authored-by: Jaco Greeff <jacogr@gmail.com>
Co-authored-by: Shawn Tabrizi <shawntabrizi@gmail.com>
Co-authored-by: Robert Habermeier <rphmeier@gmail.com>
This commit is contained in:
Gavin Wood
2020-03-16 23:19:53 +01:00
committed by GitHub
parent 846a9ce8c6
commit af9083f53b
94 changed files with 1111 additions and 2020 deletions
@@ -16,8 +16,9 @@
use sp_std::prelude::*;
use sp_std::borrow::Borrow;
use codec::{Ref, FullCodec, FullEncode, Encode, EncodeLike, EncodeAppend};
use crate::{storage::{self, unhashed}, hash::{StorageHasher, Twox128}, traits::Len};
use codec::{Ref, FullCodec, FullEncode, Decode, Encode, EncodeLike, EncodeAppend};
use crate::{storage::{self, unhashed}, traits::Len};
use crate::hash::{StorageHasher, Twox128, ReversibleStorageHasher};
/// Generator for `StorageDoubleMap` used by `decl_storage`.
///
@@ -55,6 +56,22 @@ pub trait StorageDoubleMap<K1: FullEncode, K2: FullEncode, V: FullCodec> {
/// Storage prefix. Used for generating final key.
fn storage_prefix() -> &'static [u8];
/// The full prefix; just the hash of `module_prefix` concatenated to the hash of
/// `storage_prefix`.
///
/// Every entry of this double map lives under a storage key beginning with
/// these bytes.
fn prefix_hash() -> Vec<u8> {
    let module_prefix_hashed = Twox128::hash(Self::module_prefix());
    let storage_prefix_hashed = Twox128::hash(Self::storage_prefix());
    // Pre-size the buffer: both hashes have fixed, known lengths.
    let mut result = Vec::with_capacity(
        module_prefix_hashed.len() + storage_prefix_hashed.len()
    );
    result.extend_from_slice(&module_prefix_hashed[..]);
    result.extend_from_slice(&storage_prefix_hashed[..]);
    result
}
/// Convert an optional value retrieved from storage to the type queried.
fn from_optional_value_to_query(v: Option<V>) -> Self::Query;
@@ -62,8 +79,7 @@ pub trait StorageDoubleMap<K1: FullEncode, K2: FullEncode, V: FullCodec> {
fn from_query_to_optional_value(v: Self::Query) -> Option<V>;
/// Generate the first part of the key used in top storage.
fn storage_double_map_final_key1<KArg1>(k1: KArg1) -> Vec<u8>
where
fn storage_double_map_final_key1<KArg1>(k1: KArg1) -> Vec<u8> where
KArg1: EncodeLike<K1>,
{
let module_prefix_hashed = Twox128::hash(Self::module_prefix());
@@ -82,19 +98,32 @@ pub trait StorageDoubleMap<K1: FullEncode, K2: FullEncode, V: FullCodec> {
}
/// Generate the full key used in top storage.
fn storage_double_map_final_key<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> Vec<u8>
where
fn storage_double_map_final_key<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> Vec<u8> where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
{
let mut final_key = Self::storage_double_map_final_key1(k1);
final_key.extend_from_slice(k2.using_encoded(Self::Hasher2::hash).as_ref());
let module_prefix_hashed = Twox128::hash(Self::module_prefix());
let storage_prefix_hashed = Twox128::hash(Self::storage_prefix());
let key1_hashed = k1.borrow().using_encoded(Self::Hasher1::hash);
let key2_hashed = k2.borrow().using_encoded(Self::Hasher2::hash);
let mut final_key = Vec::with_capacity(
module_prefix_hashed.len()
+ storage_prefix_hashed.len()
+ key1_hashed.as_ref().len()
+ key2_hashed.as_ref().len()
);
final_key.extend_from_slice(&module_prefix_hashed[..]);
final_key.extend_from_slice(&storage_prefix_hashed[..]);
final_key.extend_from_slice(key1_hashed.as_ref());
final_key.extend_from_slice(key2_hashed.as_ref());
final_key
}
}
impl<K1, K2, V, G> storage::StorageDoubleMap<K1, K2, V> for G
where
impl<K1, K2, V, G> storage::StorageDoubleMap<K1, K2, V> for G where
K1: FullEncode,
K2: FullEncode,
V: FullCodec,
@@ -102,32 +131,28 @@ where
{
type Query = G::Query;
fn hashed_key_for<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> Vec<u8>
where
fn hashed_key_for<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> Vec<u8> where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
{
Self::storage_double_map_final_key(k1, k2)
}
fn contains_key<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> bool
where
fn contains_key<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> bool where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
{
unhashed::exists(&Self::storage_double_map_final_key(k1, k2))
}
fn get<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> Self::Query
where
fn get<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> Self::Query where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
{
G::from_optional_value_to_query(unhashed::get(&Self::storage_double_map_final_key(k1, k2)))
}
fn take<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> Self::Query
where
fn take<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> Self::Query where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
{
@@ -137,8 +162,12 @@ where
G::from_optional_value_to_query(value)
}
fn swap<XKArg1, XKArg2, YKArg1, YKArg2>(x_k1: XKArg1, x_k2: XKArg2, y_k1: YKArg1, y_k2: YKArg2)
where
fn swap<XKArg1, XKArg2, YKArg1, YKArg2>(
x_k1: XKArg1,
x_k2: XKArg2,
y_k1: YKArg1,
y_k2: YKArg2
) where
XKArg1: EncodeLike<K1>,
XKArg2: EncodeLike<K2>,
YKArg1: EncodeLike<K1>,
@@ -160,8 +189,7 @@ where
}
}
fn insert<KArg1, KArg2, VArg>(k1: KArg1, k2: KArg2, val: VArg)
where
fn insert<KArg1, KArg2, VArg>(k1: KArg1, k2: KArg2, val: VArg) where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
VArg: EncodeLike<V>,
@@ -169,8 +197,7 @@ where
unhashed::put(&Self::storage_double_map_final_key(k1, k2), &val.borrow())
}
fn remove<KArg1, KArg2>(k1: KArg1, k2: KArg2)
where
fn remove<KArg1, KArg2>(k1: KArg1, k2: KArg2) where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
{
@@ -181,8 +208,8 @@ where
unhashed::kill_prefix(Self::storage_double_map_final_key1(k1).as_ref())
}
fn iter_prefix<KArg1>(k1: KArg1) -> storage::PrefixIterator<V>
where KArg1: ?Sized + EncodeLike<K1>
fn iter_prefix<KArg1>(k1: KArg1) -> storage::PrefixIterator<V> where
KArg1: ?Sized + EncodeLike<K1>
{
let prefix = Self::storage_double_map_final_key1(k1);
storage::PrefixIterator::<V> {
@@ -192,8 +219,7 @@ where
}
}
fn mutate<KArg1, KArg2, R, F>(k1: KArg1, k2: KArg2, f: F) -> R
where
fn mutate<KArg1, KArg2, R, F>(k1: KArg1, k2: KArg2, f: F) -> R where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
F: FnOnce(&mut Self::Query) -> R,
@@ -213,8 +239,7 @@ where
k1: KArg1,
k2: KArg2,
items: Items,
) -> Result<(), &'static str>
where
) -> Result<(), &'static str> where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
Item: Encode,
@@ -246,8 +271,7 @@ where
k1: KArg1,
k2: KArg2,
items: Items,
)
where
) where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
Item: Encode,
@@ -260,10 +284,10 @@ where
.unwrap_or_else(|_| Self::insert(k1, k2, items));
}
fn decode_len<KArg1, KArg2>(key1: KArg1, key2: KArg2) -> Result<usize, &'static str>
where KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
V: codec::DecodeLength + Len,
fn decode_len<KArg1, KArg2>(key1: KArg1, key2: KArg2) -> Result<usize, &'static str> where
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
V: codec::DecodeLength + Len,
{
let final_key = Self::storage_double_map_final_key(key1, key2);
if let Some(v) = unhashed::get_raw(&final_key) {
@@ -276,6 +300,135 @@ where
Ok(len)
}
}
/// Migrate an item with the given `key1`/`key2` from the storage location
/// derived with `OldHasher1`/`OldHasher2` to the location derived with the
/// map's current pair of hashers.
///
/// Returns `Some(value)` when a value existed under the old key (it is
/// removed from the old location and re-written under the new one),
/// otherwise `None`.
fn migrate_keys<
    OldHasher1: StorageHasher,
    OldHasher2: StorageHasher,
    KeyArg1: EncodeLike<K1>,
    KeyArg2: EncodeLike<K2>,
>(key1: KeyArg1, key2: KeyArg2) -> Option<V> {
    let old_key = {
        // Re-derive the key exactly as `storage_double_map_final_key`
        // would, but using the previous hashers.
        let module_prefix_hashed = Twox128::hash(Self::module_prefix());
        let storage_prefix_hashed = Twox128::hash(Self::storage_prefix());
        let key1_hashed = key1.borrow().using_encoded(OldHasher1::hash);
        let key2_hashed = key2.borrow().using_encoded(OldHasher2::hash);
        let mut final_key = Vec::with_capacity(
            module_prefix_hashed.len()
            + storage_prefix_hashed.len()
            + key1_hashed.as_ref().len()
            + key2_hashed.as_ref().len()
        );
        final_key.extend_from_slice(&module_prefix_hashed[..]);
        final_key.extend_from_slice(&storage_prefix_hashed[..]);
        final_key.extend_from_slice(key1_hashed.as_ref());
        final_key.extend_from_slice(key2_hashed.as_ref());
        final_key
    };
    // `take` removes the old entry; only re-insert when something was there.
    unhashed::take(old_key.as_ref()).map(|value| {
        unhashed::put(Self::storage_double_map_final_key(key1, key2).as_ref(), &value);
        value
    })
}
}
/// Utility to iterate through items in a storage map.
pub struct MapIterator<K, V, Hasher> {
    /// Hashed prefix that all of the iterated entries live under.
    prefix: Vec<u8>,
    /// The last storage key visited; iteration resumes after this key.
    previous_key: Vec<u8>,
    /// When `true`, each visited entry is deleted as it is yielded.
    drain: bool,
    // Carries the key/value/hasher types without storing them.
    _phantom: ::sp_std::marker::PhantomData<(K, V, Hasher)>,
}
impl<
    K: Decode + Sized,
    V: Decode + Sized,
    Hasher: ReversibleStorageHasher
> Iterator for MapIterator<K, V, Hasher> {
    type Item = (K, V);

    /// Advance to the next well-formed entry under `prefix`.
    ///
    /// Entries whose value or key fail to decode are skipped silently;
    /// iteration ends at the first storage key that falls outside `prefix`.
    fn next(&mut self) -> Option<(K, V)> {
        loop {
            // Ask the backend for the key following the one we last visited;
            // `?` ends iteration once we leave the map's key space.
            let candidate = sp_io::storage::next_key(&self.previous_key)
                .filter(|k| k.starts_with(&self.prefix))?;
            self.previous_key = candidate;

            if let Some(value) = unhashed::get::<V>(&self.previous_key) {
                if self.drain {
                    unhashed::kill(&self.previous_key)
                }
                // Strip the prefix and undo the (reversible) hash to recover
                // the encoded key material.
                let mut raw = Hasher::reverse(&self.previous_key[self.prefix.len()..]);
                if let Ok(key) = K::decode(&mut raw) {
                    return Some((key, value));
                }
            }
            // Missing or undecodable entry: skip it and keep scanning.
        }
    }
}
impl<
K1: FullCodec,
K2: FullCodec,
V: FullCodec,
G: StorageDoubleMap<K1, K2, V>,
> storage::IterableStorageDoubleMap<K1, K2, V> for G where
G::Hasher1: ReversibleStorageHasher,
G::Hasher2: ReversibleStorageHasher
{
type Iterator = MapIterator<K2, V, G::Hasher2>;
/// Enumerate all elements in the map.
fn iter(k1: impl EncodeLike<K1>) -> Self::Iterator {
let prefix = G::storage_double_map_final_key1(k1);
Self::Iterator {
prefix: prefix.clone(),
previous_key: prefix,
drain: false,
_phantom: Default::default(),
}
}
/// Enumerate all elements in the map.
fn drain(k1: impl EncodeLike<K1>) -> Self::Iterator {
let prefix = G::storage_double_map_final_key1(k1);
Self::Iterator {
prefix: prefix.clone(),
previous_key: prefix,
drain: true,
_phantom: Default::default(),
}
}
fn translate<O: Decode, F: Fn(O) -> Option<V>>(f: F) {
let prefix = G::prefix_hash();
let mut previous_key = prefix.clone();
loop {
match sp_io::storage::next_key(&previous_key).filter(|n| n.starts_with(&prefix)) {
Some(next) => {
previous_key = next;
let maybe_value = unhashed::get::<O>(&previous_key);
match maybe_value {
Some(value) => match f(value) {
Some(new) => unhashed::put::<V>(&previous_key, &new),
None => unhashed::kill(&previous_key),
},
None => continue,
}
}
None => return,
}
}
}
}
#[cfg(test)]
@@ -1,499 +0,0 @@
// Copyright 2019-2020 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
use codec::{FullCodec, Encode, Decode, EncodeLike, Ref};
use crate::{storage::{self, unhashed}, hash::{StorageHasher, Twox128}, traits::Len};
use sp_std::{prelude::*, marker::PhantomData};
/// Generator for `StorageLinkedMap` used by `decl_storage`.
///
/// By default final key generation relies on `KeyFormat`.
pub trait StorageLinkedMap<K: FullCodec, V: FullCodec> {
    /// The type that get/take returns.
    type Query;

    /// The family of key formats used for this map.
    type KeyFormat: KeyFormat;

    /// Convert an optional value retrieved from storage to the type queried.
    fn from_optional_value_to_query(v: Option<V>) -> Self::Query;

    /// Convert a query to an optional value into storage.
    fn from_query_to_optional_value(v: Self::Query) -> Option<V>;

    /// Generate the full key used in top storage.
    ///
    /// Delegates to this map's `KeyFormat`.
    fn storage_linked_map_final_key<KeyArg>(key: KeyArg) -> Vec<u8>
    where
        KeyArg: EncodeLike<K>,
    {
        <Self::KeyFormat as KeyFormat>::storage_linked_map_final_key::<KeyArg>(&key)
    }

    /// Generate the hashed key for the head of the linked list.
    fn storage_linked_map_final_head_key() -> Vec<u8> {
        <Self::KeyFormat as KeyFormat>::storage_linked_map_final_head_key()
    }
}
/// A type-abstracted key format used for a family of linked-map types.
///
/// # Default mapping of keys to a storage path
///
/// The key for the head of the map is stored at one fixed path:
/// ```nocompile
/// Twox128(module_prefix) ++ Twox128(head_prefix)
/// ```
///
/// For each key, the value stored under that key is appended with a
/// [`Linkage`](struct.Linkage.html) (which holds previous and next key) at the path:
/// ```nocompile
/// Twox128(module_prefix) ++ Twox128(storage_prefix) ++ Hasher(encode(key))
/// ```
///
/// Enumeration is done by getting the head of the linked map and then iterating getting the
/// value and linkage stored at the key until the found linkage has no next key.
///
/// # Warning
///
/// If the keys are not trusted (e.g. can be set by a user), a cryptographic `hasher` such as
/// `blake2_256` must be used. Otherwise, other values in storage can be compromised.
pub trait KeyFormat {
    /// Hasher. Used for generating final key and final head key.
    type Hasher: StorageHasher;

    /// Module prefix. Used for generating final key.
    fn module_prefix() -> &'static [u8];

    /// Storage prefix. Used for generating final key.
    fn storage_prefix() -> &'static [u8];

    /// Storage prefix. Used for generating final head key.
    fn head_prefix() -> &'static [u8];

    /// Generate the full key used in top storage.
    fn storage_linked_map_final_key<K>(key: &K) -> Vec<u8>
    where
        K: Encode,
    {
        let module_prefix_hashed = Twox128::hash(Self::module_prefix());
        let storage_prefix_hashed = Twox128::hash(Self::storage_prefix());
        let key_hashed = key.using_encoded(Self::Hasher::hash);
        // Pre-size the buffer: all three components have known lengths.
        let mut final_key = Vec::with_capacity(
            module_prefix_hashed.len() + storage_prefix_hashed.len() + key_hashed.as_ref().len()
        );
        final_key.extend_from_slice(&module_prefix_hashed[..]);
        final_key.extend_from_slice(&storage_prefix_hashed[..]);
        final_key.extend_from_slice(key_hashed.as_ref());
        final_key
    }

    /// Generate the full key used in top storage to store the head of the linked map.
    fn storage_linked_map_final_head_key() -> Vec<u8> {
        [
            Twox128::hash(Self::module_prefix()),
            Twox128::hash(Self::head_prefix()),
        ].concat()
    }
}
/// Linkage data of an element (its successor and predecessor)
#[derive(Encode, Decode)]
pub struct Linkage<Key> {
    /// Previous element key in storage (None for the first element)
    pub previous: Option<Key>,
    /// Next element key in storage (None for the last element)
    pub next: Option<Key>,
}
impl<Key> Default for Linkage<Key> {
fn default() -> Self {
Self {
previous: None,
next: None,
}
}
}
// Encode like a linkage.
//
// Lets a linkage be written from borrowed/`EncodeLike` keys without first
// converting them into owned `Key`s; field order matches `Linkage` so the
// encodings agree.
#[derive(Encode)]
struct EncodeLikeLinkage<PKey: EncodeLike<Key>, NKey: EncodeLike<Key>, Key: Encode> {
    // Previous element key in storage (None for the first element)
    previous: Option<PKey>,
    // Next element key in storage (None for the last element)
    next: Option<NKey>,
    // The key of the linkage this type encode to
    phantom: core::marker::PhantomData<Key>,
}
/// A key-value pair iterator for enumerable map.
pub struct Enumerator<K, V, F> {
    /// Key of the next element to visit; `None` once the end of the linked
    /// list is reached.
    next: Option<K>,
    // Carries the value type and key format without storing them.
    _phantom: PhantomData<(V, F)>,
}

impl<K, V, F> Enumerator<K, V, F> {
    /// Create an explicit enumerator for testing.
    #[cfg(test)]
    pub fn from_head(head: K) -> Self {
        Enumerator {
            next: Some(head),
            _phantom: Default::default(),
        }
    }
}
impl<K, V, F> Iterator for Enumerator<K, V, F>
where
    K: FullCodec,
    V: FullCodec,
    F: KeyFormat,
{
    type Item = (K, V);

    /// Walk one step along the linked list: read the value and linkage stored
    /// under the current `next` key, then advance `next` to the linkage's
    /// successor. Ends iteration (`None`) when the list is exhausted or the
    /// stored state is corrupted.
    fn next(&mut self) -> Option<Self::Item> {
        let next = self.next.take()?;
        let (val, linkage): (V, Linkage<K>) = {
            let next_full_key = F::storage_linked_map_final_key(&next);
            match read_with_linkage::<K, V>(next_full_key.as_ref()) {
                Some(value) => value,
                None => {
                    // TODO #3700: error should be handleable.
                    runtime_print!(
                        "ERROR: Corrupted state: linked map {:?}{:?}: \
                        next value doesn't exist at {:?}",
                        F::module_prefix(), F::storage_prefix(), next_full_key,
                    );
                    return None
                }
            }
        };
        self.next = linkage.next;
        Some((next, val))
    }
}
/// Update linkage when this element is removed.
///
/// Takes care of updating the `next` pointer of the previous element and the
/// `previous` pointer of the next element, as well as updating the head if
/// the removed element was first.
fn remove_linkage<K, V, F>(linkage: Linkage<K>)
where
    K: FullCodec,
    V: FullCodec,
    F: KeyFormat,
{
    // Storage keys of the two neighbours, if any.
    let next_key = linkage.next.as_ref().map(|k| F::storage_linked_map_final_key(k));
    let prev_key = linkage.previous.as_ref().map(|k| F::storage_linked_map_final_key(k));

    if let Some(prev_key) = prev_key {
        // Retrieve previous element and update `next`
        if let Some(mut res) = read_with_linkage::<K, V>(prev_key.as_ref()) {
            res.1.next = linkage.next;
            unhashed::put(prev_key.as_ref(), &res);
        } else {
            // TODO #3700: error should be handleable.
            runtime_print!(
                "ERROR: Corrupted state: linked map {:?}{:?}: \
                previous value doesn't exist at {:?}",
                F::module_prefix(), F::storage_prefix(), prev_key,
            );
        }
    } else {
        // we were first so let's update the head
        write_head::<&K, K, F>(linkage.next.as_ref());
    }

    if let Some(next_key) = next_key {
        // Update previous of next element
        if let Some(mut res) = read_with_linkage::<K, V>(next_key.as_ref()) {
            res.1.previous = linkage.previous;
            unhashed::put(next_key.as_ref(), &res);
        } else {
            // TODO #3700: error should be handleable.
            runtime_print!(
                "ERROR: Corrupted state: linked map {:?}{:?}: \
                next value doesn't exist at {:?}",
                F::module_prefix(), F::storage_prefix(), next_key,
            );
        }
    }
}
/// Read the contained data and its linkage.
///
/// A linked-map entry is stored as the tuple `(value, linkage)`; decoding it
/// as such recovers both in one read. `None` if nothing is stored at `key`.
pub(super) fn read_with_linkage<K, V>(key: &[u8]) -> Option<(V, Linkage<K>)>
where
    K: Decode,
    V: Decode,
{
    unhashed::get::<(V, Linkage<K>)>(key)
}
/// Generate linkage for newly inserted element.
///
/// Takes care of updating head and previous head's pointer.
pub(super) fn new_head_linkage<KeyArg, K, V, F>(key: KeyArg) -> Linkage<K>
where
    KeyArg: EncodeLike<K>,
    K: FullCodec,
    V: FullCodec,
    F: KeyFormat,
{
    if let Some(head) = read_head::<K, F>() {
        // update previous head predecessor
        {
            let head_key = F::storage_linked_map_final_key(&head);
            if let Some((data, linkage)) = read_with_linkage::<K, V>(head_key.as_ref()) {
                // Point the old head's `previous` at the new element,
                // keeping its value and `next` pointer untouched.
                let new_linkage = EncodeLikeLinkage::<_, _, K> {
                    previous: Some(Ref::from(&key)),
                    next: linkage.next.as_ref(),
                    phantom: Default::default(),
                };
                unhashed::put(head_key.as_ref(), &(data, new_linkage));
            } else {
                // TODO #3700: error should be handleable.
                runtime_print!(
                    "ERROR: Corrupted state: linked map {:?}{:?}: \
                    head value doesn't exist at {:?}",
                    F::module_prefix(), F::storage_prefix(), head_key,
                );
                // Thus we consider we are first - update the head and produce empty linkage
                write_head::<_, _, F>(Some(key));
                return Linkage::default();
            }
        }
        // update to current head
        write_head::<_, _, F>(Some(key));
        // return linkage with pointer to previous head
        let mut linkage = Linkage::default();
        linkage.next = Some(head);
        linkage
    } else {
        // we are first - update the head and produce empty linkage
        write_head::<_, _, F>(Some(key));
        Linkage::default()
    }
}
/// Read current head pointer.
///
/// `None` when the map is empty (no head stored).
pub(crate) fn read_head<K, F>() -> Option<K>
where
    K: Decode,
    F: KeyFormat,
{
    let head_key = F::storage_linked_map_final_head_key();
    unhashed::get(head_key.as_ref())
}
/// Overwrite current head pointer.
///
/// If `None` is given head is removed from storage.
pub(super) fn write_head<KeyArg, K, F>(head: Option<KeyArg>)
where
    KeyArg: EncodeLike<K>,
    K: FullCodec,
    F: KeyFormat,
{
    let head_key = F::storage_linked_map_final_head_key();
    if let Some(head) = head.as_ref() {
        unhashed::put(head_key.as_ref(), head)
    } else {
        unhashed::kill(head_key.as_ref())
    }
}
impl<K, V, G> storage::StorageLinkedMap<K, V> for G
where
    K: FullCodec,
    V: FullCodec,
    G: StorageLinkedMap<K, V>,
{
    type Query = G::Query;
    type Enumerator = Enumerator<K, V, G::KeyFormat>;

    /// `true` if a value exists under `key`.
    fn contains_key<KeyArg: EncodeLike<K>>(key: KeyArg) -> bool {
        unhashed::exists(Self::storage_linked_map_final_key(key).as_ref())
    }

    /// Load the value under `key`, converted to the map's query type.
    fn get<KeyArg: EncodeLike<K>>(key: KeyArg) -> Self::Query {
        let val = unhashed::get(Self::storage_linked_map_final_key(key).as_ref());
        G::from_optional_value_to_query(val)
    }

    /// Swap the values under `key1` and `key2` while keeping both linkages in
    /// place; if only one key is occupied, the value is moved to the other key
    /// (re-linked as a new head).
    fn swap<KeyArg1: EncodeLike<K>, KeyArg2: EncodeLike<K>>(key1: KeyArg1, key2: KeyArg2) {
        let final_key1 = Self::storage_linked_map_final_key(Ref::from(&key1));
        let final_key2 = Self::storage_linked_map_final_key(Ref::from(&key2));
        let full_value_1 = read_with_linkage::<K, V>(final_key1.as_ref());
        let full_value_2 = read_with_linkage::<K, V>(final_key2.as_ref());
        match (full_value_1, full_value_2) {
            // Just keep linkage in order and only swap values.
            (Some((value1, linkage1)), Some((value2, linkage2))) => {
                unhashed::put(final_key1.as_ref(), &(value2, linkage1));
                unhashed::put(final_key2.as_ref(), &(value1, linkage2));
            }
            // Remove key and insert the new one.
            (Some((value, _linkage)), None) => {
                Self::remove(key1);
                let linkage = new_head_linkage::<_, _, V, G::KeyFormat>(key2);
                unhashed::put(final_key2.as_ref(), &(value, linkage));
            }
            // Remove key and insert the new one.
            (None, Some((value, _linkage))) => {
                Self::remove(key2);
                let linkage = new_head_linkage::<_, _, V, G::KeyFormat>(key1);
                unhashed::put(final_key1.as_ref(), &(value, linkage));
            }
            // No-op.
            (None, None) => (),
        }
    }

    /// Store `val` under `key`; an existing entry keeps its linkage, a new
    /// entry becomes the head of the list.
    fn insert<KeyArg: EncodeLike<K>, ValArg: EncodeLike<V>>(key: KeyArg, val: ValArg) {
        let final_key = Self::storage_linked_map_final_key(Ref::from(&key));
        let linkage = match read_with_linkage::<_, V>(final_key.as_ref()) {
            // overwrite but reuse existing linkage
            Some((_data, linkage)) => linkage,
            // create new linkage
            None => new_head_linkage::<_, _, V, G::KeyFormat>(key),
        };
        unhashed::put(final_key.as_ref(), &(val, linkage))
    }

    /// Remove the entry under `key` (via `take`, which also repairs linkage).
    fn remove<KeyArg: EncodeLike<K>>(key: KeyArg) {
        G::take(key);
    }

    /// Mutate the value under `key` through `f`; the result of `f` decides
    /// whether the (possibly new) value is written back or the entry removed.
    fn mutate<KeyArg: EncodeLike<K>, R, F: FnOnce(&mut Self::Query) -> R>(key: KeyArg, f: F) -> R {
        let final_key = Self::storage_linked_map_final_key(Ref::from(&key));
        let (mut val, _linkage) = read_with_linkage::<K, V>(final_key.as_ref())
            .map(|(data, linkage)| (G::from_optional_value_to_query(Some(data)), Some(linkage)))
            .unwrap_or_else(|| (G::from_optional_value_to_query(None), None));
        let ret = f(&mut val);
        match G::from_query_to_optional_value(val) {
            Some(ref val) => G::insert(key, val),
            None => G::remove(key),
        }
        ret
    }

    /// Remove and return the value under `key`, stitching its neighbours'
    /// linkage back together.
    fn take<KeyArg: EncodeLike<K>>(key: KeyArg) -> Self::Query {
        let final_key = Self::storage_linked_map_final_key(key);
        let full_value: Option<(V, Linkage<K>)> = unhashed::take(final_key.as_ref());
        let value = full_value.map(|(data, linkage)| {
            remove_linkage::<K, V, G::KeyFormat>(linkage);
            data
        });
        G::from_optional_value_to_query(value)
    }

    /// Iterate over all `(key, value)` pairs, starting from the stored head.
    fn enumerate() -> Self::Enumerator {
        Enumerator::<_, _, G::KeyFormat> {
            next: read_head::<_, G::KeyFormat>(),
            _phantom: Default::default(),
        }
    }

    /// The key of the first element, if any.
    fn head() -> Option<K> {
        read_head::<_, G::KeyFormat>()
    }

    /// Decode the length of the (length-prefixed) value under `key` without
    /// decoding the whole value.
    fn decode_len<KeyArg: EncodeLike<K>>(key: KeyArg) -> Result<usize, &'static str>
        where V: codec::DecodeLength + Len
    {
        let key = Self::storage_linked_map_final_key(key);
        if let Some(v) = unhashed::get_raw(key.as_ref()) {
            <V as codec::DecodeLength>::len(&v).map_err(|e| e.what())
        } else {
            // Fall back to the length of the default query value (0 if none).
            let len = G::from_query_to_optional_value(G::from_optional_value_to_query(None))
                .map(|v| v.len())
                .unwrap_or(0);
            Ok(len)
        }
    }

    /// The translation happens in-place, new keys are inserted at the same time as old keys are
    /// removed, thus new keys must not collide with still remaining old keys.
    fn translate<K2, V2, TK, TV>(translate_key: TK, translate_val: TV) -> Result<(), Option<K2>>
        where K2: FullCodec + Clone, V2: Decode, TK: Fn(K2) -> K, TV: Fn(V2) -> V
    {
        // An empty map (no head) translates to `Err(None)`.
        let head_key = read_head::<K2, G::KeyFormat>().ok_or(None)?;
        let mut last_key = None;
        let mut current_key = head_key.clone();
        write_head::<&K, K, G::KeyFormat>(Some(&translate_key(head_key)));
        let translate_linkage = |old: Linkage<K2>| -> Linkage<K> {
            Linkage {
                previous: old.previous.map(&translate_key),
                next: old.next.map(&translate_key),
            }
        };
        loop {
            let old_raw_key = G::KeyFormat::storage_linked_map_final_key(&current_key);
            let x = unhashed::take(old_raw_key.as_ref());
            let (val, linkage): (V2, Linkage<K2>) = match x {
                Some(v) => v,
                None => {
                    // we failed to read value and linkage. Update the last key's linkage
                    // to end the map early, since it's impossible to iterate further.
                    if let Some(last_key) = last_key {
                        let last_raw_key = G::storage_linked_map_final_key(&last_key);
                        if let Some((val, mut linkage))
                            = read_with_linkage::<K, V>(last_raw_key.as_ref())
                        {
                            // defensive: should always happen, since it was just written
                            // in the last iteration of the loop.
                            linkage.next = None;
                            unhashed::put(last_raw_key.as_ref(), &(&val, &linkage));
                        }
                    }
                    return Err(Some(current_key));
                }
            };
            let next = linkage.next.clone();
            let val = translate_val(val);
            let linkage = translate_linkage(linkage);
            // and write in the value and linkage under the new key.
            let new_key = translate_key(current_key.clone());
            let new_raw_key = G::storage_linked_map_final_key(&new_key);
            unhashed::put(new_raw_key.as_ref(), &(&val, &linkage));
            match next {
                None => break,
                Some(next) => {
                    last_key = Some(new_key);
                    current_key = next
                },
            }
        }
        Ok(())
    }
}
@@ -17,8 +17,9 @@
#[cfg(not(feature = "std"))]
use sp_std::prelude::*;
use sp_std::borrow::Borrow;
use codec::{FullCodec, FullEncode, Encode, EncodeLike, Ref, EncodeAppend};
use crate::{storage::{self, unhashed}, hash::{StorageHasher, Twox128}, traits::Len};
use codec::{FullCodec, FullEncode, Decode, Encode, EncodeLike, Ref, EncodeAppend};
use crate::{storage::{self, unhashed}, traits::Len};
use crate::hash::{StorageHasher, Twox128, ReversibleStorageHasher};
/// Generator for `StorageMap` used by `decl_storage`.
///
@@ -44,6 +45,22 @@ pub trait StorageMap<K: FullEncode, V: FullCodec> {
/// Storage prefix. Used for generating final key.
fn storage_prefix() -> &'static [u8];
/// The full prefix; just the hash of `module_prefix` concatenated to the hash of
/// `storage_prefix`.
///
/// Every entry of this map lives under a storage key beginning with these bytes.
fn prefix_hash() -> Vec<u8> {
    let module_prefix_hashed = Twox128::hash(Self::module_prefix());
    let storage_prefix_hashed = Twox128::hash(Self::storage_prefix());
    // Pre-size the buffer: both hashes have fixed, known lengths.
    let mut result = Vec::with_capacity(
        module_prefix_hashed.len() + storage_prefix_hashed.len()
    );
    result.extend_from_slice(&module_prefix_hashed[..]);
    result.extend_from_slice(&storage_prefix_hashed[..]);
    result
}
/// Convert an optional value retrieved from storage to the type queried.
fn from_optional_value_to_query(v: Option<V>) -> Self::Query;
@@ -51,8 +68,7 @@ pub trait StorageMap<K: FullEncode, V: FullCodec> {
fn from_query_to_optional_value(v: Self::Query) -> Option<V>;
/// Generate the full key used in top storage.
fn storage_map_final_key<KeyArg>(key: KeyArg) -> Vec<u8>
where
fn storage_map_final_key<KeyArg>(key: KeyArg) -> Vec<u8> where
KeyArg: EncodeLike<K>,
{
let module_prefix_hashed = Twox128::hash(Self::module_prefix());
@@ -71,6 +87,107 @@ pub trait StorageMap<K: FullEncode, V: FullCodec> {
}
}
/// Utility to iterate through items in a storage map.
pub struct StorageMapIterator<K, V, Hasher> {
    /// Hashed (module ++ storage) prefix all of the map's entries live under.
    prefix: Vec<u8>,
    /// The last storage key visited; iteration resumes after this key.
    previous_key: Vec<u8>,
    /// When `true`, each visited entry is deleted as it is yielded.
    drain: bool,
    // Carries the key/value/hasher types without storing them.
    _phantom: ::sp_std::marker::PhantomData<(K, V, Hasher)>,
}
impl<
    K: Decode + Sized,
    V: Decode + Sized,
    Hasher: ReversibleStorageHasher
> Iterator for StorageMapIterator<K, V, Hasher> {
    type Item = (K, V);

    /// Advance to the next well-formed entry under `prefix`.
    ///
    /// Entries whose value or key fail to decode are skipped silently;
    /// iteration ends at the first storage key that falls outside `prefix`.
    fn next(&mut self) -> Option<(K, V)> {
        loop {
            // Ask the backend for the key following the one we last visited;
            // `?` ends iteration once we leave the map's key space.
            let candidate = sp_io::storage::next_key(&self.previous_key)
                .filter(|k| k.starts_with(&self.prefix))?;
            self.previous_key = candidate;

            if let Some(value) = unhashed::get::<V>(&self.previous_key) {
                if self.drain {
                    unhashed::kill(&self.previous_key)
                }
                // Strip the prefix and undo the (reversible) hash to recover
                // the encoded key material.
                let mut raw = Hasher::reverse(&self.previous_key[self.prefix.len()..]);
                if let Ok(key) = K::decode(&mut raw) {
                    return Some((key, value));
                }
            }
            // Missing or undecodable entry: skip it and keep scanning.
        }
    }
}
impl<
K: FullCodec,
V: FullCodec,
G: StorageMap<K, V>,
> storage::IterableStorageMap<K, V> for G where
G::Hasher: ReversibleStorageHasher
{
type Iterator = StorageMapIterator<K, V, G::Hasher>;
/// Enumerate all elements in the map.
fn iter() -> Self::Iterator {
let prefix = G::prefix_hash();
Self::Iterator {
prefix: prefix.clone(),
previous_key: prefix,
drain: false,
_phantom: Default::default(),
}
}
/// Enumerate all elements in the map.
fn drain() -> Self::Iterator {
let prefix = G::prefix_hash();
Self::Iterator {
prefix: prefix.clone(),
previous_key: prefix,
drain: true,
_phantom: Default::default(),
}
}
fn translate<O: Decode, F: Fn(K, O) -> Option<V>>(f: F) {
let prefix = G::prefix_hash();
let mut previous_key = prefix.clone();
loop {
match sp_io::storage::next_key(&previous_key).filter(|n| n.starts_with(&prefix)) {
Some(next) => {
previous_key = next;
let maybe_value = unhashed::get::<O>(&previous_key);
match maybe_value {
Some(value) => {
let mut key_material = G::Hasher::reverse(&previous_key[prefix.len()..]);
match K::decode(&mut key_material) {
Ok(key) => match f(key, value) {
Some(new) => unhashed::put::<V>(&previous_key, &new),
None => unhashed::kill(&previous_key),
},
Err(_) => continue,
}
}
None => continue,
}
}
None => return,
}
}
}
}
impl<K: FullEncode, V: FullCodec, G: StorageMap<K, V>> storage::StorageMap<K, V> for G {
type Query = G::Query;
@@ -228,4 +345,26 @@ impl<K: FullEncode, V: FullCodec, G: StorageMap<K, V>> storage::StorageMap<K, V>
Ok(len)
}
}
/// Migrate an item with the given `key` from the storage location derived
/// with `OldHasher` to the location derived with the map's current hasher.
///
/// Returns `Some(value)` when a value existed under the old key (it is
/// removed from the old location and re-written under the new one),
/// otherwise `None`.
fn migrate_key<OldHasher: StorageHasher, KeyArg: EncodeLike<K>>(key: KeyArg) -> Option<V> {
    let old_key = {
        // Re-derive the key exactly as `storage_map_final_key` would, but
        // using the previous hasher.
        let module_prefix_hashed = Twox128::hash(Self::module_prefix());
        let storage_prefix_hashed = Twox128::hash(Self::storage_prefix());
        let key_hashed = key.borrow().using_encoded(OldHasher::hash);
        let mut final_key = Vec::with_capacity(
            module_prefix_hashed.len() + storage_prefix_hashed.len() + key_hashed.as_ref().len()
        );
        final_key.extend_from_slice(&module_prefix_hashed[..]);
        final_key.extend_from_slice(&storage_prefix_hashed[..]);
        final_key.extend_from_slice(key_hashed.as_ref());
        final_key
    };
    // `take` removes the old entry; only re-insert when something was there.
    unhashed::take(old_key.as_ref()).map(|value| {
        unhashed::put(Self::storage_map_final_key(key).as_ref(), &value);
        value
    })
}
}
@@ -23,23 +23,20 @@
//!
//! This is internal api and is subject to change.
mod linked_map;
mod map;
mod double_map;
mod value;
pub use linked_map::{StorageLinkedMap, Enumerator, Linkage, KeyFormat as LinkedMapKeyFormat};
pub use map::StorageMap;
pub use double_map::StorageDoubleMap;
pub use value::StorageValue;
#[cfg(test)]
#[allow(dead_code)]
mod tests {
use sp_io::TestExternalities;
use codec::{Encode, Decode};
use crate::storage::{unhashed, generator::{StorageValue, StorageLinkedMap}};
use codec::Encode;
use crate::storage::{unhashed, generator::StorageValue, IterableStorageMap};
struct Runtime {}
pub trait Trait {
@@ -56,16 +53,10 @@ mod tests {
pub struct Module<T: Trait> for enum Call where origin: T::Origin {}
}
#[derive(Encode, Decode, Clone, Debug, Eq, PartialEq)]
struct NumberNumber {
a: u32,
b: u32,
}
crate::decl_storage! {
trait Store for Module<T: Trait> as Runtime {
Value get(fn value) config(): (u64, u64);
NumberMap: linked_map hasher(blake2_256) NumberNumber => u64;
NumberMap: map hasher(identity) u32 => u64;
}
}
@@ -89,41 +80,25 @@ mod tests {
}
#[test]
fn linked_map_translate_works() {
use super::linked_map::{self, Enumerator, KeyFormat};
type Format = <NumberMap as StorageLinkedMap<NumberNumber, u64>>::KeyFormat;
fn map_translate_works() {
let t = GenesisConfig::default().build_storage().unwrap();
TestExternalities::new(t).execute_with(|| {
// start with a map of u32 -> u32.
for i in 0u32..100u32 {
let final_key = <Format as KeyFormat>::storage_linked_map_final_key(&i);
let linkage = linked_map::new_head_linkage::<_, u32, u32, Format>(&i);
unhashed::put(final_key.as_ref(), &(&i, linkage));
unhashed::put(&NumberMap::hashed_key_for(&i), &(i as u64));
}
let head = linked_map::read_head::<u32, Format>().unwrap();
assert_eq!(
Enumerator::<u32, u32, Format>::from_head(head).collect::<Vec<_>>(),
(0..100).rev().map(|x| (x, x)).collect::<Vec<_>>(),
NumberMap::iter().collect::<Vec<_>>(),
(0..100).map(|x| (x as u32, x as u64)).collect::<Vec<_>>(),
);
// do translation.
NumberMap::translate(
|k: u32| NumberNumber { a: k, b: k },
|v: u32| (v as u64) << 32 | v as u64,
).unwrap();
NumberMap::translate(|k: u32, v: u64| if k % 2 == 0 { Some((k as u64) << 32 | v) } else { None });
assert!(linked_map::read_head::<NumberNumber, Format>().is_some());
assert_eq!(
NumberMap::enumerate().collect::<Vec<_>>(),
(0..100u32).rev().map(|x| (
NumberNumber { a: x, b: x },
(x as u64) << 32 | x as u64,
)).collect::<Vec<_>>(),
NumberMap::iter().collect::<Vec<_>>(),
(0..50u32).map(|x| x * 2).map(|x| (x, (x as u64) << 32 | x as u64)).collect::<Vec<_>>(),
);
})
}
+63 -70
View File
@@ -202,78 +202,60 @@ pub trait StorageMap<K: FullEncode, V: FullCodec> {
/// function for this purpose.
fn decode_len<KeyArg: EncodeLike<K>>(key: KeyArg) -> Result<usize, &'static str>
where V: codec::DecodeLength + Len;
/// Migrate an item with the given `key` from a defunct `OldHasher` to the current hasher.
///
/// If the key doesn't exist, then it's a no-op. If it does, then it returns its value.
fn migrate_key<OldHasher: StorageHasher, KeyArg: EncodeLike<K>>(key: KeyArg) -> Option<V>;
/// Migrate an item with the given `key` from a `blake2_256` hasher to the current hasher.
///
/// If the key doesn't exist, then it's a no-op. If it does, then it returns its value.
fn migrate_key_from_blake<KeyArg: EncodeLike<K>>(key: KeyArg) -> Option<V> {
Self::migrate_key::<crate::hash::Blake2_256, KeyArg>(key)
}
}
/// A strongly-typed linked map in storage.
///
/// Similar to `StorageMap` but allows to enumerate other elements and doesn't implement append.
///
/// Details on implementation can be found at
/// [`generator::StorageLinkedMap`]
pub trait StorageLinkedMap<K: FullCodec, V: FullCodec> {
/// The type that get/take return.
type Query;
/// A strongly-typed map in storage whose keys and values can be iterated over.
pub trait IterableStorageMap<K: FullEncode, V: FullCodec>: StorageMap<K, V> {
/// The type that iterates over all `(key, value)`.
type Enumerator: Iterator<Item = (K, V)>;
type Iterator: Iterator<Item = (K, V)>;
/// Does the value (explicitly) exist in storage?
fn contains_key<KeyArg: EncodeLike<K>>(key: KeyArg) -> bool;
/// Enumerate all elements in the map in no particular order. If you alter the map while doing
/// this, you'll get undefined results.
fn iter() -> Self::Iterator;
/// Load the value associated with the given key from the map.
fn get<KeyArg: EncodeLike<K>>(key: KeyArg) -> Self::Query;
/// Remove all elements from the map and iterate through them in no particular order. If you
/// add elements to the map while doing this, you'll get undefined results.
fn drain() -> Self::Iterator;
/// Swap the values of two keys.
fn swap<KeyArg1: EncodeLike<K>, KeyArg2: EncodeLike<K>>(key1: KeyArg1, key2: KeyArg2);
/// Translate the values of all elements by a function `f`, in the map in no particular order.
/// By returning `None` from `f` for an element, you'll remove it from the map.
fn translate<O: Decode, F: Fn(K, O) -> Option<V>>(f: F);
}
/// Store a value to be associated with the given key from the map.
fn insert<KeyArg: EncodeLike<K>, ValArg: EncodeLike<V>>(key: KeyArg, val: ValArg);
/// A strongly-typed double map in storage whose secondary keys and values can be iterated over.
pub trait IterableStorageDoubleMap<
K1: FullCodec,
K2: FullCodec,
V: FullCodec
>: StorageDoubleMap<K1, K2, V> {
/// The type that iterates over all `(key, value)`.
type Iterator: Iterator<Item = (K2, V)>;
/// Remove the value under a key.
fn remove<KeyArg: EncodeLike<K>>(key: KeyArg);
/// Enumerate all elements in the map with first key `k1` in no particular order. If you add or
/// remove values whose first key is `k1` to the map while doing this, you'll get undefined
/// results.
fn iter(k1: impl EncodeLike<K1>) -> Self::Iterator;
/// Mutate the value under a key.
fn mutate<KeyArg: EncodeLike<K>, R, F: FnOnce(&mut Self::Query) -> R>(key: KeyArg, f: F) -> R;
/// Remove all elements from the map with first key `k1` and iterate through them in no
/// particular order. If you add elements with first key `k1` to the map while doing this,
/// you'll get undefined results.
fn drain(k1: impl EncodeLike<K1>) -> Self::Iterator;
/// Take the value under a key.
fn take<KeyArg: EncodeLike<K>>(key: KeyArg) -> Self::Query;
/// Return current head element.
fn head() -> Option<K>;
/// Enumerate all elements in the map.
fn enumerate() -> Self::Enumerator;
/// Read the length of the value in a fast way, without decoding the entire value.
///
/// `T` is required to implement `Codec::DecodeLength`.
///
/// Note that `0` is returned as the default value if no encoded value exists at the given key.
/// Therefore, this function cannot be used as a sign of _existence_. use the `::contains_key()`
/// function for this purpose.
fn decode_len<KeyArg: EncodeLike<K>>(key: KeyArg) -> Result<usize, &'static str>
where V: codec::DecodeLength + Len;
/// Translate the keys and values from some previous `(K2, V2)` to the current type.
///
/// `TK` translates keys from the old type, and `TV` translates values.
///
/// Returns `Err` if the map could not be interpreted as the old type, and Ok if it could.
/// The `Err` contains the first key which could not be migrated, or `None` if the
/// head of the list could not be read.
///
/// # Warning
///
/// This function must be used with care, before being updated the storage still contains the
/// old type, thus other calls (such as `get`) will fail at decoding it.
///
/// # Usage
///
/// This would typically be called inside the module implementation of on_runtime_upgrade, while
/// ensuring **no usage of this storage are made before the call to `on_runtime_upgrade`**. (More
/// precisely prior initialized modules doesn't make use of this storage).
fn translate<K2, V2, TK, TV>(translate_key: TK, translate_val: TV) -> Result<(), Option<K2>>
where K2: FullCodec + Clone, V2: Decode, TK: Fn(K2) -> K, TV: Fn(V2) -> V;
/// Translate the values of all elements by a function `f`, in the map in no particular order.
/// By returning `None` from `f` for an element, you'll remove it from the map.
fn translate<O: Decode, F: Fn(O) -> Option<V>>(f: F);
}
/// An implementation of a map with a two keys.
@@ -377,6 +359,17 @@ pub trait StorageDoubleMap<K1: FullEncode, K2: FullEncode, V: FullCodec> {
KArg1: EncodeLike<K1>,
KArg2: EncodeLike<K2>,
V: codec::DecodeLength + Len;
/// Migrate an item with the given `key1` and `key2` from defunct `OldHasher1` and
/// `OldHasher2` to the current hashers.
///
/// If the key doesn't exist, then it's a no-op. If it does, then it returns its value.
fn migrate_keys<
OldHasher1: StorageHasher,
OldHasher2: StorageHasher,
KeyArg1: EncodeLike<K1>,
KeyArg2: EncodeLike<K2>,
>(key1: KeyArg1, key2: KeyArg2) -> Option<V>;
}
/// Iterator for prefixed map.
@@ -440,7 +433,7 @@ pub trait StoragePrefixedMap<Value: FullCodec> {
}
/// Iter over all value of the storage.
fn iter() -> PrefixIterator<Value> {
fn iter_values() -> PrefixIterator<Value> {
let prefix = Self::final_prefix();
PrefixIterator {
prefix: prefix.to_vec(),
@@ -535,26 +528,26 @@ mod test {
assert_eq!(MyStorage::final_prefix().to_vec(), k);
// test iteration
assert_eq!(MyStorage::iter().collect::<Vec<_>>(), vec![]);
assert_eq!(MyStorage::iter_values().collect::<Vec<_>>(), vec![]);
unhashed::put(&[&k[..], &vec![1][..]].concat(), &1u64);
unhashed::put(&[&k[..], &vec![1, 1][..]].concat(), &2u64);
unhashed::put(&[&k[..], &vec![8][..]].concat(), &3u64);
unhashed::put(&[&k[..], &vec![10][..]].concat(), &4u64);
assert_eq!(MyStorage::iter().collect::<Vec<_>>(), vec![1, 2, 3, 4]);
assert_eq!(MyStorage::iter_values().collect::<Vec<_>>(), vec![1, 2, 3, 4]);
// test removal
MyStorage::remove_all();
assert_eq!(MyStorage::iter().collect::<Vec<_>>(), vec![]);
assert_eq!(MyStorage::iter_values().collect::<Vec<_>>(), vec![]);
// test migration
unhashed::put(&[&k[..], &vec![1][..]].concat(), &1u32);
unhashed::put(&[&k[..], &vec![8][..]].concat(), &2u32);
assert_eq!(MyStorage::iter().collect::<Vec<_>>(), vec![]);
assert_eq!(MyStorage::iter_values().collect::<Vec<_>>(), vec![]);
MyStorage::translate_values(|v: u32| v as u64).unwrap();
assert_eq!(MyStorage::iter().collect::<Vec<_>>(), vec![1, 2]);
assert_eq!(MyStorage::iter_values().collect::<Vec<_>>(), vec![1, 2]);
MyStorage::remove_all();
// test migration 2
@@ -564,9 +557,9 @@ mod test {
unhashed::put(&[&k[..], &vec![10][..]].concat(), &4u32);
// (contains some value that successfully decoded to u64)
assert_eq!(MyStorage::iter().collect::<Vec<_>>(), vec![1, 2, 3]);
assert_eq!(MyStorage::iter_values().collect::<Vec<_>>(), vec![1, 2, 3]);
assert_eq!(MyStorage::translate_values(|v: u128| v as u64), Err(2));
assert_eq!(MyStorage::iter().collect::<Vec<_>>(), vec![1, 3]);
assert_eq!(MyStorage::iter_values().collect::<Vec<_>>(), vec![1, 3]);
MyStorage::remove_all();
// test that other values are not modified.